Import Cobalt 21.master.0.301702
diff --git a/src/.pre-commit-config.yaml b/src/.pre-commit-config.yaml
index 2b7e100..6240f63 100644
--- a/src/.pre-commit-config.yaml
+++ b/src/.pre-commit-config.yaml
@@ -5,7 +5,7 @@
 default_language_version:
     python: python3
 
-exclude: '^(third_party|v8)/'
+exclude: '^(build|third_party|v8)/'
 
 repos:
 -   repo: https://cobalt.googlesource.com/pre-commit-hooks
@@ -123,6 +123,7 @@
         entry: python precommit_hooks/run_python2_unittests.py
         language: python
         language_version: python2
+        additional_dependencies: ['mock']
         types: [python]
     -   id: osslint
         name: osslint
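
Note: pre-commit applies the top-level 'exclude' value as a Python regular
expression against each repo-relative path, so the change above keeps the
hooks away from the freshly imported //build tree. The effect of the pattern
can be sanity-checked with grep -E (paths illustrative):

    $ echo 'build/android/BUILD.gn' | grep -qE '^(build|third_party|v8)/'; echo $?
    0    # matched: the path is excluded from the hooks
    $ echo 'base/logging.cc' | grep -qE '^(build|third_party|v8)/'; echo $?
    1    # no match: the file is still checked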
diff --git a/src/base/test/test_child_process.cc b/src/base/test/test_child_process.cc
index 3beac5b..ce15856 100644
--- a/src/base/test/test_child_process.cc
+++ b/src/base/test/test_child_process.cc
@@ -6,9 +6,6 @@
 #include <stdlib.h>
 #include <string.h>
 
-#include "starboard/common/string.h"
-#include "starboard/types.h"
-
 // Simple testing command, used to exercise child process launcher calls.
 //
 // Usage:
@@ -33,7 +30,7 @@
     int start_idx = 1;
 
     if (strcmp(argv[1], "-x") == 0) {
-      return_code = SbStringAToI(argv[2]);
+      return_code = atoi(argv[2]);
       start_idx = 3;
     }
 
diff --git a/src/build/.style.yapf b/src/build/.style.yapf
new file mode 100644
index 0000000..b4ebbe2
--- /dev/null
+++ b/src/build/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+
+# New directories should use a .style.yapf that does not include the following:
+column_limit = 80
+indent_width = 2
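
yapf discovers the nearest .style.yapf automatically when run on files under
this directory; an explicit invocation pinning this style (target file
illustrative) would be:

    yapf --style=build/.style.yapf --diff build/add_rts_filters.py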
diff --git a/src/build/BUILD.gn b/src/build/BUILD.gn
new file mode 100644
index 0000000..51ef9b0
--- /dev/null
+++ b/src/build/BUILD.gn
@@ -0,0 +1,46 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+
+source_set("buildflag_header_h") {
+  sources = [ "buildflag.h" ]
+}
+
+buildflag_header("branding_buildflags") {
+  header = "branding_buildflags.h"
+
+  if (is_chrome_branded) {
+    flags = [
+      "CHROMIUM_BRANDING=0",
+      "GOOGLE_CHROME_BRANDING=1",
+    ]
+  } else {
+    flags = [
+      "CHROMIUM_BRANDING=1",
+      "GOOGLE_CHROME_BRANDING=0",
+    ]
+  }
+}
+
+buildflag_header("chromecast_buildflags") {
+  header = "chromecast_buildflags.h"
+
+  flags = [ "IS_CHROMECAST=$is_chromecast" ]
+}
+
+buildflag_header("chromeos_buildflags") {
+  header = "chromeos_buildflags.h"
+
+  flags = [
+    "IS_CHROMEOS_DEVICE=$is_chromeos_device",
+
+    "IS_CHROMEOS_LACROS=$is_chromeos_lacros",
+    "IS_CHROMEOS_ASH=$is_chromeos_ash",
+  ]
+}
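
For context, each buildflag_header() target turns GN args into a generated
header that C++ code reads through the BUILDFLAG() macro from buildflag.h. A
sketch of flipping one of these flags at gen time (out directory
illustrative):

    gn gen out/Default --args='is_chromecast=true'
    # Targets depending on :chromecast_buildflags then see
    # BUILDFLAG(IS_CHROMECAST) evaluate to true via chromecast_buildflags.h.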
diff --git a/src/build/DIR_METADATA b/src/build/DIR_METADATA
new file mode 100644
index 0000000..c914ddc
--- /dev/null
+++ b/src/build/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Build"
+}
+
+team_email: "build@chromium.org"
diff --git a/src/build/METADATA b/src/build/METADATA
new file mode 100644
index 0000000..91f4e6b
--- /dev/null
+++ b/src/build/METADATA
@@ -0,0 +1,20 @@
+name: "build"
+description:
+  "Subtree at build."
+
+third_party {
+  url {
+    type: LOCAL_SOURCE
+    value: "/build_mirror"
+  }
+  url {
+    type: GIT
+    value: "https://chromium.googlesource.com/chromium/src/build"
+  }
+  version: "4cb2bd7db6575df5a62f65ea60fb7ca2f2ff9f05"
+  last_upgrade_date {
+    year: 2021
+    month: 5
+    day: 4
+  }
+}
diff --git a/src/build/OWNERS.setnoparent b/src/build/OWNERS.setnoparent
new file mode 100644
index 0000000..5797d4d
--- /dev/null
+++ b/src/build/OWNERS.setnoparent
@@ -0,0 +1,62 @@
+# List of OWNERS files that can be used together with "set noparent". See
+# docs/code_reviews.md#owners-file-details for more details.
+
+# Overall project governance.
+file://ENG_REVIEW_OWNERS
+
+# Third-party dependency review, see //docs/adding_to_third_party.md
+file://third_party/OWNERS
+
+# Security reviews
+file://chromeos/SECURITY_OWNERS
+file://content/browser/SITE_ISOLATION_OWNERS
+file://fuchsia/SECURITY_OWNERS
+file://ipc/SECURITY_OWNERS
+file://net/base/SECURITY_OWNERS
+file://sandbox/linux/OWNERS
+file://sandbox/mac/OWNERS
+file://sandbox/OWNERS
+file://sandbox/win/OWNERS
+file://third_party/blink/SECURITY_OWNERS
+
+# Privacy reviews
+file://tools/traffic_annotation/summary/TRAFFIC_ANNOTATION_OWNERS
+file://tools/metrics/ukm/PRIVACY_OWNERS
+file://base/metrics/OWNERS
+
+# Blink API owners are responsible for decisions about what APIs Blink should
+# expose to the open web.
+file://third_party/blink/API_OWNERS
+
+# Extension related files.
+file://chrome/browser/extensions/component_extensions_allowlist/EXTENSION_ALLOWLIST_OWNERS
+file://extensions/common/api/API_OWNERS
+
+# This restriction is in place to avoid accidental addition to our top level
+# layout files, such as adding duplicated assets, or introducing new colors when
+# we don't want them.
+file://ui/android/java/res/LAYOUT_OWNERS
+
+# Updating policy_templates.json can have drastic effects for systems depending
+# on policy definitions (for example, Google's cloud management tools for
+# Chrome and Chrome OS).
+# The rules are documented at:
+# https://sites.google.com/a/chromium.org/dev/developers/how-tos/enterprise/adding-new-policies
+file://components/policy/resources/ENTERPRISE_POLICY_OWNERS
+
+# This restriction is in place due to the complicated compliance regulations
+# around this code.
+file://chrome/android/java/src/org/chromium/chrome/browser/searchwidget/COMPLIANCE_OWNERS
+
+# Notification channels appear in system UI and are persisted forever by
+# Android, so should not be added or removed lightly, and the proper
+# deprecation and versioning steps must be taken when doing so.
+file://chrome/android/java/src/org/chromium/chrome/browser/notifications/channels/NOTIFICATION_CHANNEL_OWNERS
+
+# The Weblayer API is supposed to be stable and will be used outside of the
+# chromium repository.
+file://weblayer/API_OWNERS
+
+# New features for lock/login UI on Chrome OS need to work stably in all corner
+# cases.
+file://ash/login/LOGIN_LOCK_OWNERS
diff --git a/src/build/OWNERS.status b/src/build/OWNERS.status
new file mode 100644
index 0000000..f5cc1fc
--- /dev/null
+++ b/src/build/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short term, or frequently changing updates.
+#
+# The format of the file is
+#
+#  you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA based reviewer.
diff --git a/src/build/README.chromium b/src/build/README.chromium
deleted file mode 100644
index 012df35..0000000
--- a/src/build/README.chromium
+++ /dev/null
@@ -1,15 +0,0 @@
-List of property sheets to be included by projects:
-  common.vsprops
-    Not used anymore. No-op. Kept for compatibility with current projects.
-
-  debug.vsprops
-    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
-
-  external_code.vsprops
-    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
-
-  output_dll_copy.rules
-    Run to enable automatic copy of DLL when they are as an input file in a vcproj project.
-
-  release.vsprops
-    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependant on the CHROME_BUILD_TYPE environment variable.
diff --git a/src/build/README.md b/src/build/README.md
new file mode 100644
index 0000000..f9dde97
--- /dev/null
+++ b/src/build/README.md
@@ -0,0 +1,36 @@
+# About
+`//build` contains:
+ * Core GN templates and configuration
+ * Core Python build scripts
+
+Since this directory is DEPS'ed in by some other repositories (webrtc, pdfium,
+v8, etc.), it should be kept as self-contained as possible by not referring
+to files outside of it. Some exceptions exist (`//testing`, select
+`//third_party` subdirectories), but new dependencies tend to break these other
+projects, and so should be avoided.
+
+Changes to `//build` should be landed in the Chromium repo. They will then be
+replicated to the stand-alone [build repo](https://chromium.googlesource.com/chromium/src/build)
+by the [gsubtreed tool](https://chromium.googlesource.com/infra/infra/+/master/infra/services/gsubtreed).
+Note: You can find all directories already available through gsubtreed in the
+[list of all chromium repos](https://chromium.googlesource.com/).
+
+## Contents
+ * `//build/config` - Common templates via `.gni` files.
+ * `//build/toolchain` - GN toolchain definitions.
+ * `Other .py files` - Some are used by GN/Ninja. Some by gclient hooks, some
+   are just random utilities.
+
+Files referenced by `//.gn`:
+ * `//build/BUILDCONFIG.gn` - Included by all `BUILD.gn` files.
+ * `//build/secondary` - An overlay for `BUILD.gn` files. Enables adding
+   `BUILD.gn` to directories that live in sub-repositories.
+ * `//build_overrides` -
+   Refer to [//build_overrides/README.md](../build_overrides/README.md).
+
+## Docs
+
+* [Writing GN Templates](docs/writing_gn_templates.md)
+* [Debugging Slow Builds](docs/debugging_slow_builds.md)
+* [Mac Hermetic Toolchains](docs/mac_hermetic_toolchain.md)
+* [Android Build Documentation](android/docs/README.md)
diff --git a/src/build/add_rts_filters.py b/src/build/add_rts_filters.py
new file mode 100755
index 0000000..4186c39
--- /dev/null
+++ b/src/build/add_rts_filters.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a dummy RTS filter file if a real one doesn't exist yes.
+  Real filter files are  generated by the RTS binary for suites with any
+  skippable tests. The rest of the suites need to have dummy files because gn
+  will expect the file to be present.
+
+  Implementation uses try / except because the filter files are written
+  relatively close to when this code creates the dummy files.
+
+  The following type of implementation would have a race condition:
+  if not os.path.isfile(filter_file):
+    with open(filter_file, 'w') as fp:
+      fp.write('*')
+"""
+import errno
+import os
+import sys
+
+
+def main():
+  filter_file = sys.argv[1]
+  directory = os.path.dirname(filter_file)
+  try:
+    os.makedirs(directory)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+
+  try:
+    fp = os.open(filter_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+  else:
+    with os.fdopen(fp, 'w') as file_obj:
+      file_obj.write('*')  # '*' is a dummy that means run everything
+
+
+if __name__ == '__main__':
+  sys.exit(main())
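
A shell sketch of the same race-free pattern the docstring describes, assuming
a FILTER_FILE variable; bash's noclobber option gives the same
exactly-one-writer guarantee as the O_CREAT | O_EXCL open above:

    mkdir -p "$(dirname "$FILTER_FILE")"   # like makedirs: EEXIST is harmless
    ( set -o noclobber
      echo '*' > "$FILTER_FILE"            # fails if a real filter already exists
    ) 2>/dev/null || true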
diff --git a/src/build/android/AndroidManifest.xml b/src/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..3c4ed29
--- /dev/null
+++ b/src/build/android/AndroidManifest.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy"
+    android:versionCode="1"
+    android:versionName="1.0">
+
+</manifest>
diff --git a/src/build/android/BUILD.gn b/src/build/android/BUILD.gn
new file mode 100644
index 0000000..1be9f47
--- /dev/null
+++ b/src/build/android/BUILD.gn
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/build_vars.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+import("//build_overrides/build.gni")
+
+if (enable_java_templates) {
+  # Create or update the API versions cache if necessary by running a
+  # functionally empty lint task. This prevents racy creation of the
+  # cache while linting java targets in android_lint.
+  android_lint("prepare_android_lint_cache") {
+    create_cache = true
+  }
+
+  if (enable_jdk_library_desugaring) {
+    dex_jdk_libs("all_jdk_libs") {
+      output = "$target_out_dir/$target_name.l8.dex"
+      min_sdk_version = default_min_sdk_version
+    }
+  }
+
+  generate_build_config_srcjar("build_config_gen") {
+    use_final_fields = false
+  }
+
+  java_library("build_config_java") {
+    supports_android = true
+    srcjar_deps = [ ":build_config_gen" ]
+    jar_excluded_patterns = [ "*/build/BuildConfig.class" ]
+  }
+
+  write_native_libraries_java("native_libraries_gen") {
+    use_final_fields = false
+  }
+
+  android_library("native_libraries_java") {
+    srcjar_deps = [ ":native_libraries_gen" ]
+
+    # New version of NativeLibraries.java (with the actual correct values) will
+    # be created when creating an apk.
+    jar_excluded_patterns = [ "*/NativeLibraries.class" ]
+  }
+}
+
+python_library("devil_chromium_py") {
+  pydeps_file = "devil_chromium.pydeps"
+  data = [
+    "devil_chromium.py",
+    "devil_chromium.json",
+    "//third_party/catapult/third_party/gsutil/",
+    "//third_party/catapult/devil/devil/devil_dependencies.json",
+
+    # Read by gn_helpers.BuildWithChromium()
+    "//build/config/gclient_args.gni",
+  ]
+}
+
+# Contains runtime deps for installing apks.
+# E.g. from test_runner.py or from apk_operations.py.
+group("apk_installer_data") {
+  # Other //build users let devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps = [
+      "//build/android/pylib/device/commands",
+      "//tools/android/md5sum",
+    ]
+    data = [
+      "//third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar",
+    ]
+  }
+}
+
+python_library("apk_operations_py") {
+  pydeps_file = "apk_operations.pydeps"
+  deps = [ ":apk_installer_data" ]
+}
+
+python_library("test_runner_py") {
+  testonly = true
+  pydeps_file = "test_runner.pydeps"
+  data = [
+    "pylib/gtest/filter/",
+    "pylib/instrumentation/render_test.html.jinja",
+    "test_wrapper/logdog_wrapper.py",
+    "${android_sdk_build_tools}/aapt",
+    "${android_sdk_build_tools}/dexdump",
+    "${android_sdk_build_tools}/lib64/libc++.so",
+    "${android_sdk_build_tools}/split-select",
+    "${android_sdk_root}/platform-tools/adb",
+    "//third_party/requests/",
+  ]
+  data_deps = [
+    ":apk_installer_data",
+    ":devil_chromium_py",
+    ":logdog_wrapper_py",
+    ":stack_tools",
+  ]
+
+  # Other //build users let devil library fetch these from Google Storage.
+  if (build_with_chromium) {
+    data_deps += [ "//tools/android/forwarder2" ]
+    data += [ "//tools/android/avd/proto/" ]
+    if (is_asan) {
+      data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+    }
+  }
+
+  # Proguard is needed only when using apks (rather than native executables).
+  if (enable_java_templates) {
+    deps = [ "//build/android/stacktrace:java_deobfuscate" ]
+  }
+}
+
+python_library("logdog_wrapper_py") {
+  pydeps_file = "test_wrapper/logdog_wrapper.pydeps"
+}
+
+python_library("resource_sizes_py") {
+  pydeps_file = "resource_sizes.pydeps"
+  data_deps = [
+    ":devil_chromium_py",
+    "//third_party/catapult/tracing:convert_chart_json",
+  ]
+  data = [
+    build_vars_file,
+    android_readelf,
+  ]
+}
+
+# Tools necessary for symbolizing tombstones or stack traces that are output to
+# logcat.
+# Hidden behind build_with_chromium because some third party repos that use
+# //build don't pull in //third_party/android_platform.
+# TODO(crbug.com/1120190): Move stack script into //build/third_party
+#     and enable unconditionally.
+group("stack_tools") {
+  if (build_with_chromium) {
+    data = [
+      "tombstones.py",
+      "pylib/symbols/",
+      "stacktrace/",
+    ]
+
+    data_deps =
+        [ "//third_party/android_platform/development/scripts:stack_py" ]
+  }
+}
+
+# GN evaluates each .gn file once per toolchain, so restricting to default
+# toolchain will ensure write_file() is called only once.
+assert(current_toolchain == default_toolchain)
+
+# NOTE: If other platforms would benefit from exporting variables, we should
+# move this to a more top-level place.
+# It is currently here (instead of //BUILD.gn) to ensure that the file is
+# written even for non-chromium embedders of //build.
+_build_vars_json = {
+  # Underscore prefix so that it appears at the top.
+  _HEADER = "Generated during 'gn gen' by //build/android/BUILD.gn."
+  forward_variables_from(android_build_vars_json, "*")
+}
+
+write_file(build_vars_file, _build_vars_json, "json")
diff --git a/src/build/android/CheckInstallApk-debug.apk b/src/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc3191
--- /dev/null
+++ b/src/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/src/build/android/DIR_METADATA b/src/build/android/DIR_METADATA
new file mode 100644
index 0000000..7a2580a
--- /dev/null
+++ b/src/build/android/DIR_METADATA
@@ -0,0 +1 @@
+os: ANDROID
diff --git a/src/build/android/adb_chrome_public_command_line b/src/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000..86ece8c
--- /dev/null
+++ b/src/build/android/adb_chrome_public_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+#   adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+#   adb_chrome_public_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name chrome-command-line "$@"
diff --git a/src/build/android/adb_command_line.py b/src/build/android/adb_command_line.py
new file mode 100755
index 0000000..c3ec8d4
--- /dev/null
+++ b/src/build/android/adb_command_line.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for reading / writing command-line flag files on device(s)."""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.tools import script_common
+from devil.utils import cmd_helper
+from devil.utils import logging_common
+
+
+def CheckBuildTypeSupportsFlags(device, command_line_flags_file):
+  is_webview = command_line_flags_file == 'webview-command-line'
+  if device.IsUserBuild() and is_webview:
+    raise device_errors.CommandFailedError(
+        'WebView only respects flags on a userdebug or eng device, yours '
+        'is a user build.', device)
+  elif device.IsUserBuild():
+    logging.warning(
+        'Your device (%s) is a user build; Chrome may or may not pick up '
+        'your commandline flags. Check your '
+        '"command_line_on_non_rooted_enabled" preference, or switch '
+        'devices.', device)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...]
+
+No flags: Prints existing command-line file.
+Empty string: Deletes command-line file.
+Otherwise: Writes command-line file.
+
+'''
+  parser.add_argument('--name', required=True,
+                      help='Name of file where to store flags on the device.')
+  parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+                      help='(deprecated) No longer used.')
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args, remote_args = parser.parse_known_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+                                                    default_retries=0)
+  all_devices = device_utils.DeviceUtils.parallel(devices)
+
+  if not remote_args:
+    # No args == do not update, just print flags.
+    remote_args = None
+    action = ''
+  elif len(remote_args) == 1 and not remote_args[0]:
+    # Single empty string arg == delete flags
+    remote_args = []
+    action = 'Deleted command line file. '
+  else:
+    action = 'Wrote command line file. '
+
+  def update_flags(device):
+    CheckBuildTypeSupportsFlags(device, args.name)
+    changer = flag_changer.FlagChanger(device, args.name)
+    if remote_args is not None:
+      flags = changer.ReplaceFlags(remote_args)
+    else:
+      flags = changer.GetCurrentFlags()
+    return (device, device.build_description, flags)
+
+  updated_values = all_devices.pMap(update_flags).pGet(None)
+
+  print('%sCurrent flags (in %s):' % (action, args.name))
+  for d, desc, flags in updated_values:
+    if flags:
+      # Shell-quote flags for easy copy/paste as new args on the terminal.
+      quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+    else:
+      quoted_flags = '( empty )'
+    print('  %s (%s): %s' % (d, desc, quoted_flags))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
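
A hypothetical session covering the three modes in the usage string above
(flag values illustrative):

    # Print the current flags:
    build/android/adb_command_line.py --name chrome-command-line
    # Replace the flags wholesale:
    build/android/adb_command_line.py --name chrome-command-line --enable-logging --v=1
    # Delete the flags file:
    build/android/adb_command_line.py --name chrome-command-line ""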
diff --git a/src/build/android/adb_gdb b/src/build/android/adb_gdb
new file mode 100755
index 0000000..6de4273
--- /dev/null
+++ b/src/build/android/adb_gdb
@@ -0,0 +1,1000 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+      rm -f "$GDBSERVER_PIDFILE"
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+          "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER"
+IDE_DIR="$DEFAULT_PULL_LIBS_DIR"
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper program names.
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ADB=
+ANNOTATE=
+CGDB=
+GDBINIT=
+GDBSERVER=
+HELP=
+IDE=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+ATTACH_DELAY=1
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --device=*)
+      export ANDROID_SERIAL=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ide)
+      IDE=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --port=*)
+      PORT=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --attach-delay=*)
+      ATTACH_DELAY=$optarg
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --output-directory=*)
+      CHROMIUM_OUTPUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --cgdb)
+      CGDB=cgdb
+      ;;
+    --cgdb=*)
+      CGDB=$optarg
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    -*)
+      panic "Unknown option $opt, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/lib/                (used by GYP builds)
+  \$CHROMIUM_SRC/<out>/lib.unstripped/     (used by GN builds)
+
+Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory.
+
+You can set the path manually via --symbol-dir.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, it will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-dir options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and a device is not specified with either --device or ANDROID_SERIAL.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to greatly improve the debugging experience, e.g. by allowing
+readable thread stacks. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --cgdb[=<file>]       Use cgdb (an interface for gdb that shows the code).
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --output-directory=<path> Specify the output directory (e.g. "out/Debug").
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --attach-delay=<num>  Seconds to wait for gdbserver to attach to the
+                        remote process before starting gdb. Default 1.
+                        <num> may be a float if your sleep(1) supports it.
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+  --device=<file>       ADB device serial to use (-s flag).
+  --port=<port>         Specify the tcp port to use.
+  --ide                 Forward gdb port, but do not enter gdb console.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then
+  if [[ -e "build.ninja" ]]; then
+    CHROMIUM_OUTPUT_DIR=$PWD
+  else
+    panic "Please specify an output directory by using one of:
+       --output-directory=out/Debug
+       CHROMIUM_OUTPUT_DIR=out/Debug
+       Setting working directory to an output directory.
+       See --help."
+   fi
+fi
+
+if ls *.so >/dev/null 2>&1; then
+  panic ".so files found in your working directory. These will conflict with" \
+      "library lookup logic. Change your working directory and try again."
+fi
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_OUTPUT_DIR.
+#
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  # GYP places unstripped libraries under out/lib
+  # GN places them under out/lib.unstripped
+  local PARENT_DIR="$CHROMIUM_OUTPUT_DIR"
+  if [[ ! -e "$PARENT_DIR" ]]; then
+    PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR"
+  fi
+  SYMBOL_DIR="$PARENT_DIR/lib.unstripped"
+  if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+    SYMBOL_DIR="$PARENT_DIR/lib"
+    if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+      panic "Could not find any symbols under \
+$PARENT_DIR/lib{.unstripped}. Please build the program first!"
+    fi
+  fi
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir
+elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+  panic "Could not find any symbols under $SYMBOL_DIR"
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there is more than one device connected and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
+
+# Find the target architecture from a local shared library.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  # ls prints a broken pipe error when there are a lot of libs.
+  local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1)
+  local SO_DESC=$(file $RANDOM_LIB)
+  case $SO_DESC in
+    *32-bit*ARM,*) echo "arm";;
+    *64-bit*ARM,*) echo "arm64";;
+    *32-bit*Intel,*) echo "x86";;
+    *x86-64,*) echo "x86_64";;
+    *32-bit*MIPS,*) echo "mips";;
+    *) echo "";
+  esac
+}
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because that is what it uses
+  # to find NDK-specific files (host gdb).
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+  local HOST_OS
+  if [ -z "$NDK_HOST_SYSTEM" ]; then
+    HOST_OS=$(uname -s)
+    case $HOST_OS in
+      Linux) NDK_HOST_SYSTEM=linux;;
+      Darwin) NDK_HOST_SYSTEM=darwin;;
+      *) panic "You can't run this script on this system: $HOST_OS";;
+    esac
+  fi
+  echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+  local HOST_ARCH HOST_OS
+  if [ -z "$NDK_HOST_ARCH" ]; then
+    HOST_OS=$(get_ndk_host_system)
+    HOST_ARCH=$(uname -p)
+    if [ "$HOST_ARCH" = "unknown" ]; then
+      # In case where "-p" returns "unknown" just use "-m" (machine hardware
+      # name). According to this patch from Fedora "-p" is equivalent to "-m"
+      # anyway: https://goo.gl/Pd47x3
+      HOST_ARCH=$(uname -m)
+    fi
+    case $HOST_ARCH in
+      i?86) NDK_HOST_ARCH=x86;;
+      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+    esac
+    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+      # implementations of the tool. See http://b.android.com/53769
+      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+      if [ "$HOST_64BITS" ]; then
+        NDK_HOST_ARCH=x86_64
+      fi
+    fi
+  fi
+  echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    arm64)
+      echo "aarch64-linux-android"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    x86_64)
+      echo "x86_64-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86 and x86_64!
+  if [ "$1" = "x86" -o "$1" = "x86_64" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST_OS HOST_ARCH LD CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  LD=
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  LD=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-ld")
+  if [ -z "$LD" -a "$HOST_ARCH" = "x86_64" ]; then
+    LD=$(get_ndk_toolchain_prebuilt \
+         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-ld")
+  fi
+  if [ ! -f "$LD" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    LD=$(get_ndk_toolchain_prebuilt \
+         "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-ld")
+  fi
+  if [ -z "$LD" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${LD%%ld}"
+}
+
+# $1: NDK install path
+get_ndk_host_gdb_client() {
+  local NDK_DIR="$1"
+  local HOST_OS HOST_ARCH
+
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  echo "$NDK_DIR/prebuilt/$HOST_OS-$HOST_ARCH/bin/gdb"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match to avoid
+# issues when both binaries do not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+  GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT")
+  if [ -z "$GDB" ]; then
+    panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+  fi
+  log "Host gdb client: $GDB"
+fi
+
+# Find gdbserver binary, we will later push it to /data/local/tmp
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# Return the timestamp of a given file, as number of seconds since epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
+
+# Allow several concurrent debugging sessions
+APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd)
+fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?"
+TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID"
+TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+      arm|x86|mips) GDBEXEC=app_process32;;
+      arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;;
+      *) panic "Unknown app_process for architecture!";;
+esac
+
+# Default to app_process if bit-width specific process isn't found.
+adb_shell ls /system/bin/$GDBEXEC > /dev/null
+if [ $? != 0 ]; then
+    GDBEXEC=app_process
+fi
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null
+if [ $? == 0 ]; then
+    GDBEXEC=$GDBEXEC_ASAN
+fi
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+if [[ -n "$ANDROID_SERIAL" ]]; then
+  DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT"
+fi
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint"
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then
+  log "Auto-config: --pull-libs  (no cached libraries)"
+  PULL_LIBS=true
+else
+  HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint")
+  log "Host build fingerprint:   $HOST_FINGERPRINT"
+  if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+    log "Auto-config: --no-pull-libs (fingerprint match)"
+    NO_PULL_LIBS=true
+  else
+    log "Auto-config: --pull-libs  (fingerprint mismatch)"
+    PULL_LIBS=true
+  fi
+fi
+
+# If requested, work for M-x gdb.  The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Use the PID given with --pid, or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    panic "Can't find application process PID."
+  fi
+  log "Found process PID: $PID"
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+  # Need to check that this works properly.
+  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
+  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+    echo "$ adb shell $SU_PREFIX \"echo foo\""
+    cat $SU_PREFIX_TEST_LOG
+    exit 1
+  fi
+  COMMAND_PREFIX="$SU_PREFIX \""
+  COMMAND_SUFFIX="\""
+else
+  SHELL_UID=$("$ADB" shell cat /proc/self/status | \
+              awk '$1 == "Uid:" { print $2; }')
+  log "Shell UID: $SHELL_UID"
+  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+    COMMAND_PREFIX="run-as $PACKAGE_NAME"
+    COMMAND_SUFFIX=
+  else
+    COMMAND_PREFIX=
+    COMMAND_SUFFIX=
+  fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+log "Command suffix: '$COMMAND_SUFFIX'"
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# Pull device's system libraries that are mapped by our process.
+# Pulling all system libraries takes too long, so determine which ones
+# we need by looking at /proc/$PID/maps instead
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
+  if [ $? != 0 ]; then
+    echo "ERROR: Could not list process's memory mappings."
+    if [ "$SU_PREFIX" ]; then
+      panic "Are you sure your --su-prefix is correct?"
+    else
+      panic "Use --su-prefix if the application is not debuggable."
+    fi
+  fi
+  # Remove the fingerprint file in case pulling one of the libs fails.
+  rm -f "$PULL_LIBS_DIR/build.fingerprint"
+  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+      awk '$6 ~ /\/(system|apex|vendor)\/.*\.so$/ { print $6; }' | sort -u)
+  for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Writing the device fingerprint"
+  echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint"
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+SOLIB_DIRS=${SOLIB_DIRS%:}  # Strip trailing :
+
+# Applications with minSdkVersion >= 24 will have their data directories
+# created with rwx------ permissions, preventing adbd from forwarding to
+# the gdbserver socket.
+adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \
+    adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \
+    adb_shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
+if [ -z "$PORT" ]; then
+  # Random port to allow multiple concurrent sessions.
+  PORT=$(( $RANDOM % 1000 + 5039 ))
+fi
+HOST_PORT=$PORT
+TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT
+
+# Setup network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)"
+"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which requires of course root privileges.
+# Maybe we should add a --root option to enable this?
+#
+
+for i in 1 2; do
+  log "Starting gdbserver in the background:"
+  GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+  log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+    --once +$TARGET_DOMAIN_SOCKET \
+    --attach $PID $COMMAND_SUFFIX"
+  "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+    --once +$TARGET_DOMAIN_SOCKET \
+    --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 &
+  GDBSERVER_PID=$!
+  echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+  log "background job pid: $GDBSERVER_PID"
+
+  # Sleep to allow gdbserver to attach to the remote process and be
+  # ready to connect to.
+  log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive"
+  sleep "$ATTACH_DELAY"
+  log "Job control: $(jobs -l)"
+  STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+  if [ "$STATE" != "Running" ]; then
+    pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null)
+    if [[ -n "$pid_msg" ]]; then
+      old_pid=${pid_msg##* }
+      old_pid=${old_pid//[$'\r\n']}  # Trim trailing \r.
+      echo "Killing previous gdb server process (pid=$old_pid)"
+      adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX
+      continue
+    fi
+    echo "ERROR: GDBServer either failed to run or attach to PID $PID!"
+    echo "Here is the output from gdbserver (also try --verbose for more):"
+    echo "===== gdbserver.log start ====="
+    cat $GDBSERVER_LOG
+    echo ="===== gdbserver.log end ======"
+    exit 1
+  fi
+  break
+done
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+cat > "$COMMANDS" <<EOF
+set osabi GNU/Linux  # Copied from ndk-gdb.py.
+set print pretty 1
+python
+import sys
+sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')
+try:
+  import gdb_chrome
+finally:
+  sys.path.pop(0)
+end
+file $TMPDIR/$GDBEXEC
+directory $CHROMIUM_OUTPUT_DIR
+set solib-absolute-prefix $PULL_LIBS_DIR
+set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR
+
+python
+# Copied from ndk-gdb.py:
+def target_remote_with_retry(target, timeout_seconds):
+  import time
+  end_time = time.time() + timeout_seconds
+  while True:
+    try:
+      gdb.execute('target remote ' + target)
+      return True
+    except gdb.error as e:
+      time_left = end_time - time.time()
+      if time_left < 0 or time_left > timeout_seconds:
+        print("Error: unable to connect to device.")
+        print(e)
+        return False
+      time.sleep(min(0.25, time_left))
+
+print("Connecting to :$HOST_PORT...")
+if target_remote_with_retry(':$HOST_PORT', 5):
+  print("Attached! Reading symbols (takes ~30 seconds).")
+end
+EOF
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> "$COMMANDS"
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat "$COMMANDS"
+  echo "### END $COMMANDS"
+fi
+
+if [ "$IDE" ]; then
+  mkdir -p "$IDE_DIR"
+  SYM_GDB="$IDE_DIR/gdb"
+  SYM_EXE="$IDE_DIR/app_process"
+  SYM_INIT="$IDE_DIR/gdbinit"
+  ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE"
+  ln -sf "$COMMANDS" "$SYM_INIT"
+  # gdb doesn't work when symlinked, so create a wrapper.
+  echo
+  cat > $SYM_GDB <<EOF
+#!/bin/sh
+exec $GDB "\$@"
+EOF
+  chmod u+x $SYM_GDB
+
+  echo "GDB server listening on: localhost:$PORT"
+  echo "GDB wrapper script: $SYM_GDB"
+  echo "App executable: $SYM_EXE"
+  echo "gdbinit: $SYM_INIT"
+  echo "Connect with vscode: https://chromium.googlesource.com/chromium/src/+/master/docs/vscode.md#Launch-Commands"
+  echo "Showing gdbserver logs. Press Ctrl-C to disconnect."
+  tail -f "$GDBSERVER_LOG"
+else
+  log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+  echo "Server log: $GDBSERVER_LOG"
+  if [ "$CGDB" ]; then
+    $CGDB -d $GDB -- $GDB_ARGS -x "$COMMANDS"
+  else
+    $GDB $GDB_ARGS -x "$COMMANDS"
+  fi
+fi
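
Worth noting: the adb_shell helper in this script smuggles the remote exit
status through stdout because 'adb shell' historically exited 0 regardless of
the remote command's result. The core of the trick in isolation (output shape
illustrative):

    adb shell 'ls /nonexistent; echo %%$?'
    # ...ls error text from the device...
    # %%1     <- parsed off the last line, then stripped from the output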
diff --git a/src/build/android/adb_install_apk.py b/src/build/android/adb_install_apk.py
new file mode 100755
index 0000000..6ec98e2
--- /dev/null
+++ b/src/build/android/adb_install_apk.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  apk_group = parser.add_mutually_exclusive_group(required=True)
+  apk_group.add_argument('--apk', dest='apk_name',
+                         help='DEPRECATED The name of the apk containing the'
+                              ' application (with the .apk extension).')
+  apk_group.add_argument('apk_path', nargs='?',
+                         help='The path to the APK to install.')
+
+  # TODO(jbudorick): Remove once no clients pass --apk_package
+  parser.add_argument('--apk_package', help='DEPRECATED unused')
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--keep_data',
+                      action='store_true',
+                      default=False,
+                      help='Keep the package data when installing '
+                           'the application.')
+  parser.add_argument('--debug', action='store_const', const='Debug',
+                      dest='build_type',
+                      default=os.environ.get('BUILDTYPE', 'Debug'),
+                      help='If set, run test suites under out/Debug. '
+                           'Default is env var BUILDTYPE or Debug')
+  parser.add_argument('--release', action='store_const', const='Release',
+                      dest='build_type',
+                      help='If set, run test suites under out/Release. '
+                           'Default is env var BUILDTYPE or Debug.')
+  parser.add_argument('-d', '--device', dest='devices', action='append',
+                      default=[],
+                      help='Target device for apk to install on. Enter multiple'
+                           ' times for multiple devices.')
+  parser.add_argument('--adb-path', type=os.path.abspath,
+                      help='Absolute path to the adb binary to use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Enable verbose logging.')
+  parser.add_argument('--downgrade', action='store_true',
+                      help='If set, allows downgrading of apk.')
+  parser.add_argument('--timeout', type=int,
+                      default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT,
+                      help='Seconds to wait for APK installation. '
+                           '(default: %(default)s)')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose)
+  constants.SetBuildType(args.build_type)
+
+  devil_chromium.Initialize(
+      output_directory=constants.GetOutDirectory(),
+      adb_path=args.adb_path)
+
+  apk = args.apk_path or args.apk_name
+  if not apk.endswith('.apk'):
+    apk += '.apk'
+  if not os.path.exists(apk):
+    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
+    if not os.path.exists(apk):
+      parser.error('%s not found.' % apk)
+
+  if args.splits:
+    splits = []
+    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
+    for split_glob in args.splits:
+      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
+      if not apks:
+        logging.warning('No apks matched for %s.', split_glob)
+      for f in apks:
+        helper = apk_helper.ApkHelper(f)
+        if (helper.GetPackageName() == base_apk_package
+            and helper.GetSplitName()):
+          splits.append(f)
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  devices = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
+                                                    device_arg=args.devices)
+
+  def denylisting_install(device):
+    try:
+      if args.splits:
+        device.InstallSplitApk(apk, splits, reinstall=args.keep_data,
+                               allow_downgrade=args.downgrade)
+      else:
+        device.Install(apk, reinstall=args.keep_data,
+                       allow_downgrade=args.downgrade,
+                       timeout=args.timeout)
+    except (device_errors.CommandFailedError,
+            device_errors.DeviceUnreachableError):
+      logging.exception('Failed to install %s', apk)
+      if denylist:
+        denylist.Extend([str(device)], reason='install_failure')
+        logging.warning('Denylisting %s', str(device))
+    except device_errors.CommandTimeoutError:
+      logging.exception('Timed out while installing %s', apk)
+      if denylist:
+        denylist.Extend([str(device)], reason='install_timeout')
+        logging.warning('Denylisting %s', str(device))
+
+  device_utils.DeviceUtils.parallel(devices).pMap(denylisting_install)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/adb_logcat_monitor.py b/src/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..a919722
--- /dev/null
+++ b/src/build/android/adb_logcat_monitor.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
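+
+Example (illustrative directory):
+  adb_logcat_monitor.py /tmp/logcats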
+"""
+
+from __future__ import print_function
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns a adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error('%s:   %s', device_id, line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
+    return re.findall(r'^(\S+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print('adb_logcat_monitor: %s already exists? Cleaning' % base_dir)
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_signum, _unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_signum, _unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if device_id not in devices:
+          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except: # pylint: disable=bare-except
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print('adb_logcat_monitor: Initializing')
+    sys.exit(main(*sys.argv[1:3]))
+
+  print('Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0])
diff --git a/src/build/android/adb_logcat_printer.py b/src/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..a715170
--- /dev/null
+++ b/src/build/android/adb_logcat_printer.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to stdout (or to --output-path, if given),
+and will combine multiple logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGTERM and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat monitor to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  if options.output_path:
+    if not os.path.exists(os.path.dirname(options.output_path)):
+      logger.warning('Output dir %s doesn\'t exist. Creating it.',
+                      os.path.dirname(options.output_path))
+      os.makedirs(os.path.dirname(options.output_path))
+    output_file = open(options.output_path, 'w')
+    logger.info('Dumping logcat to local file %s. If running in a build, '
+                'this file will likely be uploaded to Google Storage '
+                'in a later step. It can be downloaded from there.',
+                options.output_path)
+  else:
+    output_file = sys.stdout
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since
+    # adb_logcat_monitor.py command will have spawned more than 5 seconds
+    # prior to called this shell script.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_profile_chrome b/src/build/android/adb_profile_chrome
new file mode 100755
index 0000000..d3244ff
--- /dev/null
+++ b/src/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/src/build/android/adb_profile_chrome_startup b/src/build/android/adb_profile_chrome_startup
new file mode 100755
index 0000000..d5836cd
--- /dev/null
+++ b/src/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/src/build/android/adb_reverse_forwarder.py b/src/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..90d3139
--- /dev/null
+++ b/src/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import argparse
+import sys
+import time
+
+import devil_chromium
+
+from devil.android import device_denylist
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.utils import run_tests_helper
+
+from pylib import constants
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      usage='Usage: %(prog)s [options] device_port '
+            'host_port [device_port_2 host_port_2] ...',
+      description=__doc__)
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count',
+      default=0,
+      action='count',
+      help='Verbose level (multiple times for more)')
+  parser.add_argument(
+      '--device',
+      help='Serial number of device we should use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument(
+      '--debug',
+      action='store_const',
+      const='Debug',
+      dest='build_type',
+      default='Release',
+      help='DEPRECATED: use --output-directory instead.')
+  parser.add_argument(
+      '--output-directory',
+      help='Path to the root build directory.')
+  parser.add_argument(
+      'ports',
+      nargs='+',
+      type=int,
+      help='Port pair to reverse forward.')
+
+  args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(args.verbose_count)
+
+  if len(args.ports) < 2 or len(args.ports) % 2:
+    parser.error('Need an even number of ports (device/host pairs).')
+
+  port_pairs = zip(args.ports[::2], args.ports[1::2])
+
+  if args.build_type:
+    constants.SetBuildType(args.build_type)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  device = device_utils.DeviceUtils.HealthyDevices(denylist=denylist,
+                                                   device_arg=args.device)[0]
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
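+    # The mapping stays active for the life of this process; idle below until
+    # the user interrupts with Ctrl-C (KeyboardInterrupt).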
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/adb_system_webview_command_line b/src/build/android/adb_system_webview_command_line
new file mode 100755
index 0000000..a0d2705
--- /dev/null
+++ b/src/build/android/adb_system_webview_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current system webview flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the system
+# webview flags. For example:
+#   adb_system_webview_command_line --enable-webgl
+#
+# To remove all system webview flags, pass an empty string for the flags:
+#   adb_system_webview_command_line ""
+
+exec "$(dirname "$0")"/adb_command_line.py --name webview-command-line "$@"
diff --git a/src/build/android/android_only_explicit_jni_exports.lst b/src/build/android/android_only_explicit_jni_exports.lst
new file mode 100644
index 0000000..f989691
--- /dev/null
+++ b/src/build/android/android_only_explicit_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only JNI_OnLoad.
+# Should be used for libraries that do explicit JNI registration.
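+#
+# Version scripts like this are typically passed to the linker via
+# -Wl,--version-script=<path> (illustrative; the actual wiring lives in the
+# build configs).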
+
+{
+  global:
+    JNI_OnLoad;
+  local:
+    *;
+};
diff --git a/src/build/android/android_only_jni_exports.lst b/src/build/android/android_only_jni_exports.lst
new file mode 100644
index 0000000..1336fee
--- /dev/null
+++ b/src/build/android/android_only_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only symbols required for JNI to work.
+
+{
+  global:
+    JNI_OnLoad;
+    Java_*;
+  local:
+    *;
+};
diff --git a/src/build/android/apk_operations.py b/src/build/android/apk_operations.py
new file mode 100755
index 0000000..d6cd583
--- /dev/null
+++ b/src/build/android/apk_operations.py
@@ -0,0 +1,1970 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Using colorama.Fore/Back/Style members
+# pylint: disable=no-member
+
+from __future__ import print_function
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import posixpath
+import random
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+
+import adb_command_line
+import devil_chromium
+from devil import devil_env
+from devil.android import apk_helper
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.sdk import adb_wrapper
+from devil.android.sdk import build_tools
+from devil.android.sdk import intent
+from devil.android.sdk import version_codes
+from devil.utils import run_tests_helper
+
+_DIR_SOURCE_ROOT = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '..', '..'))
+_JAVA_HOME = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')):
+  import colorama
+
+from incremental_install import installer
+from pylib import constants
+from pylib.symbols import deobfuscator
+from pylib.utils import simpleperf
+from pylib.utils import app_bundle_utils
+
+with devil_env.SysPath(
+    os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')):
+  import bundletool
+
+BASE_MODULE = 'base'
+
+
+def _Colorize(text, style=''):
+  return (style
+      + text
+      + colorama.Style.RESET_ALL)
+
+
+def _InstallApk(devices, apk, install_dict):
+  def install(device):
+    if install_dict:
+      installer.Install(device, install_dict, apk=apk, permissions=[])
+    else:
+      device.Install(apk, permissions=[], allow_downgrade=True, reinstall=True)
+
+  logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
+  device_utils.DeviceUtils.parallel(devices).pMap(install)
+
+
+# A named tuple containing the information needed to convert a bundle into
+# an installable .apks archive.
+# Fields:
+#   bundle_path: Path to input bundle file.
+#   bundle_apk_path: Path to output bundle .apks archive file.
+#   aapt2_path: Path to aapt2 tool.
+#   keystore_path: Path to keystore file.
+#   keystore_password: Password for the keystore file.
+#   keystore_alias: Signing key name alias within the keystore file.
+#   system_image_locales: List of Chromium locales to include in system .apks.
+BundleGenerationInfo = collections.namedtuple(
+    'BundleGenerationInfo',
+    'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,'
+    'keystore_alias,system_image_locales')
+
+
+def _GenerateBundleApks(info,
+                        output_path=None,
+                        minimal=False,
+                        minimal_sdk_version=None,
+                        mode=None,
+                        optimize_for=None):
+  """Generate an .apks archive from a bundle on demand.
+
+  Args:
+    info: A BundleGenerationInfo instance.
+    output_path: Path of output .apks archive.
+    minimal: Create the minimal set of apks possible (english-only).
+    minimal_sdk_version: When minimal=True, use this sdkVersion.
+    mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+    optimize_for: Override split config, either None, or one of
+      app_bundle_utils.OPTIMIZE_FOR_OPTIONS.
+  """
+  logging.info('Generating .apks file')
+  app_bundle_utils.GenerateBundleApks(
+      info.bundle_path,
+      # Store .apks file beside the .aab file by default so that it gets cached.
+      output_path or info.bundle_apks_path,
+      info.aapt2_path,
+      info.keystore_path,
+      info.keystore_password,
+      info.keystore_alias,
+      system_image_locales=info.system_image_locales,
+      mode=mode,
+      minimal=minimal,
+      minimal_sdk_version=minimal_sdk_version,
+      optimize_for=optimize_for)
+
+
+def _InstallBundle(devices, apk_helper_instance, package_name,
+                   command_line_flags_file, modules, fake_modules):
+  # Path Chrome creates after validating fake modules. This needs to be cleared
+  # for pushed fake modules to be picked up.
+  SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
+  # Chrome command line flag needed for fake modules to work.
+  FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
+
+  def ShouldWarnFakeFeatureModuleInstallFlag(device):
+    if command_line_flags_file:
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
+    return False
+
+  def ClearFakeModules(device):
+    if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
+      device.RemovePath(
+          SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
+      logging.info('Removed %s', SPLITCOMPAT_PATH)
+    else:
+      logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+
+  def Install(device):
+    ClearFakeModules(device)
+    if fake_modules and ShouldWarnFakeFeatureModuleInstallFlag(device):
+      # Print warning if command line is not set up for fake modules.
+      msg = ('Command line has no %s: Fake modules will be ignored.' %
+             FAKE_FEATURE_MODULE_INSTALL)
+      print(_Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT))
+
+    device.Install(
+        apk_helper_instance,
+        permissions=[],
+        modules=modules,
+        fake_modules=fake_modules,
+        allow_downgrade=True)
+
+  # Basic checks for |modules| and |fake_modules|.
+  # * |fake_modules| cannot include 'base'.
+  # * If |fake_modules| is given, ensure |modules| includes 'base'.
+  # * They must be disjoint (checked by device.Install).
+  modules_set = set(modules) if modules else set()
+  fake_modules_set = set(fake_modules) if fake_modules else set()
+  if BASE_MODULE in fake_modules_set:
+    raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE))
+  if fake_modules_set and BASE_MODULE not in modules_set:
+    raise Exception(
+        '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
+
+  logging.info('Installing bundle.')
+  device_utils.DeviceUtils.parallel(devices).pMap(Install)
+
+
+def _UninstallApk(devices, install_dict, package_name):
+  def uninstall(device):
+    if install_dict:
+      installer.Uninstall(device, package_name)
+    else:
+      device.Uninstall(package_name)
+  device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
+
+
+def _IsWebViewProvider(apk_helper_instance):
+  meta_data = apk_helper_instance.GetAllMetadata()
+  meta_data_keys = [pair[0] for pair in meta_data]
+  return 'com.android.webview.WebViewLibrary' in meta_data_keys
+
+
+def _SetWebViewProvider(devices, package_name):
+
+  def switch_provider(device):
+    if device.build_version_sdk < version_codes.NOUGAT:
+      logging.error('No need to switch provider on pre-Nougat devices (%s)',
+                    device.serial)
+    else:
+      device.SetWebViewImplementation(package_name)
+
+  device_utils.DeviceUtils.parallel(devices).pMap(switch_provider)
+
+
+def _NormalizeProcessName(debug_process_name, package_name):
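+  """Returns the fully-qualified process name for |debug_process_name|.
+
+  E.g. (illustrative package name): None -> 'com.example.app',
+  ':renderer' -> 'com.example.app:renderer', and
+  'sandboxed_process0' -> 'com.example.app:sandboxed_process0'.
+  """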
+  if not debug_process_name:
+    debug_process_name = package_name
+  elif debug_process_name.startswith(':'):
+    debug_process_name = package_name + debug_process_name
+  elif '.' not in debug_process_name:
+    debug_process_name = package_name + ':' + debug_process_name
+  return debug_process_name
+
+
+def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
+               url=None, apk=None, wait_for_java_debugger=False,
+               debug_process_name=None, nokill=None):
+  if argv and command_line_flags_file is None:
+    raise Exception('This apk does not support any flags.')
+  if url:
+    # TODO(agrieve): Launch could be changed to require only package name by
+    #     parsing "dumpsys package" rather than relying on the apk.
+    if not apk:
+      raise Exception('Launching with URL is not supported when using '
+                      '--package-name. Use --apk-path instead.')
+    view_activity = apk.GetViewActivityName()
+    if not view_activity:
+      raise Exception('APK does not support launching with URLs.')
+
+  debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+
+  def launch(device):
+    # --persistent is required to have Settings.Global.DEBUG_APP be set, which
+    # we currently use to allow reading of flags. https://crbug.com/784947
+    if not nokill:
+      cmd = ['am', 'set-debug-app', '--persistent', debug_process_name]
+      if wait_for_java_debugger:
+        cmd[-1:-1] = ['-w']
+      # Ignore error since it will fail if apk is not debuggable.
+      device.RunShellCommand(cmd, check_return=False)
+
+      # The flags are first updated with input args.
+      if command_line_flags_file:
+        changer = flag_changer.FlagChanger(device, command_line_flags_file)
+        flags = []
+        if argv:
+          adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                       command_line_flags_file)
+          flags = shlex.split(argv)
+        try:
+          changer.ReplaceFlags(flags)
+        except device_errors.AdbShellCommandFailedError:
+          logging.exception('Failed to set flags')
+
+    if url is None:
+      # Simulate app icon click if no url is present.
+      cmd = [
+          'am', 'start', '-p', package_name, '-c',
+          'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
+      ]
+      device.RunShellCommand(cmd, check_return=True)
+    else:
+      launch_intent = intent.Intent(action='android.intent.action.VIEW',
+                                    activity=view_activity, data=url,
+                                    package=package_name)
+      device.StartActivity(launch_intent)
+  device_utils.DeviceUtils.parallel(devices).pMap(launch)
+  if wait_for_java_debugger:
+    print('Waiting for debugger to attach to process: ' +
+          _Colorize(debug_process_name, colorama.Fore.YELLOW))
+
+
+def _ChangeFlags(devices, argv, command_line_flags_file):
+  if argv is None:
+    _DisplayArgs(devices, command_line_flags_file)
+  else:
+    flags = shlex.split(argv)
+    def update(device):
+      adb_command_line.CheckBuildTypeSupportsFlags(device,
+                                                   command_line_flags_file)
+      changer = flag_changer.FlagChanger(device, command_line_flags_file)
+      changer.ReplaceFlags(flags)
+    device_utils.DeviceUtils.parallel(devices).pMap(update)
+
+
+def _TargetCpuToTargetArch(target_cpu):
+  if target_cpu == 'x64':
+    return 'x86_64'
+  if target_cpu == 'mipsel':
+    return 'mips'
+  return target_cpu
+
+
+def _RunGdb(device, package_name, debug_process_name, pid, output_directory,
+            target_cpu, port, ide, verbose):
+  if not pid:
+    debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+    pid = device.GetApplicationPids(debug_process_name, at_most_one=True)
+  if not pid:
+    # Attaching gdb makes the app run so slow that it takes *minutes* to start
+    # up (as of 2018). Better to just fail than to start & attach.
+    raise Exception('App not running.')
+
+  gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
+  cmd = [
+      gdb_script_path,
+      '--package-name=%s' % package_name,
+      '--output-directory=%s' % output_directory,
+      '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
+      '--device=%s' % device.serial,
+      '--pid=%s' % pid,
+      '--port=%d' % port,
+  ]
+  if ide:
+    cmd.append('--ide')
+  # Enable verbose output of adb_gdb if it's set for this script.
+  if verbose:
+    cmd.append('--verbose')
+  if target_cpu:
+    cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
+  logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
+  print(_Colorize('All subsequent output is from adb_gdb script.',
+                  colorama.Fore.YELLOW))
+  os.execv(gdb_script_path, cmd)
+
+
+def _PrintPerDeviceOutput(devices, results, single_line=False):
+  for d, result in zip(devices, results):
+    if not single_line and d is not devices[0]:
+      sys.stdout.write('\n')
+    sys.stdout.write(
+        _Colorize('{} ({}):'.format(d, d.build_description),
+                  colorama.Fore.YELLOW))
+    sys.stdout.write(' ' if single_line else '\n')
+    yield result
+
+
+def _RunMemUsage(devices, package_name, query_app=False):
+  cmd_args = ['dumpsys', 'meminfo']
+  if not query_app:
+    cmd_args.append('--local')
+
+  def mem_usage_helper(d):
+    ret = []
+    for process in sorted(_GetPackageProcesses(d, package_name)):
+      meminfo = d.RunShellCommand(cmd_args + [str(process.pid)])
+      ret.append((process.name, '\n'.join(meminfo)))
+    return ret
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
+  for result in _PrintPerDeviceOutput(devices, all_results):
+    if not result:
+      print('No processes found.')
+    else:
+      for name, usage in sorted(result):
+        print(_Colorize('==== Output of "dumpsys meminfo %s" ====' % name,
+                        colorama.Fore.GREEN))
+        print(usage)
+
+
+def _DuHelper(device, path_spec, run_as=None):
+  """Runs "du -s -k |path_spec|" on |device| and returns parsed result.
+
+  Args:
+    device: A DeviceUtils instance.
+    path_spec: The list of paths to run du on. May contain shell expansions
+        (will not be escaped).
+    run_as: Package name to run as, or None to run as shell user. If not None
+        and app is not android:debuggable (run-as fails), then command will be
+        run as root.
+
+  Returns:
+    A dict of path->size in KiB containing all paths in |path_spec| that exist
+    on device. Paths that do not exist are silently ignored.
+  """
+  # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
+  # 144     /data/data/org.chromium.chrome/cache
+  # 8       /data/data/org.chromium.chrome/files
+  # <snip>
+  # du: .*: No such file or directory
+
+  # The -d flag works differently across android version, so use -s instead.
+  # Without the explicit 2>&1, stderr and stdout get combined at random :(.
+  cmd_str = 'du -s -k ' + path_spec + ' 2>&1'
+  lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
+                                 check_return=False)
+  output = '\n'.join(lines)
+  # run-as: Package 'com.android.chrome' is not debuggable
+  if output.startswith('run-as:'):
+    # check_return=False needed for when some paths in path_spec do not exist.
+    lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
+                                   check_return=False)
+  ret = {}
+  try:
+    for line in lines:
+      # du: .*: No such file or directory
+      if line.startswith('du:'):
+        continue
+      size, subpath = line.split(None, 1)
+      ret[subpath] = int(size)
+    return ret
+  except ValueError:
+    logging.error('du command was: %s', cmd_str)
+    logging.error('Failed to parse du output:\n%s', output)
+    raise
+
+
+def _RunDiskUsage(devices, package_name):
+  # Measuring dex size is a bit complicated:
+  # https://source.android.com/devices/tech/dalvik/jit-compiler
+  #
+  # For KitKat and below:
+  #   dumpsys package contains:
+  #     dataDir=/data/data/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-1.apk
+  #     resourcePath=/data/app/org.chromium.chrome-1.apk
+  #     nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
+  #   To measure odex:
+  #     ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
+  #
+  # For Android L and M (and maybe for N+ system apps):
+  #   dumpsys package contains:
+  #     codePath=/data/app/org.chromium.chrome-1
+  #     resourcePath=/data/app/org.chromium.chrome-1
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
+  #   To measure odex:
+  #     # Option 1:
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
+  #  /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
+  #     ls -l /data/dalvik-cache/profiles/org.chromium.chrome
+  #         (these profiles all appear to be 0 bytes)
+  #     # Option 2:
+  #     ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
+  #
+  # For Android N+:
+  #   dumpsys package contains:
+  #     dataDir=/data/user/0/org.chromium.chrome
+  #     codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
+  #     Instruction Set: arm
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
+  #       ilter=quicken]
+  #     Instruction Set: arm64
+  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+  #       status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
+  #       uicken]
+  #   To measure odex:
+  #     ls -l /data/app/.../oat/arm/base.odex
+  #     ls -l /data/app/.../oat/arm/base.vdex (optional)
+  #   To measure the correct odex size:
+  #     cmd package compile -m speed org.chromium.chrome  # For webview
+  #     cmd package compile -m speed-profile org.chromium.chrome  # For others
+  def disk_usage_helper(d):
+    package_output = '\n'.join(d.RunShellCommand(
+        ['dumpsys', 'package', package_name], check_return=True))
+    # Does not return error when apk is not installed.
+    if not package_output or 'Unable to find package:' in package_output:
+      return None
+
+    # Ignore system apks that have updates installed.
+    package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+                            package_output, flags=re.S | re.M)
+
+    try:
+      data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+      code_path = re.search(r'codePath=(.*)', package_output).group(1)
+      lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+                           package_output).group(1)
+    except AttributeError:
+      raise Exception('Error parsing dumpsys output: ' + package_output)
+
+    if code_path.startswith('/system'):
+      logging.warning('Measurement of system image APKs can be inaccurate')
+
+    compilation_filters = set()
+    # Match "compilation_filter=value", where a line break can occur at any spot
+    # (refer to examples above).
+    awful_wrapping = r'\s*'.join('compilation_filter=')
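+    # (Joining each character of 'compilation_filter=' with r'\s*' produces a
+    # pattern like 'c\s*o\s*m\s*...' that tolerates a line break between any
+    # two characters.)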
+    for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+      compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+    # Starting Android Q, output looks like:
+    #  arm: [status=speed-profile] [reason=install]
+    for m in re.finditer(r'\[status=(.+?)\]', package_output):
+      compilation_filters.add(m.group(1))
+    compilation_filter = ','.join(sorted(compilation_filters))
+
+    data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+    # Measure code_cache separately since it can be large.
+    code_cache_sizes = {}
+    code_cache_dir = next(
+        (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+    if code_cache_dir:
+      data_dir_sizes.pop(code_cache_dir)
+      code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+                                   run_as=package_name)
+
+    apk_path_spec = code_path
+    if not apk_path_spec.endswith('.apk'):
+      apk_path_spec += '/*.apk'
+    apk_sizes = _DuHelper(d, apk_path_spec)
+    if lib_path.endswith('/lib'):
+      # Shows architecture subdirectory.
+      lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+    else:
+      lib_sizes = _DuHelper(d, lib_path)
+
+    # Look at all possible locations for odex files.
+    odex_paths = []
+    for apk_path in apk_sizes:
+      mangled_apk_path = apk_path[1:].replace('/', '@')
+      apk_basename = posixpath.basename(apk_path)[:-4]
+      for ext in ('dex', 'odex', 'vdex', 'art'):
+        # Easier to check all architectures than to determine active ones.
+        for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+          odex_paths.append(
+              '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+          # Check classes.dex through classes5.dex; no app needs more.
+          for suffix in ('', '2', '3', '4', '5'):
+            odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+                arch, mangled_apk_path, suffix, ext))
+            # This path does not have |arch|, so don't repeat it for every arch.
+            if arch == 'arm':
+              odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
+                  mangled_apk_path, suffix))
+
+    odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
+
+    return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+            compilation_filter)
+
+  def print_sizes(desc, sizes):
+    print('%s: %d KiB' % (desc, sum(sizes.itervalues())))
+    for path, size in sorted(sizes.iteritems()):
+      print('    %s: %s KiB' % (path, size))
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
+  for result in _PrintPerDeviceOutput(devices, all_results):
+    if not result:
+      print('APK is not installed.')
+      continue
+
+    (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+     compilation_filter) = result
+    total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
+
+    print_sizes('Apk', apk_sizes)
+    print_sizes('App Data (non-code cache)', data_dir_sizes)
+    print_sizes('App Data (code cache)', code_cache_sizes)
+    print_sizes('Native Libs', lib_sizes)
+    show_warning = compilation_filter and 'speed' not in compilation_filter
+    compilation_filter = compilation_filter or 'n/a'
+    print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
+    if show_warning:
+      logging.warning('For a more realistic odex size, run:')
+      logging.warning('    %s compile-dex [speed|speed-profile]', sys.argv[0])
+    print('Total: %s KiB (%.1f MiB)' % (total, total / 1024.0))
+
+
+class _LogcatProcessor(object):
+  ParsedLine = collections.namedtuple(
+      'ParsedLine',
+      ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message'])
+
+  class NativeStackSymbolizer(object):
+    """Buffers lines from native stacks and symbolizes them when done."""
+    # E.g.: #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so
+    # E.g.: #01 pc 00180c8d  /data/data/.../lib/libbase.cr.so
+    _STACK_PATTERN = re.compile(r'\s*#\d+\s+(?:pc )?(0x)?[0-9a-f]{8,16}\s')
+
+    def __init__(self, stack_script_context, print_func):
+      # To symbolize native stacks, we need to pass all lines at once.
+      self._stack_script_context = stack_script_context
+      self._print_func = print_func
+      self._crash_lines_buffer = None
+
+    def _FlushLines(self):
+      """Prints queued lines after sending them through stack.py."""
+      crash_lines = self._crash_lines_buffer
+      self._crash_lines_buffer = None
+      with tempfile.NamedTemporaryFile() as f:
+        f.writelines(x[0].message + '\n' for x in crash_lines)
+        f.flush()
+        proc = self._stack_script_context.Popen(
+            input_file=f.name, stdout=subprocess.PIPE)
+        lines = proc.communicate()[0].splitlines()
+
+      for i, line in enumerate(lines):
+        parsed_line, dim = crash_lines[min(i, len(crash_lines) - 1)]
+        d = parsed_line._asdict()
+        d['message'] = line
+        parsed_line = _LogcatProcessor.ParsedLine(**d)
+        self._print_func(parsed_line, dim)
+
+    def AddLine(self, parsed_line, dim):
+      # Assume all lines from DEBUG are stacks.
+      # Also look for "stack-looking" lines to catch manual stack prints.
+      # It's important to not buffer non-stack lines because stack.py does not
+      # pass them through.
+      is_crash_line = parsed_line.tag == 'DEBUG' or (self._STACK_PATTERN.match(
+          parsed_line.message))
+
+      if is_crash_line:
+        if self._crash_lines_buffer is None:
+          self._crash_lines_buffer = []
+        self._crash_lines_buffer.append((parsed_line, dim))
+        return
+
+      if self._crash_lines_buffer is not None:
+        self._FlushLines()
+
+      self._print_func(parsed_line, dim)
+
+
+  # Logcat tags for messages that are generally relevant but are not from PIDs
+  # associated with the apk.
+  _ALLOWLISTED_TAGS = {
+      'ActivityManager',  # Shows activity lifecycle messages.
+      'ActivityTaskManager',  # More activity lifecycle messages.
+      'AndroidRuntime',  # Java crash dumps
+      'DEBUG',  # Native crash dump.
+  }
+
+  # Matches spammy, unimportant messages that appear only on pre-L (Dalvik).
+  _DALVIK_IGNORE_PATTERN = re.compile('|'.join([
+      r'^Added shared lib',
+      r'^Could not find ',
+      r'^DexOpt:',
+      r'^GC_',
+      r'^Late-enabling CheckJNI',
+      r'^Link of class',
+      r'^No JNI_OnLoad found in',
+      r'^Trying to load lib',
+      r'^Unable to resolve superclass',
+      r'^VFY:',
+      r'^WAIT_',
+  ]))
+
+  def __init__(self,
+               device,
+               package_name,
+               stack_script_context,
+               deobfuscate=None,
+               verbose=False):
+    self._device = device
+    self._package_name = package_name
+    self._verbose = verbose
+    self._deobfuscator = deobfuscate
+    self._native_stack_symbolizer = _LogcatProcessor.NativeStackSymbolizer(
+        stack_script_context, self._PrintParsedLine)
+    # Process ID for the app's main process (with no :name suffix).
+    self._primary_pid = None
+    # Set of all Process IDs that belong to the app.
+    self._my_pids = set()
+    # Set of all Process IDs that we've parsed at some point.
+    self._seen_pids = set()
+    # Start proc 22953:com.google.chromeremotedesktop/
+    self._pid_pattern = re.compile(r'Start proc (\d+):{}/'.format(package_name))
+    # START u0 {act=android.intent.action.MAIN \
+    # cat=[android.intent.category.LAUNCHER] \
+    # flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000
+    self._start_pattern = re.compile(r'START .*pkg=' + package_name)
+
+    self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random())
+    # Holds lines buffered on start-up, before we find our nonce message.
+    self._initial_buffered_lines = []
+    self._UpdateMyPids()
+    # Give preference to PID reported by "ps" over those found from
+    # _start_pattern. There can be multiple "Start proc" messages from prior
+    # runs of the app.
+    self._found_initial_pid = self._primary_pid is not None
+    # Retrieve any additional patterns that are relevant for the User.
+    self._user_defined_highlight = None
+    user_regex = os.environ.get('CHROMIUM_LOGCAT_HIGHLIGHT')
+    if user_regex:
+      try:
+        self._user_defined_highlight = re.compile(user_regex)
+      except re.error:
+        # re.compile raises re.error on a bad pattern; it never returns None.
+        print(_Colorize(
+            'Rejecting invalid regular expression: {}'.format(user_regex),
+            colorama.Fore.RED + colorama.Style.BRIGHT))
+
+  def _UpdateMyPids(self):
+    # We intentionally do not clear self._my_pids to make sure that the
+    # ProcessLine method below also includes lines from processes which may
+    # have already exited.
+    self._primary_pid = None
+    for process in _GetPackageProcesses(self._device, self._package_name):
+      # We take only the first "main" process found in order to account for
+      # possibly forked() processes.
+      if ':' not in process.name and self._primary_pid is None:
+        self._primary_pid = process.pid
+      self._my_pids.add(process.pid)
+
+  def _GetPidStyle(self, pid, dim=False):
+    if pid == self._primary_pid:
+      return colorama.Fore.WHITE
+    elif pid in self._my_pids:
+      # TODO(wnwen): Use one separate persistent color per process, pop LRU
+      return colorama.Fore.YELLOW
+    elif dim:
+      return colorama.Style.DIM
+    return ''
+
+  def _GetPriorityStyle(self, priority, dim=False):
+    # pylint:disable=no-self-use
+    if dim:
+      return ''
+    style = colorama.Fore.BLACK
+    if priority == 'E' or priority == 'F':
+      style += colorama.Back.RED
+    elif priority == 'W':
+      style += colorama.Back.YELLOW
+    elif priority == 'I':
+      style += colorama.Back.GREEN
+    elif priority == 'D':
+      style += colorama.Back.BLUE
+    return style
+
+  def _ParseLine(self, line):
+    tokens = line.split(None, 6)
+
+    def consume_token_or_default(default):
+      return tokens.pop(0) if len(tokens) > 0 else default
+
+    def consume_integer_token_or_default(default):
+      if len(tokens) == 0:
+        return default
+
+      try:
+        return int(tokens.pop(0))
+      except ValueError:
+        return default
+
+    date = consume_token_or_default('')
+    invokation_time = consume_token_or_default('')
+    pid = consume_integer_token_or_default(-1)
+    tid = consume_integer_token_or_default(-1)
+    priority = consume_token_or_default('')
+    tag = consume_token_or_default('')
+    original_message = consume_token_or_default('')
+
+    # Example:
+    #   09-19 06:35:51.113  9060  9154 W GCoreFlp: No location...
+    #   09-19 06:01:26.174  9060 10617 I Auth    : [ReflectiveChannelBinder]...
+    # Parsing "GCoreFlp:" vs "Auth    :", we only want tag to contain the word,
+    # and we don't want to keep the colon for the message.
+    if tag and tag[-1] == ':':
+      tag = tag[:-1]
+    elif len(original_message) > 2:
+      original_message = original_message[2:]
+    return self.ParsedLine(
+        date, invokation_time, pid, tid, priority, tag, original_message)
+
+  def _PrintParsedLine(self, parsed_line, dim=False):
+    tid_style = colorama.Style.NORMAL
+    user_match = self._user_defined_highlight and (
+        re.search(self._user_defined_highlight, parsed_line.tag)
+        or re.search(self._user_defined_highlight, parsed_line.message))
+
+    # Make the main thread bright.
+    if not dim and parsed_line.pid == parsed_line.tid:
+      tid_style = colorama.Style.BRIGHT
+    pid_style = self._GetPidStyle(parsed_line.pid, dim)
+    msg_style = pid_style if not user_match else (colorama.Fore.GREEN +
+                                                  colorama.Style.BRIGHT)
+    # We have to pad before adding color as that changes the width of the tag.
+    pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style)
+    tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style)
+    tag = _Colorize('{:8}'.format(parsed_line.tag),
+                    pid_style + ('' if dim else colorama.Style.BRIGHT))
+    priority = _Colorize(parsed_line.priority,
+                         self._GetPriorityStyle(parsed_line.priority))
+    messages = [parsed_line.message]
+    if self._deobfuscator:
+      messages = self._deobfuscator.TransformLines(messages)
+    for message in messages:
+      message = _Colorize(message, msg_style)
+      sys.stdout.write('{} {} {} {} {} {}: {}\n'.format(
+          parsed_line.date, parsed_line.invokation_time, pid_str, tid_str,
+          priority, tag, message))
+
+  def _TriggerNonceFound(self):
+    # Once the nonce is hit, we have confidence that we know which lines
+    # belong to the current run of the app. Process all of the buffered lines.
+    if self._primary_pid:
+      for args in self._initial_buffered_lines:
+        self._native_stack_symbolizer.AddLine(*args)
+    self._initial_buffered_lines = None
+    self.nonce = None
+
+  def ProcessLine(self, line):
+    if not line or line.startswith('------'):
+      return
+
+    if self.nonce and self.nonce in line:
+      self._TriggerNonceFound()
+
+    nonce_found = self.nonce is None
+
+    log = self._ParseLine(line)
+    if log.pid not in self._seen_pids:
+      self._seen_pids.add(log.pid)
+      if nonce_found:
+        # Update list of owned PIDs each time a new PID is encountered.
+        self._UpdateMyPids()
+
+    # Search for "Start proc $pid:$package_name/" message.
+    if not nonce_found:
+      # Capture logs before the nonce. Start with the most recent "am start".
+      if self._start_pattern.match(log.message):
+        self._initial_buffered_lines = []
+
+      # If we didn't find the PID via "ps", then extract it from log messages.
+      # This will happen if the app crashes too quickly.
+      if not self._found_initial_pid:
+        m = self._pid_pattern.match(log.message)
+        if m:
+          # Find the most recent "Start proc" line before the nonce.
+          # Track only the primary pid in this mode.
+          # The main use-case is to find app logs when no current PIDs exist.
+          # E.g.: When the app crashes on launch.
+          self._primary_pid = int(m.group(1))
+          self._my_pids.clear()
+          self._my_pids.add(self._primary_pid)
+
+    owned_pid = log.pid in self._my_pids
+    if owned_pid and not self._verbose and log.tag == 'dalvikvm':
+      if self._DALVIK_IGNORE_PATTERN.match(log.message):
+        return
+
+    if owned_pid or self._verbose or (log.priority == 'F' or  # Java crash dump
+                                      log.tag in self._ALLOWLISTED_TAGS):
+      if nonce_found:
+        self._native_stack_symbolizer.AddLine(log, not owned_pid)
+      else:
+        self._initial_buffered_lines.append((log, not owned_pid))
+
+
+def _RunLogcat(device, package_name, stack_script_context, deobfuscate,
+               verbose):
+  logcat_processor = _LogcatProcessor(
+      device, package_name, stack_script_context, deobfuscate, verbose)
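+  # Write the nonce to the device log so that ProcessLine() can tell which
+  # logcat lines belong to this run of the app.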
+  device.RunShellCommand(['log', logcat_processor.nonce])
+  for line in device.adb.Logcat(logcat_format='threadtime'):
+    try:
+      logcat_processor.ProcessLine(line)
+    except:  # pylint: disable=bare-except
+      sys.stderr.write('Failed to process line: ' + line + '\n')
+      # Skip stack trace for the common case of the adb server being
+      # restarted.
+      if 'unexpected EOF' in line:
+        sys.exit(1)
+      raise
+
+
+def _GetPackageProcesses(device, package_name):
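+  # Matches the main process as well as child processes, which are named with
+  # a colon suffix (e.g. "foo.bar:baz").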
+  return [
+      p for p in device.ListProcesses(package_name)
+      if p.name == package_name or p.name.startswith(package_name + ':')]
+
+
+def _RunPs(devices, package_name):
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  all_processes = parallel_devices.pMap(
+      lambda d: _GetPackageProcesses(d, package_name)).pGet(None)
+  for processes in _PrintPerDeviceOutput(devices, all_processes):
+    if not processes:
+      print('No processes found.')
+    else:
+      proc_map = collections.defaultdict(list)
+      for p in processes:
+        proc_map[p.name].append(str(p.pid))
+      for name, pids in sorted(proc_map.items()):
+        print(name, ','.join(pids))
+
+
+def _RunShell(devices, package_name, cmd):
+  if cmd:
+    parallel_devices = device_utils.DeviceUtils.parallel(devices)
+    outputs = parallel_devices.RunShellCommand(
+        cmd, run_as=package_name).pGet(None)
+    for output in _PrintPerDeviceOutput(devices, outputs):
+      for line in output:
+        print(line)
+  else:
+    adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
+    cmd = [adb_path, '-s', devices[0].serial, 'shell']
+    # Pre-N devices do not support -t flag.
+    if devices[0].build_version_sdk >= version_codes.NOUGAT:
+      cmd += ['-t', 'run-as', package_name]
+    else:
+      print('Upon entering the shell, run:')
+      print('run-as', package_name)
+      print()
+    os.execv(adb_path, cmd)
+
+
+def _RunCompileDex(devices, package_name, compilation_filter):
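+  # "cmd package compile" triggers on-device dexopt; -f forces recompilation
+  # and -m selects the compilation filter.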
+  cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
+         package_name]
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  outputs = parallel_devices.RunShellCommand(cmd, timeout=120).pGet(None)
+  for output in _PrintPerDeviceOutput(devices, outputs):
+    for line in output:
+      print(line)
+
+
+def _RunProfile(device, package_name, host_build_directory, pprof_out_path,
+                process_specifier, thread_specifier, extra_args):
+  simpleperf.PrepareDevice(device)
+  device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name)
+  with tempfile.NamedTemporaryFile() as fh:
+    host_simpleperf_out_path = fh.name
+
+    with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name,
+                                  process_specifier, thread_specifier,
+                                  extra_args, host_simpleperf_out_path):
+      sys.stdout.write('Profiler is running; press Enter to stop...')
+      sys.stdin.read(1)
+      sys.stdout.write('Post-processing data...')
+      sys.stdout.flush()
+
+    simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path,
+                                        host_build_directory, pprof_out_path)
+    print(textwrap.dedent("""
+        Profile data written to %(s)s.
+
+        To view profile as a call graph in browser:
+          pprof -web %(s)s
+
+        To print the hottest methods:
+          pprof -top %(s)s
+
+        pprof has many useful customization options; `pprof --help` for details.
+        """ % {'s': pprof_out_path}))
+
+
+class _StackScriptContext(object):
+  """Maintains temporary files needed by stack.py."""
+
+  def __init__(self,
+               output_directory,
+               apk_path,
+               bundle_generation_info,
+               quiet=False):
+    self._output_directory = output_directory
+    self._apk_path = apk_path
+    self._bundle_generation_info = bundle_generation_info
+    self._staging_dir = None
+    self._quiet = quiet
+
+  def _CreateStaging(self):
+    # In many cases, stack decoding requires APKs to map trace lines to native
+    # libraries. Create a temporary directory, and either unpack a bundle's
+    # APKS into it, or simply symlink the standalone APK into it. This
+    # provides an unambiguous set of APK files for the stack decoding process
+    # to inspect.
+    logging.debug('Creating stack staging directory')
+    self._staging_dir = tempfile.mkdtemp()
+    bundle_generation_info = self._bundle_generation_info
+
+    if bundle_generation_info:
+      # TODO(wnwen): Use apk_helper instead.
+      _GenerateBundleApks(bundle_generation_info)
+      logging.debug('Extracting .apks file')
+      with zipfile.ZipFile(bundle_generation_info.bundle_apks_path, 'r') as z:
+        files_to_extract = [
+            f for f in z.namelist() if f.endswith('-master.apk')
+        ]
+        z.extractall(self._staging_dir, files_to_extract)
+    elif self._apk_path:
+      # Otherwise an incremental APK and an empty apks directory are correct.
+      output = os.path.join(self._staging_dir, os.path.basename(self._apk_path))
+      os.symlink(self._apk_path, output)
+
+  def Close(self):
+    if self._staging_dir:
+      logging.debug('Clearing stack staging directory')
+      shutil.rmtree(self._staging_dir)
+      self._staging_dir = None
+
+  def Popen(self, input_file=None, **kwargs):
+    if self._staging_dir is None:
+      self._CreateStaging()
+    stack_script = os.path.join(
+        constants.host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+        'stack.py')
+    cmd = [
+        stack_script, '--output-directory', self._output_directory,
+        '--apks-directory', self._staging_dir
+    ]
+    if self._quiet:
+      cmd.append('--quiet')
+    if input_file:
+      cmd.append(input_file)
+    logging.info('Running stack.py')
+    return subprocess.Popen(cmd, **kwargs)
+
+
+def _GenerateAvailableDevicesMessage(devices):
+  devices_obj = device_utils.DeviceUtils.parallel(devices)
+  descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
+  msg = 'Available devices:\n'
+  for d, desc in zip(devices, descriptions):
+    msg += '  %s (%s)\n' % (d, desc)
+  return msg
+
+
+# TODO(agrieve): Add "--all" to the MultipleDevicesError message and use it
+# here.
+def _GenerateMissingAllFlagMessage(devices):
+  return ('More than one device available. Use --all to select all devices, ' +
+          'or use --device to select a device by serial.\n\n' +
+          _GenerateAvailableDevicesMessage(devices))
+
+
+def _DisplayArgs(devices, command_line_flags_file):
+  def flags_helper(d):
+    changer = flag_changer.FlagChanger(d, command_line_flags_file)
+    return changer.GetCurrentFlags()
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  outputs = parallel_devices.pMap(flags_helper).pGet(None)
+  print('Existing flags per-device (via /data/local/tmp/{}):'.format(
+      command_line_flags_file))
+  for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
+    quoted_flags = ' '.join(pipes.quote(f) for f in flags)
+    print(quoted_flags or 'No flags set.')
+
+
+def _DeviceCachePath(device, output_directory):
+  file_name = 'device_cache_%s.json' % device.serial
+  return os.path.join(output_directory, file_name)
+
+
+def _LoadDeviceCaches(devices, output_directory):
+  if not output_directory:
+    return
+  for d in devices:
+    cache_path = _DeviceCachePath(d, output_directory)
+    if os.path.exists(cache_path):
+      logging.debug('Using device cache: %s', cache_path)
+      with open(cache_path) as f:
+        d.LoadCacheData(f.read())
+      # Delete the cached file so that any exceptions cause it to be cleared.
+      os.unlink(cache_path)
+    else:
+      logging.debug('No cache present for device: %s', d)
+
+
+def _SaveDeviceCaches(devices, output_directory):
+  if not output_directory:
+    return
+  for d in devices:
+    cache_path = _DeviceCachePath(d, output_directory)
+    with open(cache_path, 'w') as f:
+      f.write(d.DumpCacheData())
+      logging.info('Wrote device cache: %s', cache_path)
+
+
+class _Command(object):
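+  """Base class for the sub-commands defined below.
+
+  Sub-classes override the class attributes that follow to declare what they
+  need (package name, apk_helper, devices, ...) and implement Run().
+  """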
+  name = None
+  description = None
+  long_description = None
+  needs_package_name = False
+  needs_output_directory = False
+  needs_apk_helper = False
+  supports_incremental = False
+  accepts_command_line_flags = False
+  accepts_args = False
+  need_device_args = True
+  all_devices_by_default = False
+  calls_exec = False
+  supports_multiple_devices = True
+
+  def __init__(self, from_wrapper_script, is_bundle):
+    self._parser = None
+    self._from_wrapper_script = from_wrapper_script
+    self.args = None
+    self.apk_helper = None
+    self.additional_apk_helpers = None
+    self.install_dict = None
+    self.devices = None
+    self.is_bundle = is_bundle
+    self.bundle_generation_info = None
+    # Only support incremental install from APK wrapper scripts.
+    if is_bundle or not from_wrapper_script:
+      self.supports_incremental = False
+
+  def RegisterBundleGenerationInfo(self, bundle_generation_info):
+    self.bundle_generation_info = bundle_generation_info
+
+  def _RegisterExtraArgs(self, subp):
+    pass
+
+  def RegisterArgs(self, parser):
+    subp = parser.add_parser(
+        self.name, help=self.description,
+        description=self.long_description or self.description,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    self._parser = subp
+    subp.set_defaults(command=self)
+    if self.need_device_args:
+      subp.add_argument('--all',
+                        action='store_true',
+                        default=self.all_devices_by_default,
+                        help='Operate on all connected devices.',)
+      subp.add_argument('-d',
+                        '--device',
+                        action='append',
+                        default=[],
+                        dest='devices',
+                        help='Target device for script to work on. Enter '
+                            'multiple times for multiple devices.')
+    subp.add_argument('-v',
+                      '--verbose',
+                      action='count',
+                      default=0,
+                      dest='verbose_count',
+                      help='Verbose level (multiple times for more)')
+    group = subp.add_argument_group('%s arguments' % self.name)
+
+    if self.needs_package_name:
+      # Three cases to consider here, since later code assumes
+      #  self.args.package_name always exists, even if None:
+      #
+      # - Called from a bundle wrapper script, the package_name is already
+      #   set through parser.set_defaults(), so don't call add_argument()
+      #   to avoid overriding its value.
+      #
+      # - Called from an apk wrapper script. The --package-name argument
+      #   should not appear, but self.args.package_name will be gleaned from
+      #   the --apk-path file later.
+      #
+      # - Called directly, then --package-name is required on the command-line.
+      #
+      if not self.is_bundle:
+        group.add_argument(
+            '--package-name',
+            help=argparse.SUPPRESS if self._from_wrapper_script else (
+                "App's package name."))
+
+    if self.needs_apk_helper or self.needs_package_name:
+      # Adding this argument to the subparser would override the set_defaults()
+      # value set on the parent parser (even if None).
+      if not self._from_wrapper_script and not self.is_bundle:
+        group.add_argument(
+            '--apk-path', required=self.needs_apk_helper, help='Path to .apk')
+
+    if self.supports_incremental:
+      group.add_argument('--incremental',
+                         action='store_true',
+                         default=False,
+                         help='Always install an incremental apk.')
+      group.add_argument('--non-incremental',
+                         action='store_true',
+                         default=False,
+                         help='Always install a non-incremental apk.')
+
+    # accepts_command_line_flags and accepts_args are mutually exclusive.
+    # argparse will throw if they are both set.
+    if self.accepts_command_line_flags:
+      group.add_argument(
+          '--args', help='Command-line flags. Use = to assign args.')
+
+    if self.accepts_args:
+      group.add_argument(
+          '--args', help='Extra arguments. Use = to assign args.')
+
+    if not self._from_wrapper_script and self.accepts_command_line_flags:
+      # Provided by wrapper scripts.
+      group.add_argument(
+          '--command-line-flags-file',
+          help='Name of the command-line flags file')
+
+    self._RegisterExtraArgs(group)
+
+  def _CreateApkHelpers(self, args, incremental_apk_path, install_dict):
+    """Returns true iff self.apk_helper was created and assigned."""
+    if self.apk_helper is None:
+      if args.apk_path:
+        self.apk_helper = apk_helper.ToHelper(args.apk_path)
+      elif incremental_apk_path:
+        self.install_dict = install_dict
+        self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
+      elif self.is_bundle:
+        _GenerateBundleApks(self.bundle_generation_info)
+        self.apk_helper = apk_helper.ToHelper(
+            self.bundle_generation_info.bundle_apks_path)
+    if args.additional_apk_paths and self.additional_apk_helpers is None:
+      self.additional_apk_helpers = [
+          apk_helper.ToHelper(apk_path)
+          for apk_path in args.additional_apk_paths
+      ]
+    return self.apk_helper is not None
+
+  def ProcessArgs(self, args):
+    self.args = args
+    # Ensure these keys always exist. They are set by wrapper scripts, but may
+    # be missing when not using wrapper scripts.
+    args.__dict__.setdefault('apk_path', None)
+    args.__dict__.setdefault('incremental_json', None)
+
+    incremental_apk_path = None
+    install_dict = None
+    if args.incremental_json and not (self.supports_incremental and
+                                      args.non_incremental):
+      with open(args.incremental_json) as f:
+        install_dict = json.load(f)
+        incremental_apk_path = os.path.join(args.output_directory,
+                                            install_dict['apk_path'])
+        if not os.path.exists(incremental_apk_path):
+          incremental_apk_path = None
+
+    if self.supports_incremental:
+      if args.incremental and args.non_incremental:
+        self._parser.error('Must use only one of --incremental and '
+                           '--non-incremental')
+      elif args.non_incremental:
+        if not args.apk_path:
+          self._parser.error('Apk has not been built.')
+      elif args.incremental:
+        if not incremental_apk_path:
+          self._parser.error('Incremental apk has not been built.')
+        args.apk_path = None
+
+      if args.apk_path and incremental_apk_path:
+        self._parser.error('Both incremental and non-incremental apks exist. '
+                           'Select using --incremental or --non-incremental')
+
+    # Gate apk_helper creation on _CreateApkHelpers: for bundles, unpacking
+    # the .apks file from the .aab takes a while, so avoid that slowdown for
+    # simple commands that don't need apk_helper.
+    if self.needs_apk_helper:
+      if not self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        self._parser.error('App is not built.')
+
+    if self.needs_package_name and not args.package_name:
+      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        args.package_name = self.apk_helper.GetPackageName()
+      elif self._from_wrapper_script:
+        self._parser.error('App is not built.')
+      else:
+        self._parser.error('One of --package-name or --apk-path is required.')
+
+    self.devices = []
+    if self.need_device_args:
+      abis = None
+      if self._CreateApkHelpers(args, incremental_apk_path, install_dict):
+        abis = self.apk_helper.GetAbis()
+      self.devices = device_utils.DeviceUtils.HealthyDevices(
+          device_arg=args.devices,
+          enable_device_files_cache=bool(args.output_directory),
+          default_retries=0,
+          abis=abis)
+      # TODO(agrieve): Device cache should not depend on output directory.
+      #     Maybe put into /tmp?
+      _LoadDeviceCaches(self.devices, args.output_directory)
+
+      try:
+        if len(self.devices) > 1:
+          if not self.supports_multiple_devices:
+            self._parser.error(device_errors.MultipleDevicesError(self.devices))
+          if not args.all and not args.devices:
+            self._parser.error(_GenerateMissingAllFlagMessage(self.devices))
+        # Save the cache now if the command will not get a chance afterwards.
+        if self.calls_exec:
+          _SaveDeviceCaches(self.devices, args.output_directory)
+      except:
+        _SaveDeviceCaches(self.devices, args.output_directory)
+        raise
+
+
+class _DevicesCommand(_Command):
+  name = 'devices'
+  description = 'Describe attached devices.'
+  all_devices_by_default = True
+
+  def Run(self):
+    print(_GenerateAvailableDevicesMessage(self.devices))
+
+
+class _PackageInfoCommand(_Command):
+  name = 'package-info'
+  description = 'Show various attributes of this app.'
+  need_device_args = False
+  needs_package_name = True
+  needs_apk_helper = True
+
+  def Run(self):
+    # Format all (even ints) as strings, to handle cases where APIs return None
+    print('Package name: "%s"' % self.args.package_name)
+    print('versionCode: %s' % self.apk_helper.GetVersionCode())
+    print('versionName: "%s"' % self.apk_helper.GetVersionName())
+    print('minSdkVersion: %s' % self.apk_helper.GetMinSdkVersion())
+    print('targetSdkVersion: %s' % self.apk_helper.GetTargetSdkVersion())
+    print('Supported ABIs: %r' % self.apk_helper.GetAbis())
+
+
+class _InstallCommand(_Command):
+  name = 'install'
+  description = 'Installs the APK or bundle to one or more devices.'
+  needs_apk_helper = True
+  supports_incremental = True
+  default_modules = []
+
+  def _RegisterExtraArgs(self, group):
+    if self.is_bundle:
+      group.add_argument(
+          '-m',
+          '--module',
+          action='append',
+          default=self.default_modules,
+          help='Module to install. Can be specified multiple times.')
+      group.add_argument(
+          '-f',
+          '--fake',
+          action='append',
+          default=[],
+          help='Fake bundle module install. Can be specified multiple times. '
+          'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format(
+              BASE_MODULE))
+      # Add even if |self.default_modules| is empty, for consistency.
+      group.add_argument('--no-module',
+                         action='append',
+                         choices=self.default_modules,
+                         default=[],
+                         help='Module to exclude from default install.')
+
+  def Run(self):
+    if self.additional_apk_helpers:
+      for additional_apk_helper in self.additional_apk_helpers:
+        _InstallApk(self.devices, additional_apk_helper, None)
+    if self.is_bundle:
+      modules = list(
+          set(self.args.module) - set(self.args.no_module) -
+          set(self.args.fake))
+      _InstallBundle(self.devices, self.apk_helper, self.args.package_name,
+                     self.args.command_line_flags_file, modules, self.args.fake)
+    else:
+      _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
+class _UninstallCommand(_Command):
+  name = 'uninstall'
+  description = 'Removes the APK or bundle from one or more devices.'
+  needs_package_name = True
+
+  def Run(self):
+    _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
+class _SetWebViewProviderCommand(_Command):
+  name = 'set-webview-provider'
+  description = ("Sets the device's WebView provider to this APK's "
+                 "package name.")
+  needs_package_name = True
+  needs_apk_helper = True
+
+  def Run(self):
+    if not _IsWebViewProvider(self.apk_helper):
+      raise Exception('This package does not have a WebViewLibrary meta-data '
+                      'tag. Are you sure it contains a WebView implementation?')
+    _SetWebViewProvider(self.devices, self.args.package_name)
+
+
+class _LaunchCommand(_Command):
+  name = 'launch'
+  description = ('Sends a launch intent for the APK or bundle after first '
+                 'writing the command-line flags file.')
+  needs_package_name = True
+  accepts_command_line_flags = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
+                       help='Pause execution until debugger attaches. Applies '
+                            'only to the main process. To have renderers wait, '
+                            'use --args="--renderer-wait-for-java-debugger"')
+    group.add_argument('--debug-process-name',
+                       help='Name of the process to debug. '
+                            'E.g. "privileged_process0", or "foo.bar:baz"')
+    group.add_argument('--nokill', action='store_true',
+                       help='Do not set the debug-app, nor set command-line '
+                            'flags. Useful to load a URL without having the '
+                             'app restart.')
+    group.add_argument('url', nargs='?', help='A URL to launch with.')
+
+  def Run(self):
+    if self.args.url and self.is_bundle:
+      # TODO(digit): Support this, maybe by using 'dumpsys' as described
+      # in the _LaunchUrl() comment.
+      raise Exception('Launching with URL not supported for bundles yet!')
+    _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args,
+               command_line_flags_file=self.args.command_line_flags_file,
+               url=self.args.url, apk=self.apk_helper,
+               wait_for_java_debugger=self.args.wait_for_java_debugger,
+               debug_process_name=self.args.debug_process_name,
+               nokill=self.args.nokill)
+
+
+class _StopCommand(_Command):
+  name = 'stop'
+  description = 'Force-stops the app.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    device_utils.DeviceUtils.parallel(self.devices).ForceStop(
+        self.args.package_name)
+
+
+class _ClearDataCommand(_Command):
+  name = 'clear-data'
+  description = 'Clears all app data.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
+        self.args.package_name)
+
+
+class _ArgvCommand(_Command):
+  name = 'argv'
+  description = 'Display and optionally update command-line flags file.'
+  needs_package_name = True
+  accepts_command_line_flags = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _ChangeFlags(self.devices, self.args.args,
+                 self.args.command_line_flags_file)
+
+
+class _GdbCommand(_Command):
+  name = 'gdb'
+  description = 'Runs //build/android/adb_gdb with apk-specific args.'
+  long_description = description + """
+
+To attach to a process other than the APK's main process, use --pid=1234.
+To list all PIDs, use the "ps" command.
+
+If no apk process is currently running, sends a launch intent.
+"""
+  needs_package_name = True
+  needs_output_directory = True
+  calls_exec = True
+  supports_multiple_devices = False
+
+  def Run(self):
+    _RunGdb(self.devices[0], self.args.package_name,
+            self.args.debug_process_name, self.args.pid,
+            self.args.output_directory, self.args.target_cpu, self.args.port,
+            self.args.ide, bool(self.args.verbose_count))
+
+  def _RegisterExtraArgs(self, group):
+    pid_group = group.add_mutually_exclusive_group()
+    pid_group.add_argument('--debug-process-name',
+                           help='Name of the process to attach to. '
+                                'E.g. "privileged_process0", or "foo.bar:baz"')
+    pid_group.add_argument('--pid',
+                           help='The process ID to attach to. Defaults to '
+                                'the main process for the package.')
+    group.add_argument('--ide', action='store_true',
+                       help='Rather than enter a gdb prompt, set up the '
+                            'gdb connection and wait for an IDE to '
+                            'connect.')
+    # Same default port that ndk-gdb.py uses.
+    group.add_argument('--port', type=int, default=5039,
+                       help='Use the given port for the GDB connection')
+
+
+class _LogcatCommand(_Command):
+  name = 'logcat'
+  description = 'Runs "adb logcat" with filters relevant the current APK.'
+  long_description = description + """
+
+"Relevant filters" means:
+  * Log messages from processes belonging to the apk,
+  * Plus log messages from log tags: ActivityManager|DEBUG,
+  * Plus fatal logs from any process,
+  * Minus spammy dalvikvm logs (for pre-L devices).
+
+Colors:
+  * Primary process is white
+  * Other processes (gpu, renderer) are yellow
+  * Non-apk processes are grey
+  * UI thread has a bolded Thread-ID
+
+Java stack traces are detected and deobfuscated (for release builds).
+
+To disable filtering (but keep coloring), use --verbose.
+"""
+  needs_package_name = True
+  supports_multiple_devices = False
+
+  def Run(self):
+    deobfuscate = None
+    if self.args.proguard_mapping_path and not self.args.no_deobfuscate:
+      deobfuscate = deobfuscator.Deobfuscator(self.args.proguard_mapping_path)
+
+    stack_script_context = _StackScriptContext(
+        self.args.output_directory,
+        self.args.apk_path,
+        self.bundle_generation_info,
+        quiet=True)
+    try:
+      _RunLogcat(self.devices[0], self.args.package_name, stack_script_context,
+                 deobfuscate, bool(self.args.verbose_count))
+    except KeyboardInterrupt:
+      pass  # Don't show stack trace upon Ctrl-C
+    finally:
+      stack_script_context.Close()
+      if deobfuscate:
+        deobfuscate.Close()
+
+  def _RegisterExtraArgs(self, group):
+    if self._from_wrapper_script:
+      group.add_argument('--no-deobfuscate', action='store_true',
+          help='Disables ProGuard deobfuscation of logcat.')
+    else:
+      group.set_defaults(no_deobfuscate=False)
+      group.add_argument('--proguard-mapping-path',
+          help='Path to ProGuard map (enables deobfuscation)')
+
+
+class _PsCommand(_Command):
+  name = 'ps'
+  description = 'Show PIDs of any APK processes currently running.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _RunPs(self.devices, self.args.package_name)
+
+
+class _DiskUsageCommand(_Command):
+  name = 'disk-usage'
+  description = 'Show how much device storage is being consumed by the app.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def Run(self):
+    _RunDiskUsage(self.devices, self.args.package_name)
+
+
+class _MemUsageCommand(_Command):
+  name = 'mem-usage'
+  description = 'Show memory usage of currently running APK processes.'
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument('--query-app', action='store_true',
+        help='Do not add --local to "dumpsys meminfo". This will output '
+             'additional metrics (e.g. Context count), but also cause memory '
+             'to be used in order to gather the metrics.')
+
+  def Run(self):
+    _RunMemUsage(self.devices, self.args.package_name,
+                 query_app=self.args.query_app)
+
+
+class _ShellCommand(_Command):
+  name = 'shell'
+  description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
+                 '(via run-as). Useful for inspecting the app\'s data '
+                 'directory.')
+  needs_package_name = True
+
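+  # With no positional command, this execs an interactive "adb shell" on a
+  # single device; when a command is given, it can run on many devices in
+  # parallel (see _RunShell above).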
+  @property
+  def calls_exec(self):
+    return not self.args.cmd
+
+  @property
+  def supports_multiple_devices(self):
+    return not self.args.cmd
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'cmd', nargs=argparse.REMAINDER, help='Command to run.')
+
+  def Run(self):
+    _RunShell(self.devices, self.args.package_name, self.args.cmd)
+
+
+class _CompileDexCommand(_Command):
+  name = 'compile-dex'
+  description = ('Applicable only for Android N+. Forces .odex files to be '
+                 'compiled with the given compilation filter. To see existing '
+                 'filter, use "disk-usage" command.')
+  needs_package_name = True
+  all_devices_by_default = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'compilation_filter',
+        choices=['verify', 'quicken', 'space-profile', 'space',
+                 'speed-profile', 'speed'],
+        help='For WebView/Monochrome, use "speed". For other apks, use '
+             '"speed-profile".')
+
+  def Run(self):
+    _RunCompileDex(self.devices, self.args.package_name,
+                   self.args.compilation_filter)
+
+
+class _PrintCertsCommand(_Command):
+  name = 'print-certs'
+  description = 'Print info about certificates used to sign this APK.'
+  need_device_args = False
+  needs_apk_helper = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--full-cert',
+        action='store_true',
+        help=("Print the certificate's full signature, Base64-encoded. "
+              "Useful when configuring an Android image's "
+              "config_webview_packages.xml."))
+
+  def Run(self):
+    keytool = os.path.join(_JAVA_HOME, 'bin', 'keytool')
+    if self.is_bundle:
+      # Bundles are not signed until converted to .apks. The wrapper scripts
+      # do record which key will be used for signing, though.
+      with tempfile.NamedTemporaryFile() as f:
+        logging.warning('Bundles are not signed until turned into .apk files.')
+        logging.warning('Showing signing info based on associated keystore.')
+        cmd = [
+            keytool, '-exportcert', '-keystore',
+            self.bundle_generation_info.keystore_path, '-storepass',
+            self.bundle_generation_info.keystore_password, '-alias',
+            self.bundle_generation_info.keystore_alias, '-file', f.name
+        ]
+        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        cmd = [keytool, '-printcert', '-file', f.name]
+        logging.warning('Running: %s', ' '.join(cmd))
+        subprocess.check_call(cmd)
+        if self.args.full_cert:
+          # Redirect stderr to hide a keytool warning about using non-standard
+          # keystore format.
+          full_output = subprocess.check_output(
+              cmd + ['-rfc'], stderr=subprocess.STDOUT)
+    else:
+      cmd = [
+          build_tools.GetPath('apksigner'), 'verify', '--print-certs',
+          '--verbose', self.apk_helper.path
+      ]
+      logging.warning('Running: %s', ' '.join(cmd))
+      env = os.environ.copy()
+      env['PATH'] = os.path.pathsep.join(
+          [os.path.join(_JAVA_HOME, 'bin'),
+           env.get('PATH')])
+      stdout = subprocess.check_output(cmd, env=env)
+      print(stdout)
+      if self.args.full_cert:
+        if 'v1 scheme (JAR signing): true' not in stdout:
+          raise Exception(
+              'Cannot print full certificate because apk is not V1 signed.')
+
+        cmd = [keytool, '-printcert', '-jarfile', self.apk_helper.path, '-rfc']
+        # Redirect stderr to hide a keytool warning about using non-standard
+        # keystore format.
+        full_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+
+    if self.args.full_cert:
+      m = re.search(
+          r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+',
+          full_output, re.MULTILINE)
+      if not m:
+        raise Exception('Unable to parse certificate:\n{}'.format(full_output))
+      signature = re.sub(r'[\r\n]+', '', m.group(1))
+      print()
+      print('Full Signature:')
+      print(signature)
+
+
+class _ProfileCommand(_Command):
+  name = 'profile'
+  description = ('Run the simpleperf sampling CPU profiler on the currently-'
+                 'running APK. If --args is used, the extra arguments will be '
+                 'passed on to simpleperf; otherwise, the following default '
+                 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data')
+  needs_package_name = True
+  needs_output_directory = True
+  supports_multiple_devices = False
+  accepts_args = True
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--profile-process', default='browser',
+        help=('Which process to profile. This may be a process name or pid '
+              'such as you would get from running `%s ps`; or '
+              'it can be one of (browser, renderer, gpu).' % sys.argv[0]))
+    group.add_argument(
+        '--profile-thread', default=None,
+        help=('(Optional) Profile only a single thread. This may be either a '
+              'thread ID such as you would get by running `adb shell ps -t` '
+              '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may '
+              'be one of (io, compositor, main, render), in which case '
+              '--profile-process is also required. (Note that "render" thread '
+              'refers to a thread in the browser process that manages a '
+              'renderer; to profile the main thread of the renderer process, '
+              'use --profile-thread=main).'))
+    group.add_argument('--profile-output', default='profile.pb',
+                       help='Output file for profiling data')
+
+  def Run(self):
+    extra_args = shlex.split(self.args.args or '')
+    _RunProfile(self.devices[0], self.args.package_name,
+                self.args.output_directory, self.args.profile_output,
+                self.args.profile_process, self.args.profile_thread,
+                extra_args)
+
+
+class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand):
+  name = 'run'
+  description = 'Install, launch, and show logcat (when targeting one device).'
+  all_devices_by_default = False
+  supports_multiple_devices = True
+
+  def _RegisterExtraArgs(self, group):
+    _InstallCommand._RegisterExtraArgs(self, group)
+    _LaunchCommand._RegisterExtraArgs(self, group)
+    _LogcatCommand._RegisterExtraArgs(self, group)
+    group.add_argument('--no-logcat', action='store_true',
+                       help='Install and launch, but do not enter logcat.')
+
+  def Run(self):
+    logging.warning('Installing...')
+    _InstallCommand.Run(self)
+    logging.warning('Sending launch intent...')
+    _LaunchCommand.Run(self)
+    if len(self.devices) == 1 and not self.args.no_logcat:
+      logging.warning('Entering logcat...')
+      _LogcatCommand.Run(self)
+
+
+class _BuildBundleApks(_Command):
+  name = 'build-bundle-apks'
+  description = ('Build the .apks archive from an Android app bundle, and '
+                 'optionally copy it to a specific destination.')
+  need_device_args = False
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--output-apks', required=True, help='Destination path for .apks file.')
+    group.add_argument(
+        '--minimal',
+        action='store_true',
+        help='Build .apks archive that targets the bundle\'s minSdkVersion and '
+        'contains only English splits. It still contains optional splits.')
+    group.add_argument(
+        '--sdk-version', help='The sdkVersion to build the .apks for.')
+    group.add_argument(
+        '--build-mode',
+        choices=app_bundle_utils.BUILD_APKS_MODES,
+        help='Specify which type of APKs archive to build. "default" '
+        'generates regular splits, "universal" generates an archive with a '
+        'single universal APK, "system" generates an archive with a system '
+        'image APK, while "system_compressed" generates a compressed system '
+        'APK, with an additional stub APK for the system image.')
+    group.add_argument(
+        '--optimize-for',
+        choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS,
+        help='Override split configuration.')
+
+  def Run(self):
+    _GenerateBundleApks(
+        self.bundle_generation_info,
+        output_path=self.args.output_apks,
+        minimal=self.args.minimal,
+        minimal_sdk_version=self.args.sdk_version,
+        mode=self.args.build_mode,
+        optimize_for=self.args.optimize_for)
+
+
+class _ManifestCommand(_Command):
+  name = 'dump-manifest'
+  description = 'Dump the android manifest from this bundle, as XML, to stdout.'
+  need_device_args = False
+
+  def Run(self):
+    bundletool.RunBundleTool([
+        'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
+    ])
+
+
+class _StackCommand(_Command):
+  name = 'stack'
+  description = 'Decodes an Android stack.'
+  need_device_args = False
+
+  def _RegisterExtraArgs(self, group):
+    group.add_argument(
+        'file',
+        nargs='?',
+        help='File to decode. If not specified, stdin is processed.')
+
+  def Run(self):
+    context = _StackScriptContext(self.args.output_directory,
+                                  self.args.apk_path,
+                                  self.bundle_generation_info)
+    try:
+      proc = context.Popen(input_file=self.args.file)
+      if proc.wait():
+        raise Exception('stack script returned {}'.format(proc.returncode))
+    finally:
+      context.Close()
+
+
+# Shared commands for regular APKs and app bundles.
+_COMMANDS = [
+    _DevicesCommand,
+    _PackageInfoCommand,
+    _InstallCommand,
+    _UninstallCommand,
+    _SetWebViewProviderCommand,
+    _LaunchCommand,
+    _StopCommand,
+    _ClearDataCommand,
+    _ArgvCommand,
+    _GdbCommand,
+    _LogcatCommand,
+    _PsCommand,
+    _DiskUsageCommand,
+    _MemUsageCommand,
+    _ShellCommand,
+    _CompileDexCommand,
+    _PrintCertsCommand,
+    _ProfileCommand,
+    _RunCommand,
+    _StackCommand,
+]
+
+# Commands specific to app bundles.
+_BUNDLE_COMMANDS = [
+    _BuildBundleApks,
+    _ManifestCommand,
+]
+
+
+def _ParseArgs(parser, from_wrapper_script, is_bundle):
+  subparsers = parser.add_subparsers()
+  command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else [])
+  commands = [clazz(from_wrapper_script, is_bundle) for clazz in command_list]
+
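+  # Commands that need the output directory are only registered when invoked
+  # from a wrapper script, since only wrapper scripts know that directory.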
+  for command in commands:
+    if from_wrapper_script or not command.needs_output_directory:
+      command.RegisterArgs(subparsers)
+
+  # Show extended help when no command is passed.
+  argv = sys.argv[1:]
+  if not argv:
+    argv = ['--help']
+
+  return parser.parse_args(argv)
+
+
+def _RunInternal(parser,
+                 output_directory=None,
+                 additional_apk_paths=None,
+                 bundle_generation_info=None):
+  colorama.init()
+  parser.set_defaults(
+      additional_apk_paths=additional_apk_paths,
+      output_directory=output_directory)
+  from_wrapper_script = bool(output_directory)
+  args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info))
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  if bundle_generation_info:
+    args.command.RegisterBundleGenerationInfo(bundle_generation_info)
+  if args.additional_apk_paths:
+    for path in additional_apk_paths:
+      if not path or not os.path.exists(path):
+        raise Exception('Invalid additional APK path "{}"'.format(path))
+  args.command.ProcessArgs(args)
+  args.command.Run()
+  # Incremental install depends on the cache being cleared when uninstalling.
+  if args.command.name != 'uninstall':
+    _SaveDeviceCaches(args.command.devices, output_directory)
+
+
+def Run(output_directory, apk_path, additional_apk_paths, incremental_json,
+        command_line_flags_file, target_cpu, proguard_mapping_path):
+  """Entry point for generated wrapper scripts."""
+  constants.SetOutputDirectory(output_directory)
+  devil_chromium.Initialize(output_directory=output_directory)
+  parser = argparse.ArgumentParser()
+  exists_or_none = lambda p: p if p and os.path.exists(p) else None
+
+  parser.set_defaults(
+      command_line_flags_file=command_line_flags_file,
+      target_cpu=target_cpu,
+      apk_path=exists_or_none(apk_path),
+      incremental_json=exists_or_none(incremental_json),
+      proguard_mapping_path=proguard_mapping_path)
+  _RunInternal(
+      parser,
+      output_directory=output_directory,
+      additional_apk_paths=additional_apk_paths)
+
+
+def RunForBundle(output_directory, bundle_path, bundle_apks_path,
+                 additional_apk_paths, aapt2_path, keystore_path,
+                 keystore_password, keystore_alias, package_name,
+                 command_line_flags_file, proguard_mapping_path, target_cpu,
+                 system_image_locales, default_modules):
+  """Entry point for generated app bundle wrapper scripts.
+
+  Args:
+    output_directory: Chromium output directory path.
+    bundle_path: Input bundle path.
+    bundle_apks_path: Output bundle .apks archive path.
+    additional_apk_paths: Additional APKs to install prior to bundle install.
+    aapt2_path: Aapt2 tool path.
+    keystore_path: Keystore file path.
+    keystore_password: Keystore password.
+    keystore_alias: Signing key name alias in keystore file.
+    package_name: Application's package name.
+    command_line_flags_file: Optional. Name of an on-device file that will be
+      used to store command-line flags for this bundle.
+    proguard_mapping_path: Input path to the Proguard mapping file, used to
+      deobfuscate Java stack traces.
+    target_cpu: Chromium target CPU name, used by the 'gdb' command.
+    system_image_locales: List of Chromium locales that should be included in
+      system image APKs.
+    default_modules: List of modules that are installed in addition to those
+      given by the '-m' switch.
+  """
+  constants.SetOutputDirectory(output_directory)
+  devil_chromium.Initialize(output_directory=output_directory)
+  bundle_generation_info = BundleGenerationInfo(
+      bundle_path=bundle_path,
+      bundle_apks_path=bundle_apks_path,
+      aapt2_path=aapt2_path,
+      keystore_path=keystore_path,
+      keystore_password=keystore_password,
+      keystore_alias=keystore_alias,
+      system_image_locales=system_image_locales)
+  _InstallCommand.default_modules = default_modules
+
+  parser = argparse.ArgumentParser()
+  parser.set_defaults(
+      package_name=package_name,
+      command_line_flags_file=command_line_flags_file,
+      proguard_mapping_path=proguard_mapping_path,
+      target_cpu=target_cpu)
+  _RunInternal(
+      parser,
+      output_directory=output_directory,
+      additional_apk_paths=additional_apk_paths,
+      bundle_generation_info=bundle_generation_info)
+
+
+def main():
+  devil_chromium.Initialize()
+  _RunInternal(argparse.ArgumentParser())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/apk_operations.pydeps b/src/build/android/apk_operations.pydeps
new file mode 100644
index 0000000..60b1289
--- /dev/null
+++ b/src/build/android/apk_operations.pydeps
@@ -0,0 +1,110 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/apk_operations.pydeps build/android/apk_operations.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../gn_helpers.py
+../print_python_deps.py
+adb_command_line.py
+apk_operations.py
+convert_dex_profile.py
+devil_chromium.py
+gyp/bundletool.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+gyp/util/resource_utils.py
+gyp/util/zipalign.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/utils/__init__.py
+pylib/utils/app_bundle_utils.py
+pylib/utils/simpleperf.py
+pylib/utils/time_profile.py
diff --git a/src/build/android/apply_shared_preference_file.py b/src/build/android/apply_shared_preference_file.py
new file mode 100755
index 0000000..187bf18
--- /dev/null
+++ b/src/build/android/apply_shared_preference_file.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manually applies a shared preference JSON file.
+
+If needed during automation, use the --shared-prefs-file flag in
+test_runner.py instead.
+"""
+
+import argparse
+import sys
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+
+from devil.android import device_utils
+from devil.android.sdk import shared_prefs
+from pylib.utils import shared_preference_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Manually apply shared preference JSON files.')
+  parser.add_argument('filepaths', nargs='*',
+                      help='Any number of paths to shared preference JSON '
+                           'files to apply.')
+  args = parser.parse_args()
+
+  all_devices = device_utils.DeviceUtils.HealthyDevices()
+  if not all_devices:
+    raise RuntimeError('No healthy devices attached')
+
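+  # Each JSON setting is expected to carry at least 'package' and 'filename';
+  # 'supports_encrypted_path' is optional (see the SharedPrefs call below).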
+  for filepath in args.filepaths:
+    all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath)
+    for setting in all_settings:
+      for device in all_devices:
+        shared_pref = shared_prefs.SharedPrefs(
+            device, setting['package'], setting['filename'],
+            use_encrypted_path=setting.get('supports_encrypted_path', False))
+        shared_preference_utils.ApplySharedPreferenceSetting(
+            shared_pref, setting)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/asan_symbolize.py b/src/build/android/asan_symbolize.py
new file mode 100755
index 0000000..6585089
--- /dev/null
+++ b/src/build/android/asan_symbolize.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+# pylint: disable=wrong-import-order
+# Uses symbol.py from third_party/android_platform, not python's.
+with host_paths.SysPath(
+    host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+    position=0):
+  import symbol
+
+
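+# Example of a line this matches (illustrative, not taken from a real log):
+#   #0 0xb6a5c123  (/system/lib/libchrome.so+0x123456)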
+_RE_ASAN = re.compile(
+    r"""
+    (?P<prefix>.*?)
+    (?P<pos>\#\S*?)          # position of the call in stack.
+                             # escape the char "#" due to the VERBOSE flag.
+    \s+(\S*?)\s+
+    \(                       # match the char "(".
+        (?P<lib>.*?)         # library path.
+        \+0[xX](?P<addr>.*?) # address of the symbol in hex.
+                             # the prefix "0x" is skipped.
+    \)                       # match the char ")".
+    """, re.VERBOSE)
+
+# This named tuple models a parsed Asan log line.
+AsanParsedLine = collections.namedtuple('AsanParsedLine',
+                                        'prefix,library,pos,rel_address')
+
+# This named tuple models an Asan log line. 'raw' is the raw content
+# while 'parsed' is None or an AsanParsedLine instance.
+AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
+def _ParseAsanLogLine(line):
+  """Parse line into corresponding AsanParsedLine value, if any, or None."""
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return AsanParsedLine(prefix=m.group('prefix'),
+                        library=m.group('lib'),
+                        pos=m.group('pos'),
+                        rel_address='%08x' % int(m.group('addr'), 16))
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  # pylint: disable=no-member
+  return symbol.TranslateLibPath(library)
+
+
+def _PrintSymbolized(asan_input, arch):
+  """Print symbolized logcat output for Asan symbols.
+
+  Args:
+    asan_input: list of input lines.
+    arch: Target CPU architecture.
+  """
+  asan_libs = _FindASanLibraries()
+
+  # Maps library -> [ AsanParsedLine... ]
+  libraries = collections.defaultdict(list)
+
+  asan_log_lines = []
+  for line in asan_input:
+    line = line.rstrip()
+    parsed = _ParseAsanLogLine(line)
+    if parsed:
+      libraries[parsed.library].append(parsed)
+    asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))
+
+  # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
+  all_symbols = collections.defaultdict(dict)
+
+  for library, items in libraries.items():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i.rel_address for i in items])
+    # pylint: disable=no-member
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True,
+                                               cpu_arch=arch)
+    if info_dict:
+      all_symbols[library] = info_dict
+
+  for log_line in asan_log_lines:
+    m = log_line.parsed
+    if (m and m.library in all_symbols and
+        m.rel_address in all_symbols[m.library]):
+      # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
+      # NOTE: The documentation for SymbolInformationForSet() indicates
+      # that usually one wants to display the last list item, not the first.
+      # The code below takes the first; is this the best choice here?
+      s = all_symbols[m.library][m.rel_address][0]
+      print('%s%s %s %s' % (m.prefix, m.pos, s[0], s[1]))
+    else:
+      print(log_line.raw)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  parser.add_option('--output-directory',
+                    help='Path to the root build directory.')
+  parser.add_option('--arch', default='arm',
+                    help='CPU architecture name')
+  options, _ = parser.parse_args()
+
+  if options.output_directory:
+    constants.SetOutputDirectory(options.output_directory)
+  # Do an up-front test that the output directory is known.
+  constants.CheckOutputDirectory()
+
+  if options.logcat:
+    asan_input = open(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+
+  _PrintSymbolized(asan_input.readlines(), options.arch)
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/android/bytecode/BUILD.gn b/src/build/android/bytecode/BUILD.gn
new file mode 100644
index 0000000..36b5432
--- /dev/null
+++ b/src/build/android/bytecode/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_binary("bytecode_processor") {
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeProcessor.java",
+    "java/org/chromium/bytecode/ClassPathValidator.java",
+    "java/org/chromium/bytecode/TypeUtils.java",
+  ]
+  main_class = "org.chromium.bytecode.ByteCodeProcessor"
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+  wrapper_script_name = "helper/bytecode_processor"
+  enable_bytecode_checks = false
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()`.
+java_binary("fragment_activity_replacer") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer"
+}
+
+# A bytecode rewriter that replaces all calls to
+# `FragmentActivity Fragment.getActivity()` with
+# `Activity Fragment.getActivity()` followed by a cast to FragmentActivity.
+# Prefer :fragment_activity_replacer. This rewriter should only be used for
+# libraries that rely on getActivity() returning a FragmentActivity *and* are
+# not going to be used in an app that contains multiple copies of the AndroidX
+# Fragment library (i.e. WebLayer).
+java_binary("fragment_activity_replacer_single_androidx") {
+  main_class = "org.chromium.bytecode.FragmentActivityReplacer"
+  deps = [ ":fragment_activity_replacer_java" ]
+  wrapper_script_name = "helper/fragment_activity_replacer_single_androidx"
+  wrapper_script_args = [ "--single-androidx" ]
+}
+
+java_library("fragment_activity_replacer_java") {
+  visibility = [ ":*" ]
+  sources = [
+    "java/org/chromium/bytecode/ByteCodeRewriter.java",
+    "java/org/chromium/bytecode/FragmentActivityReplacer.java",
+  ]
+  deps = [
+    "//third_party/android_deps:org_ow2_asm_asm_commons_java",
+    "//third_party/android_deps:org_ow2_asm_asm_java",
+    "//third_party/android_deps:org_ow2_asm_asm_util_java",
+  ]
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
new file mode 100644
index 0000000..b767f4f
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -0,0 +1,167 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Java application that takes in an input jar, performs a series of bytecode
+ * transformations, and generates an output jar.
+ */
+class ByteCodeProcessor {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+    private static final int BUFFER_SIZE = 16384;
+    private static boolean sVerbose;
+    private static boolean sIsPrebuilt;
+    private static ClassLoader sDirectClassPathClassLoader;
+    private static ClassLoader sFullClassPathClassLoader;
+    private static Set<String> sFullClassPathJarPaths;
+    private static Set<String> sMissingClassesAllowlist;
+    private static Map<String, String> sJarToGnTarget;
+    private static ClassPathValidator sValidator;
+
+    private static Void processEntry(ZipEntry entry, byte[] data) {
+        ClassReader reader = new ClassReader(data);
+        if (sIsPrebuilt) {
+            sValidator.validateFullClassPath(
+                    reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+        } else {
+            sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+                    sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+                    sVerbose);
+        }
+        return null;
+    }
+
+    private static void process(String gnTarget, String inputJarPath)
+            throws ExecutionException, InterruptedException {
+        ExecutorService executorService =
+                Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+        try (ZipInputStream inputStream = new ZipInputStream(
+                     new BufferedInputStream(new FileInputStream(inputJarPath)))) {
+            while (true) {
+                ZipEntry entry = inputStream.getNextEntry();
+                if (entry == null) {
+                    break;
+                }
+                byte[] data = readAllBytes(inputStream);
+                executorService.submit(() -> processEntry(entry, data));
+            }
+            executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
+            executorService.awaitTermination(1, TimeUnit.HOURS);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        if (sValidator.hasErrors()) {
+            sValidator.printAll(gnTarget, sJarToGnTarget);
+            System.exit(1);
+        }
+    }
+
+    private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        int numRead = 0;
+        byte[] data = new byte[BUFFER_SIZE];
+        while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+            buffer.write(data, 0, numRead);
+        }
+        return buffer.toByteArray();
+    }
+
+    /**
+     * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+     * given jars.
+     */
+    static ClassLoader loadJars(Collection<String> paths) {
+        URL[] jarUrls = new URL[paths.size()];
+        int i = 0;
+        for (String path : paths) {
+            try {
+                jarUrls[i++] = new File(path).toURI().toURL();
+            } catch (MalformedURLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return new URLClassLoader(jarUrls);
+    }
+
+    /**
+     * Extracts a length-encoded list of strings from the arguments, and adds them to |out|. Returns
+     * the new "next index" to be processed.
+     */
+    private static int parseListArgument(String[] args, int index, Collection<String> out) {
+        int argLength = Integer.parseInt(args[index++]);
+        out.addAll(Arrays.asList(Arrays.copyOfRange(args, index, index + argLength)));
+        return index + argLength;
+    }
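+
+    // Example: with args = {"2", "a", "b", "rest"} and index = 0, this reads the
+    // list length "2", adds "a" and "b" to |out|, and returns 3 (the index of
+    // "rest").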
+
+    public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+                                                  ExecutionException, InterruptedException {
+        // Invoke this script using //build/android/gyp/bytecode_processor.py
+        int currIndex = 0;
+        String gnTarget = args[currIndex++];
+        String inputJarPath = args[currIndex++];
+        sVerbose = args[currIndex++].equals("--verbose");
+        sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+
+        sMissingClassesAllowlist = new HashSet<>();
+        currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
+
+        ArrayList<String> sdkJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, sdkJarPaths);
+
+        ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+        directClassPathJarPaths.add(inputJarPath);
+        directClassPathJarPaths.addAll(sdkJarPaths);
+        currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
+        sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+        ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+        ArrayList<String> gnTargets = new ArrayList<>();
+        parseListArgument(args, currIndex, gnTargets);
+        sJarToGnTarget = new HashMap<>();
+        assert fullClassPathJarPaths.size() == gnTargets.size();
+        for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+            sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+        }
+
+        // Load all jars that are on the classpath for the input jar for analyzing class
+        // hierarchy.
+        sFullClassPathJarPaths = new HashSet<>();
+        sFullClassPathJarPaths.add(inputJarPath);
+        sFullClassPathJarPaths.addAll(sdkJarPaths);
+        sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
+        sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+        sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+        sValidator = new ClassPathValidator();
+        process(gnTarget, inputJarPath);
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
new file mode 100644
index 0000000..3d0d9cd
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
@@ -0,0 +1,91 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Base class for scripts that perform bytecode modifications on a jar file.
+ */
+public abstract class ByteCodeRewriter {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+
+    public void rewrite(File inputJar, File outputJar) throws IOException {
+        if (!inputJar.exists()) {
+            throw new FileNotFoundException("Input jar not found: " + inputJar.getPath());
+        }
+        try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar))) {
+            try (OutputStream outputStream = new FileOutputStream(outputJar)) {
+                processZip(inputStream, outputStream);
+            }
+        }
+    }
+
+    /** Returns true if the class at the given path in the archive should be rewritten. */
+    protected abstract boolean shouldRewriteClass(String classPath);
+
+    /**
+     * Returns the ClassVisitor that should be used to modify the bytecode of class at the given
+     * path in the archive.
+     */
+    protected abstract ClassVisitor getClassVisitorForClass(
+            String classPath, ClassVisitor delegate);
+
+    private void processZip(InputStream inputStream, OutputStream outputStream) {
+        try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
+            ZipInputStream zipInputStream = new ZipInputStream(inputStream);
+            ZipEntry entry;
+            while ((entry = zipInputStream.getNextEntry()) != null) {
+                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+                boolean handled = processClassEntry(entry, zipInputStream, buffer);
+                if (handled) {
+                    ZipEntry newEntry = new ZipEntry(entry.getName());
+                    zipOutputStream.putNextEntry(newEntry);
+                    zipOutputStream.write(buffer.toByteArray(), 0, buffer.size());
+                } else {
+                    zipOutputStream.putNextEntry(entry);
+                    zipInputStream.transferTo(zipOutputStream);
+                }
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private boolean processClassEntry(
+            ZipEntry entry, InputStream inputStream, OutputStream outputStream) {
+        if (!entry.getName().endsWith(CLASS_FILE_SUFFIX) || !shouldRewriteClass(entry.getName())) {
+            return false;
+        }
+        try {
+            ClassReader reader = new ClassReader(inputStream);
+            ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
+            ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer);
+            reader.accept(classVisitor, ClassReader.EXPAND_FRAMES);
+
+            writer.visitEnd();
+            byte[] classData = writer.toByteArray();
+            outputStream.write(classData, 0, classData.length);
+            return true;
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 0000000..9f45df5
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,233 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.function.Consumer;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. For classes that cannot be found, it records a
+ * helpful error message when it knows where they might be found, and the program exits if a
+ * referenced class cannot be loaded from any given classpath.
+ */
+public class ClassPathValidator {
+    // Number of warnings to print.
+    private static final int MAX_MISSING_CLASS_WARNINGS = 10;
+    // Number of missing classes to show per missing jar.
+    private static final int MAX_ERRORS_PER_JAR = 2;
+    // Map of missing .jar -> Missing class -> Classes that failed.
+    // TreeMap so that error messages have a sorted list of jars.
+    private final Map<String, Map<String, Set<String>>> mDirectErrors =
+            Collections.synchronizedMap(new TreeMap<>());
+    // For missing classes, we only track one source class per missing class.
+    // Map of missingClass -> srcClass.
+    private final Map<String, String> mMissingClasses =
+            Collections.synchronizedMap(new TreeMap<>());
+
+    static class ClassNotLoadedException extends ClassNotFoundException {
+        private final String mClassName;
+
+        ClassNotLoadedException(String className, Throwable ex) {
+            super("Couldn't load " + className, ex);
+            mClassName = className;
+        }
+
+        public String getClassName() {
+            return mClassName;
+        }
+    }
+
+    private static void validateClass(ClassLoader classLoader, String className)
+            throws ClassNotLoadedException {
+        if (className.startsWith("[")) {
+            // Dealing with an array type which isn't encoded nicely in the constant pool.
+            // For example, [[Lorg/chromium/Class$1;
+            className = className.substring(className.lastIndexOf('[') + 1);
+            if (className.charAt(0) == 'L' && className.endsWith(";")) {
+                className = className.substring(1, className.length() - 1);
+            } else {
+                // Bailing out if we have a non-class array type.
+                // This could be something like [B
+                return;
+            }
+        }
+        if (className.matches(".*\\bR(\\$\\w+)?$")) {
+            // Resources in R.java files are not expected to be valid at this stage in the build.
+            return;
+        }
+        if (className.matches("^libcore\\b.*")) {
+            // libcore exists on devices, but is not included in the Android sdk as it is a private
+            // API.
+            return;
+        }
+        try {
+            classLoader.loadClass(className.replace('/', '.'));
+        } catch (ClassNotFoundException e) {
+            throw new ClassNotLoadedException(className, e);
+        } catch (NoClassDefFoundError e) {
+            // We assume that this is caused by another class that will itself fail
+            // to load, so we skip it here and let that class fail with
+            // ClassNotFoundException.
+        }
+    }
+
+    /**
+     * Given a .class file, see if every class referenced in the main class' constant pool can be
+     * loaded by the given ClassLoader.
+     *
+     * @param classReader .class file interface for reading the constant pool.
+     * @param classLoader classpath you wish to validate.
+     * @param errorConsumer Called for each missing class.
+     */
+    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+            Consumer<ClassNotLoadedException> errorConsumer) {
+        char[] charBuffer = new char[classReader.getMaxStringLength()];
+        // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+        // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+        for (int i = 1; i < classReader.getItemCount(); i++) {
+            int offset = classReader.getItem(i);
+            // Class entries have constant pool tag 7 (CONSTANT_Class).
+            // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+            if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+                try {
+                    validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                } catch (ClassNotLoadedException e) {
+                    errorConsumer.accept(e);
+                }
+            }
+        }
+    }
+
+    public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+            Set<String> missingClassAllowlist) {
+        // Prebuilts only need transitive dependencies checked, not direct dependencies.
+        validateClassPath(classReader, fullClassLoader, (e) -> {
+            if (!missingClassAllowlist.contains(e.getClassName())) {
+                addMissingError(classReader.getClassName(), e.getClassName());
+            }
+        });
+    }
+
+    public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+            ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+            Set<String> missingClassAllowlist, boolean verbose) {
+        validateClassPath(classReader, directClassLoader, (e) -> {
+            try {
+                validateClass(fullClassLoader, e.getClassName());
+            } catch (ClassNotLoadedException d) {
+                if (!missingClassAllowlist.contains(e.getClassName())) {
+                    addMissingError(classReader.getClassName(), e.getClassName());
+                }
+                return;
+            }
+            if (verbose) {
+                System.err.println("Class \"" + e.getClassName()
+                        + "\" not found in direct dependencies,"
+                        + " but found in indirect dependiences.");
+            }
+            // Iterating through all jars that are in the full classpath but not the direct
+            // classpath to find which one provides the class we are looking for.
+            for (String jarPath : jarsOnlyInFullClassPath) {
+                try {
+                    ClassLoader smallLoader =
+                            ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+                    validateClass(smallLoader, e.getClassName());
+                    addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+                    break;
+                } catch (ClassNotLoadedException f) {
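+                    // Class not found in this jar; try the next one.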
+                }
+            }
+        });
+    }
+
+    private void addMissingError(String srcClass, String missingClass) {
+        mMissingClasses.put(missingClass, srcClass);
+    }
+
+    private void addDirectError(String jarPath, String srcClass, String missingClass) {
+        synchronized (mDirectErrors) {
+            Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+            if (failedClassesByMissingClass == null) {
+                // TreeMap so that error messages have a sorted list of classes.
+                failedClassesByMissingClass = new TreeMap<>();
+                mDirectErrors.put(jarPath, failedClassesByMissingClass);
+            }
+            Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+            if (failedClasses == null) {
+                failedClasses = new TreeSet<>();
+                failedClassesByMissingClass.put(missingClass, failedClasses);
+            }
+            failedClasses.add(srcClass);
+        }
+    }
+
+    public boolean hasErrors() {
+        return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+    }
+
+    private static void printValidationError(
+            PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+        out.print(" * ");
+        out.println(gnTarget);
+        int i = 0;
+        // The list of missing classes is non-exhaustive because each class that fails to validate
+        // reports only the first missing class.
+        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+            String missingClass = entry.getKey();
+            Set<String> filesThatNeededIt = entry.getValue();
+            out.print("     * ");
+            if (i == MAX_ERRORS_PER_JAR) {
+                out.print(String.format(
+                        "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+                break;
+            }
+            out.print(missingClass.replace('/', '.'));
+            out.print(" (needed by ");
+            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+            if (filesThatNeededIt.size() > 1) {
+                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+            }
+            out.println(")");
+            i++;
+        }
+    }
+
+    public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+        String streamer = "=============================";
+        System.err.println();
+        System.err.println(streamer + " Dependency Checks Failed " + streamer);
+        System.err.println("Target: " + gnTarget);
+        if (!mMissingClasses.isEmpty()) {
+            int i = 0;
+            for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+                if (++i > MAX_MISSING_CLASS_WARNINGS) {
+                    System.err.println(String.format("... and %d more.",
+                            mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+                    break;
+                }
+                System.err.println(String.format(
+                        "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+                        entry.getKey(), entry.getValue()));
+            }
+            System.err.println();
+        }
+        if (!mDirectErrors.isEmpty()) {
+            System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+            for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+                printValidationError(
+                        System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+            }
+            System.err.println();
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
new file mode 100644
index 0000000..a40f39c
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java
@@ -0,0 +1,238 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.commons.MethodRemapper;
+import org.objectweb.asm.commons.Remapper;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Java application that modifies Fragment.getActivity() to return an Activity instead of a
+ * FragmentActivity, and updates any existing getActivity() calls to reference the updated method.
+ *
+ * See crbug.com/1144345 for more context.
+ */
+public class FragmentActivityReplacer extends ByteCodeRewriter {
+    private static final String GET_ACTIVITY_METHOD_NAME = "getActivity";
+    private static final String GET_LIFECYCLE_ACTIVITY_METHOD_NAME = "getLifecycleActivity";
+    private static final String NEW_METHOD_DESCRIPTOR = "()Landroid/app/Activity;";
+    private static final String OLD_METHOD_DESCRIPTOR =
+            "()Landroidx/fragment/app/FragmentActivity;";
+    private static final String REQUIRE_ACTIVITY_METHOD_NAME = "requireActivity";
+    private static final String SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME =
+            "com.google.android.gms.common.api.internal.SupportLifecycleFragmentImpl";
+
+    public static void main(String[] args) throws IOException {
+        // Invoke this script using //build/android/gyp/bytecode_rewriter.py
+        if (!(args.length == 2 || args.length == 3 && args[0].equals("--single-androidx"))) {
+            System.err.println("Expected arguments: [--single-androidx] <input.jar> <output.jar>");
+            System.exit(1);
+        }
+
+        if (args.length == 2) {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(false);
+            rewriter.rewrite(new File(args[0]), new File(args[1]));
+        } else {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(true);
+            rewriter.rewrite(new File(args[1]), new File(args[2]));
+        }
+    }
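+
+    // Conceptual command line (normally driven by the wrapper script mentioned
+    // above): FragmentActivityReplacer [--single-androidx] <input.jar> <output.jar>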
+
+    private final boolean mSingleAndroidX;
+
+    public FragmentActivityReplacer(boolean singleAndroidX) {
+        mSingleAndroidX = singleAndroidX;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(String classPath) {
+        return true;
+    }
+
+    @Override
+    protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+        ClassVisitor invocationVisitor = new InvocationReplacer(delegate, mSingleAndroidX);
+        switch (classPath) {
+            case "androidx/fragment/app/Fragment.class":
+                return new FragmentClassVisitor(invocationVisitor);
+            case "com/google/android/gms/common/api/internal/SupportLifecycleFragmentImpl.class":
+                return new SupportLifecycleFragmentImplClassVisitor(invocationVisitor);
+            default:
+                return invocationVisitor;
+        }
+    }
+
+    /**
+     * Updates any Fragment.getActivity/requireActivity() or getLifecycleActivity() calls to call
+     * the replaced method.
+     */
+    private static class InvocationReplacer extends ClassVisitor {
+        private final boolean mSingleAndroidX;
+
+        private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) {
+            super(Opcodes.ASM7, baseVisitor);
+            mSingleAndroidX = singleAndroidX;
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            MethodVisitor base = super.visitMethod(access, name, descriptor, signature, exceptions);
+            return new MethodVisitor(Opcodes.ASM7, base) {
+                @Override
+                public void visitMethodInsn(int opcode, String owner, String name,
+                        String descriptor, boolean isInterface) {
+                    boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    boolean isFragmentRequireActivity = name.equals(REQUIRE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && isFragmentSubclass(owner);
+                    boolean isSupportLifecycleFragmentImplGetLifecycleActivity =
+                            name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)
+                            && descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                            && owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME);
+                    if ((opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL)
+                            && (isFragmentGetActivity || isFragmentRequireActivity
+                                    || isSupportLifecycleFragmentImplGetLifecycleActivity)) {
+                        super.visitMethodInsn(
+                                opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface);
+                        if (mSingleAndroidX) {
+                            super.visitTypeInsn(
+                                    Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity");
+                        }
+                    } else {
+                        super.visitMethodInsn(opcode, owner, name, descriptor, isInterface);
+                    }
+                }
+
+                private boolean isFragmentSubclass(String internalType) {
+                    // Look up classes with a ClassLoader that will resolve any R classes to Object.
+                    // This is fine in this case as resource classes shouldn't be in the class
+                    // hierarchy of any Fragments.
+                    ClassLoader resourceStubbingClassLoader = new ClassLoader() {
+                        @Override
+                        protected Class<?> findClass(String name) throws ClassNotFoundException {
+                            if (name.matches(".*\\.R(\\$.+)?")) {
+                                return Object.class;
+                            }
+                            return super.findClass(name);
+                        }
+                    };
+
+                    // This doesn't use Class#isAssignableFrom to avoid us needing to load
+                    // AndroidX's Fragment class, which may not be on the classpath.
+                    try {
+                        String binaryName = Type.getObjectType(internalType).getClassName();
+                        Class<?> clazz = resourceStubbingClassLoader.loadClass(binaryName);
+                        while (clazz != null) {
+                            if (clazz.getName().equals("androidx.fragment.app.Fragment")) {
+                                return true;
+                            }
+                            clazz = clazz.getSuperclass();
+                        }
+                        return false;
+                    } catch (ClassNotFoundException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            };
+        }
+    }
+
+    /**
+     * Updates the implementation of Fragment.getActivity() and Fragment.requireActivity().
+     */
+    private static class FragmentClassVisitor extends ClassVisitor {
+        private FragmentClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // Update the descriptor of getActivity() and requireActivity().
+            MethodVisitor baseVisitor;
+            if (descriptor.equals(OLD_METHOD_DESCRIPTOR)
+                    && (name.equals(GET_ACTIVITY_METHOD_NAME)
+                            || name.equals(REQUIRE_ACTIVITY_METHOD_NAME))) {
+                // Some Fragments in a Clank library implement an interface that defines an
+                // `Activity getActivity()` method. Fragment.getActivity() is considered its
+                // implementation from a typechecking perspective, but javac still generates a
+                // getActivity() method in these Fragments that call Fragment.getActivity(). This
+                // isn't an issue when the methods return different types, but after changing
+                // Fragment.getActivity() to return an Activity, this generated implementation is
+                // now overriding Fragment's, which it can't do because Fragment.getActivity() is
+                // final. We make it non-final here to avoid this issue.
+                baseVisitor = super.visitMethod(
+                        access & ~Opcodes.ACC_FINAL, name, NEW_METHOD_DESCRIPTOR, null, exceptions);
+            } else {
+                baseVisitor = super.visitMethod(access, name, descriptor, signature, exceptions);
+            }
+
+            // Replace getActivity() with `return ContextUtils.activityFromContext(getContext());`
+            if (name.equals(GET_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                baseVisitor.visitVarInsn(Opcodes.ALOAD, 0);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "androidx/fragment/app/Fragment",
+                        "getContext", "()Landroid/content/Context;", false);
+                baseVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "org/chromium/utils/ContextUtils",
+                        "activityFromContext", "(Landroid/content/Context;)Landroid/app/Activity;",
+                        false);
+                baseVisitor.visitInsn(Opcodes.ARETURN);
+                // Since we set COMPUTE_FRAMES, the arguments of visitMaxs are ignored, but calling
+                // it forces ClassWriter to actually recompute the correct stack/local values.
+                // Without this call ClassWriter keeps the original stack=0,locals=1 which is wrong.
+                baseVisitor.visitMaxs(0, 0);
+                return null;
+            }
+
+            return new MethodRemapper(baseVisitor, new Remapper() {
+                @Override
+                public String mapType(String internalName) {
+                    if (internalName.equals("androidx/fragment/app/FragmentActivity")) {
+                        return "android/app/Activity";
+                    }
+                    return internalName;
+                }
+            });
+        }
+    }
+
+    /**
+     * Update SupportLifecycleFragmentImpl.getLifecycleActivity().
+     */
+    private static class SupportLifecycleFragmentImplClassVisitor extends ClassVisitor {
+        private SupportLifecycleFragmentImplClassVisitor(ClassVisitor baseVisitor) {
+            super(Opcodes.ASM7, baseVisitor);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            // SupportLifecycleFragmentImpl has two getLifecycleActivity methods:
+            //   1. public FragmentActivity getLifecycleActivity():
+            //      This is what you'll see in the source. This delegates to Fragment.getActivity().
+            //   2. public Activity getLifecycleActivity():
+            //      This is generated because the class implements LifecycleFragment, which
+            //      declares this method, and delegates to #1.
+            //
+            // Here we change the return type of #1 and delete #2.
+            if (name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)) {
+                if (descriptor.equals(OLD_METHOD_DESCRIPTOR)) {
+                    return super.visitMethod(
+                            access, name, NEW_METHOD_DESCRIPTOR, signature, exceptions);
+                }
+                return null;
+            }
+            return super.visitMethod(access, name, descriptor, signature, exceptions);
+        }
+    }
+}
diff --git a/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 0000000..ed2dc2d
--- /dev/null
+++ b/src/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for working with {@link Type}s and their String representations.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
+ */
+class TypeUtils {
+    static final String ASSERTION_ERROR = "java/lang/AssertionError";
+    static final String ASSET_MANAGER = "android/content/res/AssetManager";
+    static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+    static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+    static final String CONFIGURATION = "android/content/res/Configuration";
+    static final String CONTEXT = "android/content/Context";
+    static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+    static final String RESOURCES = "android/content/res/Resources";
+    static final String STRING = "java/lang/String";
+    static final String THEME = "android/content/res/Resources$Theme";
+
+    static final String BOOLEAN = "Z";
+    static final String INT = "I";
+    static final String VOID = "V";
+    private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+    static {
+        PRIMITIVE_DESCRIPTORS = new HashMap<>();
+        PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+    }
+
+    /**
+     * Returns the full method signature with internal names.
+     *
+     * @param methodName Name of the method (ex. "getResources").
+     * @param returnType Internal name for the return type.
+     * @param argumentTypes List of internal names for argument types.
+     * @return String representation of the method signature.
+     */
+    static String getMethodSignature(
+            String methodName, String returnType, String... argumentTypes) {
+        return methodName + getMethodDescriptor(returnType, argumentTypes);
+    }
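+
+    // Example: getMethodSignature("getResources", RESOURCES) returns
+    // "getResources()Landroid/content/res/Resources;".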
+
+    /**
+     * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}.
+     *
+     * @param returnType Internal name for the return type of the method (primitive or class).
+     * @param argumentTypes Internal names for the argument types (primitive or class).
+     * @return The generated method descriptor.
+     */
+    static String getMethodDescriptor(String returnType, String... argumentTypes) {
+        Type[] typedArguments = new Type[argumentTypes.length];
+        for (int i = 0; i < argumentTypes.length; ++i) {
+            // Argument list should be empty in this case, not V (void).
+            assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]);
+            typedArguments[i] = convert(argumentTypes[i]);
+        }
+        return Type.getMethodDescriptor(convert(returnType), typedArguments);
+    }
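+
+    // Example: getMethodDescriptor(BOOLEAN, STRING, INT) returns
+    // "(Ljava/lang/String;I)Z".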
+
+    /**
+     * Converts an internal name for a type to a {@link Type}.
+     *
+     * @param type Internal name for a type (primitive or class).
+     * @return The resulting Type.
+     */
+    private static Type convert(String type) {
+        if (PRIMITIVE_DESCRIPTORS.containsKey(type)) {
+            return PRIMITIVE_DESCRIPTORS.get(type);
+        }
+        return Type.getObjectType(type);
+    }
+}
diff --git a/src/build/android/chromium-debug.keystore b/src/build/android/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/src/build/android/chromium-debug.keystore
Binary files differ
diff --git a/src/build/android/convert_dex_profile.py b/src/build/android/convert_dex_profile.py
new file mode 100755
index 0000000..f9fdeb6
--- /dev/null
+++ b/src/build/android/convert_dex_profile.py
@@ -0,0 +1,557 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import re
+import subprocess
+import sys
+
+DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
+DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
+DEX_METHOD_TYPE_RE = re.compile( # type descriptor method signature re
+    r'\''
+    r'\('
+    r'(?P<method_params>[^)]*)'
+    r'\)'
+    r'(?P<method_return_type>[^\']+)'
+    r'\'')
+DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')
+
+PROFILE_METHOD_RE = re.compile(
+    r'(?P<tags>[HSP]+)' # tags such as H/S/P
+    r'(?P<class_name>L[^;]+;)' # class name in type descriptor format
+    r'->(?P<method_name>[^(]+)'
+    r'\((?P<method_params>[^)]*)\)'
+    r'(?P<method_return_type>.+)')
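+
+# Example profile line matched by PROFILE_METHOD_RE (hypothetical class):
+#   HSPLorg/chromium/Foo;->bar(ILjava/lang/String;)V
+# yields tags='HSP', class_name='Lorg/chromium/Foo;', method_name='bar',
+# method_params='ILjava/lang/String;' and method_return_type='V'.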
+
+PROGUARD_CLASS_MAPPING_RE = re.compile(
+    r'(?P<original_name>[^ ]+)'
+    r' -> '
+    r'(?P<obfuscated_name>[^:]+):')
+PROGUARD_METHOD_MAPPING_RE = re.compile(
+    # line_start:line_end: (optional)
+    r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
+    r'(?P<return_type>[^ ]+)' # original method return type
+    # original method class name (if exists)
+    r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
+    r'(?P<original_method_name>[^.\(]+)'
+    r'\((?P<params>[^\)]*)\)' # original method params
+    r'(?:[^ ]*)' # original method line numbers (ignored)
+    r' -> '
+    r'(?P<obfuscated_name>.+)') # obfuscated method name
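+
+# Example method mapping line (hypothetical names):
+#   '3:4:boolean isEnabled(int) -> a'
+# yields line_start='3', line_end='4', return_type='boolean',
+# original_method_name='isEnabled', params='int' and obfuscated_name='a'.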
+
+TYPE_DESCRIPTOR_RE = re.compile(
+    r'(?P<brackets>\[*)'
+    r'(?:'
+    r'(?P<class_name>L[^;]+;)'
+    r'|'
+    r'[VZBSCIJFD]'
+    r')')
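+
+# Example: '[[Lorg/chromium/Foo;' matches with brackets='[[' and
+# class_name='Lorg/chromium/Foo;'; a bare primitive such as 'I' matches with
+# class_name=None.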
+
+DOT_NOTATION_MAP = {
+    '': '',
+    'boolean': 'Z',
+    'byte': 'B',
+    'void': 'V',
+    'short': 'S',
+    'char': 'C',
+    'int': 'I',
+    'long': 'J',
+    'float': 'F',
+    'double': 'D'
+}
+
+class Method(object):
+  def __init__(self, name, class_name, param_types=None, return_type=None):
+    self.name = name
+    self.class_name = class_name
+    self.param_types = param_types
+    self.return_type = return_type
+
+  def __str__(self):
+    return '{}->{}({}){}'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  def __repr__(self):
+    return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
+        self.param_types or '', self.return_type or '')
+
+  def __cmp__(self, other):
+    return cmp((self.class_name, self.name, self.param_types, self.return_type),
+        (other.class_name, other.name, other.param_types, other.return_type))
+
+  def __hash__(self):
+    # only hash name and class_name since other fields may not be set yet.
+    return hash((self.name, self.class_name))
+
+
+class Class(object):
+  def __init__(self, name):
+    self.name = name
+    self._methods = []
+
+  def AddMethod(self, method, line_numbers):
+    self._methods.append((method, set(line_numbers)))
+
+  def FindMethodsAtLine(self, method_name, line_start, line_end=None):
+    """Searches through dex class for a method given a name and line numbers
+
+    The dex maps methods to line numbers, this method, given the a method name
+    in this class as well as a start line and an optional end line (which act as
+    hints as to which function in the class is being looked for), returns a list
+    of possible matches (or none if none are found).
+
+    Args:
+      method_name: name of method being searched for
+      line_start: start of hint range for lines in this method
+      line_end: end of hint range for lines in this method (optional)
+
+    Returns:
+      A list of Method objects that could match the hints given, or None if no
+      method is found.
+    """
+    found_methods = []
+    if line_end is None:
+      hint_lines = set([line_start])
+    else:
+      hint_lines = set(range(line_start, line_end+1))
+
+    named_methods = [(method, l) for method, l in self._methods
+                     if method.name == method_name]
+
+    if len(named_methods) == 1:
+      return [method for method, l in named_methods]
+    if len(named_methods) == 0:
+      return None
+
+    for method, line_numbers in named_methods:
+      if not hint_lines.isdisjoint(line_numbers):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+
+    for method, line_numbers in named_methods:
+      if (max(hint_lines) >= min(line_numbers)
+          and min(hint_lines) <= max(line_numbers)):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+            found_methods, hint_lines, self.name)
+      return found_methods
+    else:
+      logging.warning('No method named "%s" in class "%s" is '
+                      'mapped to lines %s', method_name, self.name, hint_lines)
+      return None
+
+
+class Profile(object):
+  def __init__(self):
+    # {Method: set(char)}
+    self._methods = collections.defaultdict(set)
+    self._classes = []
+
+  def AddMethod(self, method, tags):
+    for tag in tags:
+      self._methods[method].add(tag)
+
+  def AddClass(self, cls):
+    self._classes.append(cls)
+
+  def WriteToFile(self, path):
+    with open(path, 'w') as output_profile:
+      for cls in sorted(self._classes):
+        output_profile.write(cls + '\n')
+      for method in sorted(self._methods):
+        tags = sorted(self._methods[method])
+        line = '{}{}\n'.format(''.join(tags), str(method))
+        output_profile.write(line)
+
+
+class ProguardMapping(object):
+  def __init__(self):
+    # {Method: set(Method)}
+    self._method_mapping = collections.defaultdict(set)
+    # {String: String} String is class name in type descriptor format
+    self._class_mapping = dict()
+
+  def AddMethodMapping(self, from_method, to_method):
+    self._method_mapping[from_method].add(to_method)
+
+  def AddClassMapping(self, from_class, to_class):
+    self._class_mapping[from_class] = to_class
+
+  def GetMethodMapping(self, from_method):
+    return self._method_mapping.get(from_method)
+
+  def GetClassMapping(self, from_class):
+    return self._class_mapping.get(from_class, from_class)
+
+  def MapTypeDescriptor(self, type_descriptor):
+    match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
+    assert match is not None
+    class_name = match.group('class_name')
+    if class_name is not None:
+      return match.group('brackets') + self.GetClassMapping(class_name)
+    # just a native type, return as is
+    return match.group()
+
+  def MapTypeDescriptorList(self, type_descriptor_list):
+    return TYPE_DESCRIPTOR_RE.sub(
+        lambda match: self.MapTypeDescriptor(match.group()),
+        type_descriptor_list)
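+
+  # Example: given a class mapping of 'La/a;' -> 'Lorg/chromium/Foo;',
+  # MapTypeDescriptorList('[La/a;IZ') returns '[Lorg/chromium/Foo;IZ'.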
+
+
+class MalformedLineException(Exception):
+  def __init__(self, message, line_number):
+    super(MalformedLineException, self).__init__(message)
+    self.line_number = line_number
+
+  def __str__(self):
+    return self.message + ' at line {}'.format(self.line_number)
+
+
+class MalformedProguardMappingException(MalformedLineException):
+  pass
+
+
+class MalformedProfileException(MalformedLineException):
+  pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+  return subprocess.check_output([dexdump_path, dex_file_path]).splitlines()
+
+
+def _ReadFile(file_path):
+  with open(file_path, 'r') as f:
+    return f.readlines()
+
+
+def _ToTypeDescriptor(dot_notation):
+  """Parses a dot notation type and returns it in type descriptor format
+
+  e.g.:
+  org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity;
+  boolean -> Z
+  int[] -> [I
+
+  Args:
+    dot_notation: trimmed string with a single type in dot notation format
+
+  Returns:
+    A string with the type in type descriptor format
+  """
+  dot_notation = dot_notation.strip()
+  prefix = ''
+  while dot_notation.endswith('[]'):
+    prefix += '['
+    dot_notation = dot_notation[:-2]
+  if dot_notation in DOT_NOTATION_MAP:
+    return prefix + DOT_NOTATION_MAP[dot_notation]
+  return prefix + 'L' + dot_notation.replace('.', '/') + ';'
+
+
+def _DotNotationListToTypeDescriptorList(dot_notation_list_string):
+  """Parses a param list of dot notation format and returns it in type
+  descriptor format
+
+  e.g.:
+  org.chromium.browser.ChromeActivity,boolean,int[] ->
+      Lorg/chromium/browser/ChromeActivity;Z[I
+
+  Args:
+    dot_notation_list_string: single string with multiple comma-separated types
+                              in dot notation format
+
+  Returns:
+    A string with the param list in type descriptor format
+  """
+  return ''.join(_ToTypeDescriptor(param) for param in
+      dot_notation_list_string.split(','))
+
+
+def ProcessDex(dex_dump):
+  """Parses dexdump output returning a dict of class names to Class objects
+
+  Parses output of the dexdump command on a dex file and extracts information
+  about classes and their respective methods and which line numbers a method is
+  mapped to.
+
+  Methods that are not mapped to any line number are ignored and not listed
+  inside their respective Class objects.
+
+  Args:
+    dex_dump: An array of lines of dexdump output
+
+  Returns:
+    A dict that maps from class names in type descriptor format (but without the
+    surrounding 'L' and ';') to Class objects.
+  """
+  # class_name: Class
+  classes_by_name = {}
+  current_class = None
+  current_method = None
+  reading_positions = False
+  reading_methods = False
+  method_line_numbers = []
+  for line in dex_dump:
+    line = line.strip()
+    if line.startswith('Class descriptor'):
+      # New class started, no longer reading methods.
+      reading_methods = False
+      current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
+      classes_by_name[current_class.name] = current_class
+    elif (line.startswith('Direct methods')
+          or line.startswith('Virtual methods')):
+      reading_methods = True
+    elif reading_methods and line.startswith('name'):
+      assert current_class is not None
+      current_method = Method(
+          DEX_METHOD_NAME_RE.search(line).group('method_name'),
+          "L" + current_class.name + ";")
+    elif reading_methods and line.startswith('type'):
+      assert current_method is not None
+      match = DEX_METHOD_TYPE_RE.search(line)
+      current_method.param_types = match.group('method_params')
+      current_method.return_type = match.group('method_return_type')
+    elif line.startswith('positions'):
+      assert reading_methods
+      reading_positions = True
+      method_line_numbers = []
+    elif reading_positions and line.startswith('0x'):
+      line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
+      method_line_numbers.append(int(line_number))
+    elif reading_positions and line.startswith('locals'):
+      if len(method_line_numbers) > 0:
+        current_class.AddMethod(current_method, method_line_numbers)
+      # finished reading method line numbers
+      reading_positions = False
+  return classes_by_name
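+
+# Example dexdump fragment consumed by ProcessDex (hypothetical and abbreviated;
+# only the fields the parser reads are shown):
+#   Class descriptor  : 'Lorg/chromium/Foo;'
+#   Direct methods
+#     name          : 'bar'
+#     type          : '(I)V'
+#     positions     :
+#       0x0000 line=12
+#     locals        :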
+
+
+def ProcessProguardMapping(proguard_mapping_lines, dex):
+  """Parses a proguard mapping file
+
+  This takes proguard mapping file lines and then uses the obfuscated dex to
+  create a mapping of unobfuscated methods to obfuscated ones and vice versa.
+
+  The dex is used because the proguard mapping file only has the names of the
+  obfuscated methods but not their signatures; the dex is therefore read to look
+  up which method with a specific name was mapped to the lines mentioned in the
+  proguard mapping file.
+
+  Args:
+    proguard_mapping_lines: Array of strings, each is a line from the proguard
+                            mapping file (in order).
+    dex: a dict of class name (in type descriptor format but without the
+         enclosing 'L' and ';') to a Class object.
+  Returns:
+    Two ProguardMapping objects: the first maps from obfuscated methods to a set
+    of non-obfuscated ones and from obfuscated class names to original class
+    names; the second maps in the reverse direction. Class names are in type
+    descriptor format (with the enclosing 'L' and ';').
+  """
+  mapping = ProguardMapping()
+  reverse_mapping = ProguardMapping()
+  to_be_obfuscated = []
+  current_class_orig = None
+  current_class_obfs = None
+  for index, line in enumerate(proguard_mapping_lines):
+    if line.strip() == '':
+      continue
+    if not line.startswith(' '):
+      match = PROGUARD_CLASS_MAPPING_RE.search(line)
+      if match is None:
+        raise MalformedProguardMappingException(
+            'Malformed class mapping', index)
+      current_class_orig = match.group('original_name')
+      current_class_obfs = match.group('obfuscated_name')
+      mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
+                              _ToTypeDescriptor(current_class_orig))
+      reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
+                                      _ToTypeDescriptor(current_class_obfs))
+      continue
+
+    assert current_class_orig is not None
+    assert current_class_obfs is not None
+    line = line.strip()
+    match = PROGUARD_METHOD_MAPPING_RE.search(line)
+    # check if this is a method mapping (we ignore field mappings)
+    if match is not None:
+      # check if this line is an inlining by reading ahead 1 line.
+      if index + 1 < len(proguard_mapping_lines):
+        next_match = PROGUARD_METHOD_MAPPING_RE.search(
+            proguard_mapping_lines[index+1].strip())
+        if (next_match and match.group('line_start') is not None
+            and next_match.group('line_start') == match.group('line_start')
+            and next_match.group('line_end') == match.group('line_end')):
+          continue # This is an inlining, skip
+
+      original_method = Method(
+          match.group('original_method_name'),
+          _ToTypeDescriptor(
+              match.group('original_method_class') or current_class_orig),
+          _DotNotationListToTypeDescriptorList(match.group('params')),
+          _ToTypeDescriptor(match.group('return_type')))
+
+      if match.group('line_start') is not None:
+        obfs_methods = (dex[current_class_obfs.replace('.', '/')]
+            .FindMethodsAtLine(
+                match.group('obfuscated_name'),
+                int(match.group('line_start')),
+                int(match.group('line_end'))))
+
+        if obfs_methods is None:
+          continue
+
+        for obfs_method in obfs_methods:
+          mapping.AddMethodMapping(obfs_method, original_method)
+          reverse_mapping.AddMethodMapping(original_method, obfs_method)
+      else:
+        to_be_obfuscated.append(
+            (original_method, match.group('obfuscated_name')))
+
+  for original_method, obfuscated_name in to_be_obfuscated:
+    obfuscated_method = Method(
+        obfuscated_name,
+        reverse_mapping.GetClassMapping(original_method.class_name),
+        reverse_mapping.MapTypeDescriptorList(original_method.param_types),
+        reverse_mapping.MapTypeDescriptor(original_method.return_type))
+    mapping.AddMethodMapping(obfuscated_method, original_method)
+    reverse_mapping.AddMethodMapping(original_method, obfuscated_method)
+  return mapping, reverse_mapping
+
+
+def ProcessProfile(input_profile, proguard_mapping):
+  """Parses an android profile and uses the proguard mapping to (de)obfuscate it
+
+  This takes the android profile lines and for each method or class for the
+  profile, it uses the mapping to either obfuscate or deobfuscate (based on the
+  provided mapping) and returns a Profile object that stores this information.
+
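+  For example, an input line such as
+
+      HPLa;->a(Ljava/lang/String;)I
+
+  is split into its tags ('HP'), class ('La;'), method name, parameter types
+  and return type before being mapped.
+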
+  Args:
+    input_profile: array of lines of the input profile
+    proguard_mapping: a proguard mapping that would map from the classes and
+                      methods in the input profile to the classes and methods
+                      that should be in the output profile.
+
+  Returns:
+    A Profile object that stores the mapped classes and methods together with
+    their tags.
+  """
+  profile = Profile()
+  for index, line in enumerate(input_profile):
+    line = line.strip()
+    if line.startswith('L'):
+      profile.AddClass(proguard_mapping.GetClassMapping(line))
+      continue
+    match = PROFILE_METHOD_RE.search(line)
+    if not match:
+      raise MalformedProfileException("Malformed line", index)
+
+    method = Method(
+        match.group('method_name'),
+        match.group('class_name'),
+        match.group('method_params'),
+        match.group('method_return_type'))
+
+    mapped_methods = proguard_mapping.GetMethodMapping(method)
+    if mapped_methods is None:
+      logging.warning('No method matching "%s" has been found in the proguard '
+                      'mapping file', method)
+      continue
+
+    for original_method in mapped_methods:
+      profile.AddMethod(original_method, match.group('tags'))
+
+  return profile
+
+
+def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping,
+                     dexdump_path, output_filename):
+  """Helper method for obfuscating a profile.
+
+  Args:
+    nonobfuscated_profile: a profile with nonobfuscated symbols.
+    dex_file: path to the dex file matching the mapping.
+    proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used
+      in the dex file.
+    dexdump_path: path to the dexdump utility.
+    output_filename: output filename in which to write the obfuscated profile.
+  """
+  dexinfo = ProcessDex(_RunDexDump(dexdump_path, dex_file))
+  _, reverse_mapping = ProcessProguardMapping(
+      _ReadFile(proguard_mapping), dexinfo)
+  obfuscated_profile = ProcessProfile(
+      _ReadFile(nonobfuscated_profile), reverse_mapping)
+  obfuscated_profile.WriteToFile(output_filename)
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--dexdump-path',
+      required=True,
+      help='Path to dexdump binary.')
+  parser.add_argument(
+      '--dex-path',
+      required=True,
+      help='Path to dex file corresponding to the proguard mapping file.')
+  parser.add_argument(
+      '--proguard-mapping-path',
+      required=True,
+      help='Path to input proguard mapping file corresponding to the dex file.')
+  parser.add_argument(
+      '--output-profile-path',
+      required=True,
+      help='Path to output profile.')
+  parser.add_argument(
+      '--input-profile-path',
+      required=True,
+      help='Path to input profile.')
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      default=False,
+      help='Print verbose output.')
+  obfuscation = parser.add_mutually_exclusive_group(required=True)
+  obfuscation.add_argument('--obfuscate', action='store_true',
+      help='Output an obfuscated profile given a deobfuscated one.')
+  obfuscation.add_argument('--deobfuscate', dest='obfuscate',
+      action='store_false',
+      help='Output a deobfuscated profile given an obfuscated one.')
+  options = parser.parse_args(args)
+
+  if options.verbose:
+    log_level = logging.WARNING
+  else:
+    log_level = logging.ERROR
+  logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+  dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
+  proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
+      _ReadFile(options.proguard_mapping_path), dex)
+  if options.obfuscate:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        reverse_proguard_mapping)
+  else:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        proguard_mapping)
+  profile.WriteToFile(options.output_profile_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/convert_dex_profile_tests.py b/src/build/android/convert_dex_profile_tests.py
new file mode 100644
index 0000000..0ddc5ce
--- /dev/null
+++ b/src/build/android/convert_dex_profile_tests.py
@@ -0,0 +1,276 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+  $ cd build/android
+  $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
+
+DEX_DUMP = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    4:4:void inlined():237:237 -> a
+    4:4:org.chromium.Original getInstance():203 -> a
+    5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+    5:5:void initialize():237 -> a
+    5:5:org.chromium.Original getInstance():203 -> a
+    6:6:void initialize():237:237 -> a
+    9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+    9:9:android.content.Context getContext():219 -> a
+    9:9:void initialize():245 -> a
+    9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+      #0              : (in La;)
+        name          : '<clinit>'
+        type          : '(Ljava/lang/String;)V'
+        code          -
+        catches       : 1
+                0x000f - 0x001e
+                  <any> -> 0x0093
+        positions     :
+                0x0001 line=310
+                0x0057 line=313
+        locals        : 
+      #1              : (in La;)
+        name          : '<init>'
+        type          : '()V'
+        positions     :
+        locals        : 
+  Virtual methods   -
+      #0              : (in La;)
+        name          : 'a'
+        type          : '(Ljava/lang/String;)I'
+        positions     : 
+          0x0000 line=2
+          0x0003 line=3
+          0x001b line=8
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #1              : (in La;)
+        name          : 'c'
+        type          : '(Ljava/lang/Object;)I'
+        positions     : 
+          0x0000 line=8
+          0x0003 line=9
+        locals        : 
+          0x0000 - 0x0021 reg=3 this La; 
+      #2              : (in La;)
+        name          : 'b'
+        type          : '()La;'
+        positions     : 
+          0x0000 line=1
+        locals        : 
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    void initialize() -> c
+    org.chromium.Original getInstance():203 -> a
+    4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
+class GenerateProfileTests(unittest.TestCase):
+  def testProcessDex(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    self.assertIsNotNone(dex['a'])
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
+    clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
+    self.assertEquals(clinit.name, '<clinit>')
+    self.assertEquals(clinit.return_type, 'V')
+    self.assertEquals(clinit.param_types, 'Ljava/lang/String;')
+
+    self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
+    self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))
+
+# pylint: disable=protected-access
+  def testProcessProguardMapping(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, reverse = cp.ProcessProguardMapping(
+        PROGUARD_MAPPING.splitlines(), dex)
+
+    self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    subclassInit = cp.Method(
+        '<init>', 'Lorg/chromium/Original$Subclass;',
+        'Lorg/chromium/Original;B', 'V')
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
+    self.assertEquals(len(mapped), 2)
+    self.assertIn(getInstance, mapped)
+    self.assertNotIn(subclassInit, mapped)
+    self.assertNotIn(
+        cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
+    self.assertIn(initialize, mapped)
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(getInstance, mapped)
+
+    mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
+    self.assertEquals(len(mapped), 1)
+    self.assertIn(another, mapped)
+
+    for from_method, to_methods in mapping._method_mapping.iteritems():
+      for to_method in to_methods:
+        self.assertIn(from_method, reverse.GetMethodMapping(to_method))
+    for from_class, to_class in mapping._class_mapping.iteritems():
+      self.assertEquals(from_class, reverse.GetClassMapping(to_class))
+
+  def testProcessProfile(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+
+    self.assertIn('Lorg/chromium/Original;', profile._classes)
+    self.assertIn(getInstance, profile._methods)
+    self.assertIn(initialize, profile._methods)
+    self.assertIn(another, profile._methods)
+
+    self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P']))
+    self.assertEquals(profile._methods[initialize], set(['H', 'P']))
+    self.assertEquals(profile._methods[another], set(['P']))
+
+  def testEndToEnd(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+    with tempfile.NamedTemporaryFile() as temp:
+      profile.WriteToFile(temp.name)
+      with open(temp.name, 'r') as f:
+        for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
+          self.assertEquals(a.strip(), b.strip())
+
+  def testObfuscateProfile(self):
+    with build_utils.TempDir() as temp_dir:
+      # The dex dump output is used directly as the dex file by passing
+      # /bin/cat as the dexdump program.
+      dex_path = os.path.join(temp_dir, 'dexdump')
+      with open(dex_path, 'w') as dex_file:
+        dex_file.write(DEX_DUMP_2)
+      mapping_path = os.path.join(temp_dir, 'mapping')
+      with open(mapping_path, 'w') as mapping_file:
+        mapping_file.write(PROGUARD_MAPPING_2)
+      unobfuscated_path = os.path.join(temp_dir, 'unobfuscated')
+      with open(unobfuscated_path, 'w') as unobfuscated_file:
+        unobfuscated_file.write(UNOBFUSCATED_PROFILE)
+      obfuscated_path = os.path.join(temp_dir, 'obfuscated')
+      cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat',
+                          obfuscated_path)
+      with open(obfuscated_path) as obfuscated_file:
+        obfuscated_profile = sorted(obfuscated_file.readlines())
+      for a, b in zip(
+          sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile):
+        self.assertEquals(a.strip(), b.strip())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/dcheck_is_off.flags b/src/build/android/dcheck_is_off.flags
new file mode 100644
index 0000000..78b9cc2
--- /dev/null
+++ b/src/build/android/dcheck_is_off.flags
@@ -0,0 +1,17 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Contains flags that are applied only when ENABLE_DCHECK=false.
+
+-checkdiscard @org.chromium.base.annotations.CheckDiscard class ** {
+  *;
+}
+-checkdiscard class ** {
+  @org.chromium.base.annotations.CheckDiscard *;
+}
+
+# Ensure @RemovableInRelease actually works.
+-checkdiscard class ** {
+  @org.chromium.base.annotations.RemovableInRelease *;
+}
diff --git a/src/build/android/devil_chromium.json b/src/build/android/devil_chromium.json
new file mode 100644
index 0000000..0bfcfd8
--- /dev/null
+++ b/src/build/android/devil_chromium.json
@@ -0,0 +1,120 @@
+{
+  "config_type": "BaseConfig",
+  "dependencies": {
+    "aapt": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/aapt"
+          ]
+        }
+      }
+    },
+    "adb": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/platform-tools/adb"
+          ]
+        }
+      }
+    },
+    "android_build_tools_libc++": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so"
+          ]
+        }
+      }
+    },
+    "android_sdk": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public"
+          ]
+        }
+      }
+    },
+    "dexdump": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump"
+          ]
+        }
+      }
+    },
+    "split-select": {
+      "file_info": {
+        "linux2_x86_64": {
+          "local_paths": [
+            "../../third_party/android_sdk/public/build-tools/27.0.3/split-select"
+          ]
+        }
+      }
+    },
+    "simpleperf": {
+      "file_info": {
+        "android_armeabi-v7a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf"
+          ]
+        },
+        "android_arm64-v8a": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf"
+          ]
+        },
+        "android_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf"
+          ]
+        },
+        "android_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf"
+          ]
+        },
+        "linux_x86": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf"
+          ]
+        },
+        "linux_x86_64": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf"
+          ]
+        }
+      }
+    },
+    "simpleperf_scripts": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_ndk/simpleperf"
+          ]
+        }
+      }
+    },
+    "llvm-symbolizer": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer"
+          ]
+        }
+      }
+    },
+    "bundletool": {
+      "file_info": {
+        "default": {
+          "local_paths": [
+            "../../third_party/android_build_tools/bundletool/bundletool-all-1.4.0.jar"
+          ]
+        }
+      }
+    }
+  }
+}
diff --git a/src/build/android/devil_chromium.py b/src/build/android/devil_chromium.py
new file mode 100644
index 0000000..20ae1e3
--- /dev/null
+++ b/src/build/android/devil_chromium.py
@@ -0,0 +1,200 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configures devil for use in chromium."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.insert(1, host_paths.DEVIL_PATH)
+
+from devil import devil_env
+from devil.android.ndk import abis
+
+_BUILD_DIR = os.path.join(constants.DIR_SOURCE_ROOT, 'build')
+if _BUILD_DIR not in sys.path:
+  sys.path.insert(1, _BUILD_DIR)
+
+import gn_helpers
+
+_DEVIL_CONFIG = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), 'devil_chromium.json'))
+
+_DEVIL_BUILD_PRODUCT_DEPS = {
+  'chromium_commands': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['lib.java', 'chromium_commands.dex.jar'],
+    }
+  ],
+  'forwarder_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['forwarder_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['forwarder_dist'],
+    },
+  ],
+  'forwarder_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['host_forwarder'],
+    },
+  ],
+  'md5sum_device': [
+    {
+      'platform': 'android',
+      'arch': abis.ARM,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.ARM_64,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': 'mips64',
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86,
+      'path_components': ['md5sum_dist'],
+    },
+    {
+      'platform': 'android',
+      'arch': abis.X86_64,
+      'path_components': ['md5sum_dist'],
+    },
+  ],
+  'md5sum_host': [
+    {
+      'platform': 'linux2',
+      'arch': 'x86_64',
+      'path_components': ['md5sum_bin_host'],
+    },
+  ],
+}
+
+
+def _UseLocalBuildProducts(output_directory, devil_dynamic_config):
+  output_directory = os.path.abspath(output_directory)
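+  # Produces entries shaped like the static config above, e.g. (paths are
+  # illustrative):
+  #   {'md5sum_host': {'file_info': {'linux2_x86_64':
+  #       {'local_paths': ['/abs/out/Default/md5sum_bin_host']}}}}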
+  devil_dynamic_config['dependencies'] = {
+      dep_name: {
+          'file_info': {
+              '%s_%s' % (dep_config['platform'], dep_config['arch']): {
+                  'local_paths': [
+                      os.path.join(output_directory,
+                                   *dep_config['path_components']),
+                  ],
+              }
+              for dep_config in dep_configs
+          }
+      }
+      for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems()
+  }
+
+
+def _BuildWithChromium():
+  """Returns value of gclient's |build_with_chromium|."""
+  gni_path = os.path.join(_BUILD_DIR, 'config', 'gclient_args.gni')
+  if not os.path.exists(gni_path):
+    return False
+  with open(gni_path) as f:
+    data = f.read()
+  args = gn_helpers.FromGNArgs(data)
+  return args.get('build_with_chromium', False)
+
+
+def Initialize(output_directory=None, custom_deps=None, adb_path=None):
+  """Initializes devil with chromium's binaries and third-party libraries.
+
+  This includes:
+    - Libraries:
+      - the android SDK ("android_sdk")
+    - Build products:
+      - host & device forwarder binaries
+          ("forwarder_device" and "forwarder_host")
+      - host & device md5sum binaries ("md5sum_device" and "md5sum_host")
+
+  Args:
+    output_directory: An optional path to the output directory. If not set,
+      no built dependencies are configured.
+    custom_deps: An optional dictionary specifying custom dependencies.
+      This should be of the form:
+
+        {
+          'dependency_name': {
+            'platform': 'path',
+            ...
+          },
+          ...
+        }
+    adb_path: An optional path to use for the adb binary. If not set, this uses
+      the adb binary provided by the Android SDK.
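+
+  Example (a minimal sketch; the output directory path is illustrative):
+
+    devil_chromium.Initialize(output_directory='out/Default')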
+  """
+  build_with_chromium = _BuildWithChromium()
+
+  devil_dynamic_config = {
+    'config_type': 'BaseConfig',
+    'dependencies': {},
+  }
+  if build_with_chromium and output_directory:
+    # Non-chromium users of chromium's //build directory fetch build products
+    # from google storage rather than use locally built copies. Chromium uses
+    # locally-built copies so that changes to the tools can be easily tested.
+    _UseLocalBuildProducts(output_directory, devil_dynamic_config)
+
+  if custom_deps:
+    devil_dynamic_config['dependencies'].update(custom_deps)
+  if adb_path:
+    devil_dynamic_config['dependencies'].update({
+      'adb': {
+        'file_info': {
+          devil_env.GetPlatform(): {
+            'local_paths': [adb_path]
+          }
+        }
+      }
+    })
+
+  config_files = [_DEVIL_CONFIG] if build_with_chromium else None
+  devil_env.config.Initialize(configs=[devil_dynamic_config],
+                              config_files=config_files)
diff --git a/src/build/android/devil_chromium.pydeps b/src/build/android/devil_chromium.pydeps
new file mode 100644
index 0000000..4143805
--- /dev/null
+++ b/src/build/android/devil_chromium.pydeps
@@ -0,0 +1,39 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/six/six.py
+../gn_helpers.py
+devil_chromium.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
diff --git a/src/build/android/diff_resource_sizes.py b/src/build/android/diff_resource_sizes.py
new file mode 100755
index 0000000..eefb6cd
--- /dev/null
+++ b/src/build/android/diff_resource_sizes.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs resource_sizes.py on two apks and outputs the diff."""
+
+from __future__ import print_function
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from pylib.constants import host_paths
+from pylib.utils import shared_preference_utils
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+  from tracing.value import convert_chart_json # pylint: disable=import-error
+
+_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
+with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')):
+  from util import build_utils  # pylint: disable=import-error
+
+
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes_diff',
+    'benchmark_description': 'APK resource size diff information',
+    'trace_rerun_options': [],
+    'charts': {},
+}
+
+_CHARTJSON_FILENAME = 'results-chart.json'
+_HISTOGRAMS_FILENAME = 'perf_results.json'
+
+
+def DiffResults(chartjson, base_results, diff_results):
+  """Reports the diff between the two given results.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in, or None
+        to only print results.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title, trace_title,
+          diff_results['charts'][graph_title][trace_title]['value']
+              - trace['value'],
+          trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def AddIntermediateResults(chartjson, base_results, diff_results):
+  """Copies the intermediate size results into the output chartjson.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_base_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+  # Both base_results and diff_results should have the same charts/traces, but
+  # loop over them separately in case they don't
+  for graph_title, graph in diff_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_diff_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def _CreateArgparser():
+  def chromium_path(arg):
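+    # Converts a GN-style source-absolute path, e.g. '//build/android', into
+    # a filesystem path under the Chromium checkout.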
+    if arg.startswith('//'):
+      return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:])
+    return arg
+
+  argparser = argparse.ArgumentParser(
+      description='Diff resource sizes of two APKs. Arguments not listed here '
+                  'will be passed on to both invocations of resource_sizes.py.')
+  argparser.add_argument('--chromium-output-directory-base',
+                         dest='out_dir_base',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the base '
+                              'APK, i.e. what the size increase/decrease will '
+                              'be measured from.')
+  argparser.add_argument('--chromium-output-directory-diff',
+                         dest='out_dir_diff',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the diff '
+                              'APK.')
+  argparser.add_argument('--chartjson',
+                         action='store_true',
+                         help='DEPRECATED. Use --output-format=chartjson '
+                              'instead.')
+  argparser.add_argument('--output-format',
+                         choices=['chartjson', 'histograms'],
+                         help='Output the results to a file in the given '
+                              'format instead of printing the results.')
+  argparser.add_argument('--include-intermediate-results',
+                         action='store_true',
+                         help='Include the results from the resource_sizes.py '
+                              'runs in the chartjson output.')
+  argparser.add_argument('--output-dir',
+                         default='.',
+                         type=chromium_path,
+                         help='Directory to save chartjson to.')
+  argparser.add_argument('--base-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the base APK, i.e. what the size '
+                              'increase/decrease will be measured from.')
+  argparser.add_argument('--diff-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the diff APK, i.e. the APK whose size '
+                              'increase/decrease will be measured against the '
+                              'base APK.')
+  return argparser
+
+
+def main():
+  args, unknown_args = _CreateArgparser().parse_known_args()
+  # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+  if args.chartjson:
+    args.output_format = 'chartjson'
+
+  chartjson = _BASE_CHART.copy() if args.output_format else None
+
+  with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
+    # Run resource_sizes.py on the two APKs
+    resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
+    shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+                   + unknown_args)
+
+    base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
+    if args.out_dir_base:
+      base_args += ['--chromium-output-directory', args.out_dir_base]
+    try:
+      subprocess.check_output(base_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
+    if args.out_dir_diff:
+      diff_args += ['--chromium-output-directory', args.out_dir_diff]
+    try:
+      subprocess.check_output(diff_args, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise
+
+    # Combine the separate results
+    base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+    diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
+    base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
+    diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
+    DiffResults(chartjson, base_results, diff_results)
+    if args.include_intermediate_results:
+      AddIntermediateResults(chartjson, base_results, diff_results)
+
+    if args.output_format:
+      chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+                                    _CHARTJSON_FILENAME)
+      logging.critical('Dumping diff chartjson to %s', chartjson_path)
+      with open(chartjson_path, 'w') as outfile:
+        json.dump(chartjson, outfile)
+
+      if args.output_format == 'histograms':
+        histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+        if histogram_result.returncode != 0:
+          logging.error('chartjson conversion failed with error: %s',
+              histogram_result.stdout)
+          return 1
+
+        histogram_path = os.path.join(os.path.abspath(args.output_dir),
+                                      _HISTOGRAMS_FILENAME)
+        logging.critical('Dumping diff histograms to %s', histogram_path)
+        with open(histogram_path, 'w') as json_file:
+          json_file.write(histogram_result.stdout)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/docs/README.md b/src/build/android/docs/README.md
new file mode 100644
index 0000000..6392f7d
--- /dev/null
+++ b/src/build/android/docs/README.md
@@ -0,0 +1,13 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [java_toolchain.md](java_toolchain.md)
+* [java_optimization.md](java_optimization.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/src/build/android/docs/android_app_bundles.md b/src/build/android/docs/android_app_bundles.md
new file mode 100644
index 0000000..e71fe27
--- /dev/null
+++ b/src/build/android/docs/android_app_bundles.md
@@ -0,0 +1,205 @@
+# Introduction
+
+This document describes how the Chromium build system supports Android app
+bundles.
+
+[TOC]
+
+# Overview of app bundles
+
+An Android app bundle is an alternative distribution format for Android
+applications on the Google Play Store that reduces the size of the binaries
+sent for installation to individual devices running Android L and later. For
+more information, see the official Android
+[documentation](https://developer.android.com/guide/app-bundle/).
+
+For the context of this document, the most important points are:
+
+  - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
+    be installed directly on a device.
+
+  - Instead, it must be processed into a set of installable split APKs, which
+    are stored inside a special zip archive (e.g. `foo.apks`).
+
+  - The splitting can be based on various criteria: e.g. language or screen
+    density for resources, or cpu ABI for native code.
+
+  - The bundle also uses the notion of dynamic features modules (DFMs) to
+    separate several application features. Each module has its own code, assets
+    and resources, and can be installed separately from the rest of the
+    application if needed.
+
+  - The main application itself is stored in the '`base`' module (this name
+    cannot be changed).
+
+
+# Declaring app bundles with GN templates
+
+Here's an example that shows how to declare a simple bundle that contains a
+single base module, which enables language-based splits:
+
+```gn
+
+  # First, declare the base bundle module. The base module is the one
+  # that contains the main application's code, resources and assets.
+  android_app_bundle_module("foo_base_module") {
+    # Declarations are similar to android_apk here.
+    ...
+  }
+
+  # Second, declare the bundle itself.
+  android_app_bundle("foo_bundle") {
+    # Indicate the base module to use for this bundle
+    base_module_target = ":foo_base_module"
+
+    # The name of our bundle file (without any suffix). Default would
+    # be 'foo_bundle' otherwise.
+    bundle_name = "FooBundle"
+
+    # Enable language-based splits for this bundle. Which means that
+    # resources and assets specific to a given language will be placed
+    # into their own split APK in the final .apks archive.
+    enable_language_splits = true
+
+    # Proguard settings must be passed at the bundle, not module, target.
+    proguard_enabled = !is_java_debug
+  }
+```
+
+When generating the `foo_bundle` target with Ninja, you will end up with
+the following:
+
+  - The bundle file under `out/Release/apks/FooBundle.aab`
+
+  - A helper script called `out/Release/bin/foo_bundle`, which can be used
+    to install / launch / uninstall the bundle on local devices.
+
+    This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
+    to see all possible commands supported by the script.
+
+
+# Declaring dynamic feature modules with GN templates
+
+Please see
+[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
+more details. In short, if you need more modules besides the base one, you
+will need to list all the extra ones using the `extra_modules` variable, which
+takes a list of GN scopes, as in:
+
+```gn
+
+  android_app_bundle_module("foo_base_module") {
+    ...
+  }
+
+  android_app_bundle_module("foo_extra_module") {
+    ...
+  }
+
+  android_app_bundle("foo_bundle") {
+    base_module_target = ":foo_base_module"
+
+    extra_modules = [
+      { # NOTE: Scopes require one field per line, and no comma separators.
+        name = "my_module"
+        module_target = ":foo_extra_module"
+      }
+    ]
+
+    ...
+  }
+```
+
+Note that each extra module is identified by a unique name, which cannot
+be '`base`'.
+
+
+# Bundle signature issues
+
+Signing an app bundle is not necessary, unless you want to upload it to the
+Play Store. Since this process is very slow (it uses `jarsigner` instead of
+the much faster `apkbuilder`), you can control it with the `sign_bundle`
+variable of the `android_app_bundle()` target.
+
+The `.apks` archive however always contains signed split APKs. The keystore
+path/password/alias being used are the default ones, unless you use custom
+values when declaring the bundle itself, as in:
+
+```gn
+  android_app_bundle("foo_bundle") {
+    ...
+    keystore_path = "//path/to/keystore"
+    keystore_password = "K3y$t0Re-Pa$$w0rd"
+    keystore_name = "my-signing-key-name"
+  }
+```
+
+These values are not stored in the bundle itself, but in the wrapper script,
+which will use them to generate the `.apks` archive for you. This allows you
+to properly install updates on top of existing applications on any device.
+
+
+# Proguard and bundles
+
+When using an app bundle that is made of several modules, it is crucial to
+ensure that proguard, if enabled:
+
+- Keeps the obfuscated class names used by each module consistent.
+- Does not remove classes that are not used in one module, but referenced
+  by others.
+
+To achieve this, a special scheme called *synchronized proguarding* is
+performed, which consists of the following steps:
+
+- The list of unoptimized .jar files from all modules are sent to a single
+  proguard command. This generates a new temporary optimized *group* .jar file.
+
+- Each module extracts the optimized class files from the optimized *group*
+  .jar file, to generate its own, module-specific, optimized .jar.
+
+- Each module-specific optimized .jar is then sent to dex generation.
+
+This synchronized proguarding step is added by the `android_app_bundle()` GN
+template. In practice this means the following:
+
+  - `proguard_enabled` must be passed to `android_app_bundle` targets, but not
+    to `android_app_bundle_module` ones.
+
+  - `proguard_configs` can be still passed to individual modules, just
+    like regular APKs. All proguard configs will be merged during the
+    synchronized proguard step.
+
+
+# Manual generation and installation of .apks archives
+
+Note that the `foo_bundle` script knows how to generate the .apks archive
+from the bundle file, and install it to local devices for you. For example,
+to install and launch a bundle, use:
+
+```sh
+  out/Release/bin/foo_bundle run
+```
+
+If you want to manually inspect or use the `.apks` archive, use the following
+command to generate it:
+
+```sh
+  out/Release/bin/foo_bundle build-bundle-apks \
+      --output-apks=/tmp/BundleFoo.apks
+```
+
+All split APKs within the archive will be properly signed, and you will be
+able to look at its content (with `unzip -l`), or install it manually with:
+
+```sh
+  build/android/gyp/bundletool.py install-apks \
+      --apks=/tmp/BundleFoo.apks \
+      --adb=$(which adb)
+```
+
+The task of examining the manifest is simplified by running the following,
+which dumps the application manifest as XML to stdout:
+
+```sh
+  build/android/gyp/bundletool.py dump-manifest
+```
diff --git a/src/build/android/docs/build_config.md b/src/build/android/docs/build_config.md
new file mode 100644
index 0000000..8a301c8
--- /dev/null
+++ b/src/build/android/docs/build_config.md
@@ -0,0 +1,168 @@
+# Introduction
+
+This document describes the `.build_config` files that are used by the
+Chromium build system for Android-specific targets like APK, resources,
+and more.
+
+[TOC]
+
+# I. Overview of .build_config files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config` files are written during the build to store the needed
+per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept arguments using `@FileArg()` references,
+which look like:
+
+    --some-param=@FileArg(<filename>:<key1>:<key2>:..<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[key1][key2]...[keyN]` for the `--some-param` option.
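+
+For illustration, here is a minimal sketch of how such a reference could be
+expanded (the helper name and regular expression are assumptions, not the
+actual implementation):
+
+```python
+import json
+import re
+
+# Matches @FileArg(<filename>:<key1>:...:<keyN>) inside an argument.
+_FILE_ARG_RE = re.compile(r'@FileArg\(([^)]+)\)')
+
+
+def ExpandFileArg(arg):
+  """Returns |arg| with any @FileArg() reference replaced by its value."""
+  match = _FILE_ARG_RE.search(arg)
+  if not match:
+    return arg
+  parts = match.group(1).split(':')
+  with open(parts[0]) as f:
+    value = json.load(f)
+  for key in parts[1:]:
+    value = value[key]  # Walk down the nested JSON keys.
+  return arg[:match.start()] + str(value) + arg[match.end():]
+```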
+
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+  custom_package = "org.chromium.ui"
+  resource_dirs = [ "java/res" ]
+  deps = [
+    ":ui_strings_grd",
+  ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`:
+
+```json
+{
+  "deps_info": {
+    "deps_configs": [
+      "gen/ui/android/ui_strings_grd.build_config"
+    ],
+    "name": "ui_java_resources.build_config",
+    "package_name": "org.chromium.ui",
+    "path": "gen/ui/android/ui_java_resources.build_config",
+    "r_text": "gen/ui/android/ui_java_resources_R.txt",
+    "resources_dirs": [
+      "../../ui/android/java/res"
+    ],
+    "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+    "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+    "type": "android_resources"
+  },
+  "gradle": {},
+  "resources": {
+    "dependency_zips": [
+      "resource_zips/ui/android/ui_strings_grd.resources.zip"
+    ],
+    "extra_package_names": [],
+  }
+}
+```
+
+NOTE: All path values in `.build_config` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config files:
+
+They are generated by the GN [`write_build_config()`](gn_write_build_config)
+internal template, which ends up invoking
+[`write_build_config.py`](write_build_config_py). For our example above, this
+is with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+    --type=android_resources \
+    --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+    --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \
+    --build-config gen/ui/android/ui_java_resources.build_config \
+    --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+    --package-name org.chromium.ui \
+    --r-text gen/ui/android/ui_java_resources_R.txt \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_strings_grd.build_config`), and collecting their
+`deps_info['resources_zip']` values.
+
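+A minimal sketch of that collection step (the function name is made up; the
+real logic lives in `write_build_config.py`):
+
+```python
+import json
+
+
+def CollectDepValues(deps_config_paths, key):
+  """Reads each dependency's .build_config and collects deps_info[key]."""
+  values = []
+  for path in deps_config_paths:
+    with open(path) as f:
+      config = json.load(f)
+    values.append(config['deps_info'][key])
+  return values
+
+
+# e.g. CollectDepValues(['gen/ui/android/ui_strings_grd.build_config'],
+#                       'resources_zip')
+```
+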
+Because a target's `.build_config` file will always be generated after
+that of all of its dependencies,
+[`write_build_config.py`](write_build_config_py) can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
+
+
+# III. Usage of .build_config files:
+
+In addition to being parsed by `write_build_config.py` when they are listed
+in the `--deps-configs` of a given target, the `.build_config` files are used
+by other scripts under [build/android/gyp/] to perform their build steps.
+
+For example, the GN `android_resources` template uses it to invoke the
+[`process_resources.py`] script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+    --depfile gen/ui/android/ui_java_resources_1.d \
+    --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-29/android.jar \
+    --aapt-path ../../third_party/android_sdk/public/build-tools/29.0.2/aapt \
+    --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \
+    --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \
+    --resource-dirs=\[\"../../ui/android/java/res\"\] \
+    --debuggable \
+    --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+    --r-text-out gen/ui/android/ui_java_resources_R.txt \
+    --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+    --non-constant-id \
+    --custom-package org.chromium.ui \
+    --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config` files.
+
+This format is decided between internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since this changes rather
+often, the format documentation is kept inside the Python script itself, but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config format documentation
+build/android/gyp/write_build_config.py \
+  --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/write_build_config.py
diff --git a/src/build/android/docs/class_verification_failures.md b/src/build/android/docs/class_verification_failures.md
new file mode 100644
index 0000000..e3e4745
--- /dev/null
+++ b/src/build/android/docs/class_verification_failures.md
@@ -0,0 +1,286 @@
+# Class Verification Failures
+
+[TOC]
+
+## What's this all about?
+
+This document aims to explain class verification on Android, how this can affect
+app performance, how to identify problems, and chromium-specific solutions. For
+simplicity, this document focuses on how class verification is implemented by
+ART, the virtual machine which replaced Dalvik starting in Android Lollipop.
+
+## What is class verification?
+
+The Java language requires any virtual machine to _verify_ the class files it
+loads and executes. Generally, verification is extra work the virtual machine is
+responsible for doing, on top of the work of loading the class and performing
+[class initialization][1].
+
+A class may fail verification for a wide variety of reasons, but in practice
+it's usually because the class's code refers to unknown classes or methods. An
+example case might look like:
+
+```java
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return mWindow.isWideColorGamut();
+        }
+        return false;
+    }
+}
+```
+
+### Why does that fail?
+
+In this example, `WindowHelper` is a helper class intended to help callers
+figure out wide color gamut support, even on pre-OMR1 devices. However, this
+class will fail class verification on pre-OMR1 devices, because it refers to
+[`Window#isWideColorGamut()`][2] (new-in-OMR1), which appears to be an undefined
+method.
+
+### Huh? But we have an SDK check!
+
+SDK checks are completely irrelevant for class verification. Although readers
+can see we'll never call the new-in-OMR1 API unless we're on >= OMR1 devices,
+the Oreo version of ART doesn't know `isWideColorGamut()` was added in next
+year's release. From ART's perspective, we may as well be calling
+`methodWhichDoesNotExist()`, which would clearly be unsafe.
+
+All the SDK check does is protect us from crashing at runtime if we call this
+method on Oreo or below.
+
+### Class verification on ART
+
+While the above is a mostly general description of class verification, it's
+important to understand how the Android runtime handles this.
+
+Since class verification is extra work, ART has an optimization called **AOT
+("ahead-of-time") verification**¹. Immediately after installing an app, ART will
+scan the dex files and verify as many classes as it can. If a class fails
+verification, this is usually a "soft failure" (hard failures are uncommon), and
+ART marks the class with the status `RetryVerificationAtRuntime`.
+
+`RetryVerificationAtRuntime`, as the name suggests, means ART must try again to
+verify the class at runtime. ART does so the first time you access the class
+(right before class initialization/`<clinit>()` method). However, depending on
+the class, this verification step can be very expensive (we've observed cases
+which take [several milliseconds][3]). Since apps tend to initialize most of
+their classes during startup, verification significantly increases startup time.
+
+Another minor cost to failing class verification is that ART cannot optimize
+classes which fail verification, so **all** methods in the class will perform
+slower at runtime, even after the verification step.
+
+*** aside
+¹ AOT _verification_ should not be confused with AOT _compilation_ (another ART
+feature). Unlike compilation, AOT verification happens during install time for
+every application, whereas recent versions of ART aim to apply AOT compilation
+selectively to optimize space.
+***
+
+## Chromium's solution
+
+In Chromium, we try to avoid doing class verification at runtime by
+manually out-of-lining all Android API usage like so:
+
+```java
+public class ApiHelperForOMR1 {
+    public static boolean isWideColorGamut(Window window) {
+        return window.isWideColorGamut();
+    }
+}
+
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return ApiHelperForOMR1.isWideColorGamut(mWindow);
+        }
+        return false;
+    }
+}
+```
+
+This pushes the class verification failure out of `WindowHelper` and into the
+new `ApiHelperForOMR1` class. There's no magic here: `ApiHelperForOMR1` will
+fail class verification on Oreo and below, for the same reason `WindowHelper`
+did previously.
+
+The key is that, while `WindowHelper` is used on all API levels, it only calls
+into `ApiHelperForOMR1` on OMR1 and above. Because we never use
+`ApiHelperForOMR1` on Oreo and below, we never load and initialize the class,
+and thanks to ART's lazy runtime class verification, we never actually retry
+verification. **Note:** `list_class_verification_failures.py` will still list
+`ApiHelperFor*` classes in its output, although these don't cause performance
+issues.
+
+### Creating ApiHelperFor\* classes
+
+There are several examples throughout the code base, but such classes should
+look as follows:
+
+```java
+/**
+ * Utility class to use new APIs that were added in O_MR1 (API level 27).
+ * These need to exist in a separate class so that Android framework can successfully verify
+ * classes without encountering the new APIs.
+ */
+@VerifiesOnOMR1
+@TargetApi(Build.VERSION_CODES.O_MR1)
+public class ApiHelperForOMR1 {
+    private ApiHelperForOMR1() {}
+
+    // ...
+}
+```
+
+* `@VerifiesOnOMR1`: this is a chromium-defined annotation to tell proguard
+  (and similar tools) not to inline this class or its methods (since that would
+  defeat the point of out-of-lining!)
+* `@TargetApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK to
+  use OMR1 APIs since this class is only used on OMR1 and above. Replace
+  `O_MR1` with the [appropriate constant][4], depending on when the APIs were
+  introduced.
+* Don't put any `SDK_INT` checks inside this class, because it must only be
+  called on >= OMR1.
+
+### Out-of-lining if your method has a new type in its signature
+
+Sometimes you'll run into a situation where a class **needs** to have a method
+which either accepts a parameter which is a new type or returns a new type
+(e.g., externally-facing code, such as WebView's glue layer). Even though it's
+impossible to write such a class without referring to the new type, it's still
+possible to avoid failing class verification. ART has a useful optimization: if
+your class only moves a value between registers (i.e., it doesn't call any
+methods on the value or access any of its fields), then ART will not check for
+the existence of that value's type. This means you can write your class like so:
+
+```java
+public class FooBar {
+    // FooBar needs to have the getNewTypeInAndroidP method, but it would be
+    // expensive to fail verification. This method will only be called on >= P
+    // but other methods on the class will be used on lower OS versions (and
+    // also can't be factored into another class).
+    public NewTypeInAndroidP getNewTypeInAndroidP() {
+        assert Build.VERSION.SDK_INT >= Build.VERSION_CODES.P;
+        // Stores a NewTypeInAndroidP in the return register, but doesn't do
+        // anything else with it
+        return ApiHelperForP.getNewTypeInAndroidP();
+    }
+
+    // ...
+}
+
+@VerifiesOnP
+@TargetApi(Build.VERSION_CODES.P)
+public class ApiHelperForP {
+    public static NewTypeInAndroidP getNewTypeInAndroidP() {
+        return new NewTypeInAndroidP();
+    }
+
+    // ...
+}
+```
+
+**Note:** this only works in ART (L+), not Dalvik (KitKat and earlier).
+
+## Investigating class verification failures
+
+Class verification is generally surprising and nonintuitive. Fortunately, the
+ART team has provided tools to investigate errors (and the chromium team has
+built helpful wrappers).
+
+### Listing failing classes
+
+The main starting point is to figure out which classes fail verification (those
+which ART marks as `RetryVerificationAtRuntime`). This can be done for **any
+Android app** (it doesn't have to be from the chromium project) like so:
+
+```shell
+# Install the app first. Using Chrome as an example.
+autoninja -C out/Default chrome_public_apk
+out/Default/bin/chrome_public_apk install
+
+# List all classes marked as 'RetryVerificationAtRuntime'
+build/android/list_class_verification_failures.py --package="org.chromium.chrome"
+W    0.000s Main  Skipping deobfuscation because no map file was provided.
+first.failing.Class
+second.failing.Class
+...
+```
+
+"Skipping deobfuscation because no map file was provided" is a warning, since
+many Android applications (including Chrome's release builds) are built with
+proguard (or similar tools) to obfuscate Java classes and shrink code. Although
+it's safe to ignore this warning if you don't obfuscate Java code, the script
+knows how to deobfuscate classes for you (needed when `is_debug = false` or
+`is_java_debug = false`, i.e. whenever obfuscation is enabled):
+
+```shell
+build/android/list_class_verification_failures.py --package="org.chromium.chrome" \
+  --mapping=<path/to/file.mapping> # ex. out/Release/apks/ChromePublic.apk.mapping
+android.support.design.widget.AppBarLayout
+android.support.design.widget.TextInputLayout
+...
+```
+
+Googlers can also download mappings for [official
+builds](http://go/webview-official-builds).
+
+### Understanding the reason for the failure
+
+The ART team also provides tooling for this. You can configure ART on a rooted
+device to log all class verification failures (during installation), at which
+point the cause is much clearer:
+
+```shell
+# Enable ART logging (requires root). Note the 2 pairs of quotes!
+adb root
+adb shell setprop dalvik.vm.dex2oat-flags '"--runtime-arg -verbose:verifier"'
+
+# Restart Android services to pick up the settings
+adb shell stop && adb shell start
+
+# Optional: clear logs which aren't relevant
+adb logcat -c
+
+# Install the app and check for ART logs
+adb install -d -r out/Default/apks/ChromePublic.apk
+adb logcat | grep 'dex2oat'
+...
+... I dex2oat : Soft verification failures in boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu)
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xF0] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xFA] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+...
+```
+
+*** note
+**Note:** you may want to avoid `adb` wrapper scripts (ex.
+`out/Default/bin/chrome_public_apk install`). These scripts cache the package
+manager state to optimize away idempotent installs. However, in this case, we
+**do** want to trigger idempotent installs, because we want to re-trigger AOT
+verification.
+***
+
+In the above example, `SelectionPopupControllerImpl` fails verification on Oreo
+(API 26) because it refers to [`TextClassification.getActions()`][5], which was
+added in Pie (API 28). If `SelectionPopupControllerImpl` is used on pre-Pie
+devices, then `TextClassification.getActions()` must be out-of-lined.
+
+## See also
+
+* Bugs or questions? Contact ntfschr@chromium.org
+* ART team's Google I/O talks: [2014](https://youtu.be/EBlTzQsUoOw) and later
+  years
+* Analysis of class verification in Chrome and WebView (Google-only
+  [doc](http://go/class-verification-chromium-analysis))
+* Presentation on class verification in Chrome and WebView (Google-only
+  [slide deck](http://go/class-verification-chromium-slides))
+
+[1]: https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-5.html#jvms-5.5
+[2]: https://developer.android.com/reference/android/view/Window.html#isWideColorGamut()
+[3]: https://bugs.chromium.org/p/chromium/issues/detail?id=838702
+[4]: https://developer.android.com/reference/android/os/Build.VERSION_CODES
+[5]: https://developer.android.com/reference/android/view/textclassifier/TextClassification.html#getActions()
diff --git a/src/build/android/docs/coverage.md b/src/build/android/docs/coverage.md
new file mode 100644
index 0000000..17c83c6
--- /dev/null
+++ b/src/build/android/docs/coverage.md
@@ -0,0 +1,73 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for android
+instrumentation and JUnit tests.
+
+[TOC]
+
+## How JaCoCo coverage works
+
+In order to use JaCoCo code coverage, we need to create pre-instrumented class
+files at build time and **.exec** coverage files at runtime. Then we process
+them using the **build/android/generate_jacoco_report.py** script.
+
+## How to collect coverage data
+
+1. Use the following GN build arguments:
+
+  ```gn
+  target_os = "android"
+  use_jacoco_coverage = true
+  ```
+
+   Now when building, pre-instrumented files will be created in the build directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to save
+   the .exec file. For example, you can run chrome JUnit tests:
+   `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. The coverage results of JUnit and instrumentation tests will be merged
+   automatically if they are in the same directory.
+
+## How to generate coverage report
+
+1. Once the .exec files have been generated, we can create a JaCoCo
+   HTML/XML/CSV report using `generate_jacoco_report.py`, for example:
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+     --format html \
+     --output-dir /tmp/coverage_report/ \
+     --coverage-dir /tmp/coverage/ \
+     --sources-json-dir out/Debug/ \
+  ```
+   Then an index.html containing coverage info will be created in the output
+   directory:
+
+  ```
+  [INFO] Loading execution data file /tmp/coverage/testTitle.exec.
+  [INFO] Loading execution data file /tmp/coverage/testSelected.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToSelect.exec.
+  [INFO] Loading execution data file /tmp/coverage/testClickToClose.exec.
+  [INFO] Loading execution data file /tmp/coverage/testThumbnail.exec.
+  [INFO] Analyzing 58 classes.
+  ```
+
+2. For XML and CSV reports, we need to specify `--output-file` instead of
+   `--output-dir` since only a single file is generated as the XML or CSV
+   report.
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format xml \
+    --output-file /tmp/coverage_report/report.xml \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/ \
+  ```
+
+   or
+
+  ```shell
+  build/android/generate_jacoco_report.py \
+    --format csv \
+    --output-file /tmp/coverage_report/report.csv \
+    --coverage-dir /tmp/coverage/ \
+    --sources-json-dir out/Debug/ \
+  ```
diff --git a/src/build/android/docs/java_optimization.md b/src/build/android/docs/java_optimization.md
new file mode 100644
index 0000000..0ba0d50
--- /dev/null
+++ b/src/build/android/docs/java_optimization.md
@@ -0,0 +1,149 @@
+# Optimizing Java Code
+
+This doc describes how Java code is optimized in Chrome on Android and how to
+deal with issues caused by the optimizer. For tips on how to write optimized
+code, see [//docs/speed/binary_size/optimization_advice.md#optimizing-java-code](/docs/speed/binary_size/optimization_advice.md#optimizing-java-code).
+
+[TOC]
+
+## ProGuard vs R8
+
+ProGuard is the original open-source tool used by many Android applications to
+perform whole-program bytecode optimization. [R8](https://r8.googlesource.com/r8)
+is a re-implementation that is used by Chrome (and is the default for Android
+Studio). The terms "ProGuard" and "R8" are used interchangeably within Chromium,
+but generally both refer to whichever tool is providing Java code optimizations.
+
+## What does ProGuard do?
+
+1. Shrinking: ProGuard will remove unused code. This is especially useful
+   when depending on third party libraries where only a few functions are used.
+
+2. Obfuscation: ProGuard will rename classes/fields/methods to use shorter
+   names. Obfuscation is used for minification purposes only (not security).
+
+3. Optimization: ProGuard performs a series of optimizations to shrink code
+   further through various approaches (ex. inlining, outlining, class merging,
+   etc).
+
+## Build Process
+
+ProGuard is enabled only for release builds of Chrome because it is a slow build
+step and breaks Java debugging. It can also be enabled manually via the GN arg:
+```is_java_debug = false```
+
+### ProGuard configuration files
+
+Most GN Java targets can specify ProGuard configuration files by setting the
+`proguard_configs` variable. [//base/android/proguard](/base/android/proguard)
+contains common flags shared by most Chrome applications.
+
+### GN build rules
+
+When `is_java_debug = false` and a target has enabled ProGuard, the `proguard`
+step generates the `.dex` files for the application. The `proguard` step takes
+as input a list of `.jar` files, runs R8/ProGuard on those `.jar` files, and
+produces the final `.dex` file(s) that will be packaged into your `.apk`.
+
+## Deobfuscation
+
+Obfuscation can be turned off for local builds while leaving ProGuard enabled
+by setting `enable_proguard_obfuscation = false` in GN args.
+
+There are two main methods for deobfuscating Java stack traces locally:
+1. Using APK wrapper scripts (stacks are automatically deobfuscated)
+  * `$OUT/bin/chrome_public_apk logcat`  # Run adb logcat
+  * `$OUT/bin/chrome_public_apk run`  # Launch chrome and run adb logcat
+
+2. Using `java_deobfuscate`
+  * `build/android/stacktrace/java_deobfuscate.py $OUT/apks/ChromePublic.apk.mapping < logcat.txt`
+    * ProGuard mapping files are located beside APKs (ex.
+      `$OUT/apks/ChromePublic.apk` and `$OUT/apks/ChromePublic.apk.mapping`)
+
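+For orientation, a ProGuard/R8 `.mapping` file is plain text that maps original
+names to obfuscated ones, along these lines (names invented):
+
+```
+org.example.FooBar -> a.b:
+    boolean mVisible -> a
+    void show() -> b
+```
+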
+Helpful links for deobfuscation:
+
+* [Internal bits about how mapping files are archived][proguard-site]
+* [More detailed deobfuscation instructions][proguard-doc]
+* [Script for deobfuscating official builds][deob-official]
+
+[proguard-site]: http://goto.google.com/chrome-android-proguard
+[proguard-doc]: http://goto.google.com/chromejavadeobfuscation
+[deob-official]: http://goto.google.com/chrome-android-official-deobfuscation
+
+## Debugging common failures
+
+ProGuard failures are often hard to debug. This section aims to outline some of
+the more common errors.
+
+### Classes expected to be discarded
+
+The `-checkdiscard` directive can be used to ensure that certain items are
+removed by ProGuard. A common use of `-checkdiscard` is to ensure that ProGuard
+optimizations do not regress in their ability to remove code, such as code
+intended only for debug builds, or generated JNI classes that are meant to be
+zero-overhead abstractions. Annotating a class with
+[@CheckDiscard][checkdiscard] will add a `-checkdiscard` rule automatically.
+
+[checkdiscard]: /base/android/java/src/org/chromium/base/annotations/CheckDiscard.java
+
+```
+Item void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>() was not discarded.
+void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>()
+|- is invoked from:
+|  void org.chromium.base.library_loader.LibraryPrefetcher.asyncPrefetchLibrariesToMemory()
+... more code path lines
+|- is referenced in keep rule:
+|  obj/chrome/android/chrome_public_apk/chrome_public_apk.resources.proguard.txt:104:1
+
+Error: Discard checks failed.
+```
+
+Things to check:
+  * Did you add code that is referenced by code path in the error message?
+  * If so, check the original class for why the `CheckDiscard` was added
+    originally and verify that the reason is still valid with your change (may
+    need git blame to do this).
+  * Try the extra debugging steps listed in the JNI section below.
+
+### JNI wrapper classes not discarded
+
+Proxy native methods (`@NativeMethods`) use generated wrapper classes to provide
+access to native methods. We rely on ProGuard to fully optimize the generated
+code so that native methods aren't a source of binary size bloat. The error
+message above is an example of a JNI wrapper class that wasn't discarded
+(notice the name of the offending class).
+  * The ProGuard rule pointed to in the error message isn't helpful (just tells
+    us a code path that reaches the not-inlined class).
+  * Common causes:
+    * Caching the result of `ClassNameJni.get()` in a member variable.
+    * Passing a native wrapper method reference instead of using a lambda (i.e.
+      `Jni.get()::methodName` vs. `() -> Jni.get().methodName()`); see the
+      sketch after this list.
+  * For more debugging info, add to `base/android/proguard/chromium_code.flags`:
+      ```
+      -whyareyounotinlining class org.chromium.base.library_loader.LibraryPrefetcherJni {
+          <init>();
+      }
+      ```
+
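+A hedged sketch of those two causes (`ClassName`, `methodName()`, and
+`setCallback()` are invented stand-ins, not real Chromium APIs):
+
+```java
+// Bad: caching the wrapper in a field prevents R8 from discarding the
+// generated ClassNameJni class.
+private final ClassName.Natives mNatives = ClassNameJni.get();
+
+// Bad: a method reference captures the wrapper instance eagerly.
+setCallback(ClassNameJni.get()::methodName);
+
+// Good: a lambda resolves the wrapper at each call, which R8 can inline away.
+setCallback(() -> ClassNameJni.get().methodName());
+```
+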
+### Duplicate classes
+
+```
+Type YourClassName is defined multiple times: obj/jar1.jar:YourClassName.class, obj/jar2.jar:YourClassName.class
+```
+
+Common causes:
+  * Multiple targets with overlapping `srcjar_deps`:
+    * Each `.srcjar` can only be depended on by a single Java target in any
+      given APK target. `srcjar_deps` are just a convenient way to depend on
+      generated files and should be treated like source files rather than
+      `deps`.
+    * Solution: Wrap the `srcjar` in an `android_library` target or have only a
+      single Java target depend on the `srcjar` and have other targets depend on
+      the containing Java target instead.
+  * Accidentally enabling APK level generated files for multiple targets that
+    share generated code (ex. Trichrome or App Bundles):
+    * Solution: Make sure the generated file is only added once.
+
+Debugging ProGuard failures isn't easy, so please message java@chromium.org
+or [file a bug](https://crbug.com/new) with `component=Build os=Android` for
+any issues related to Java code optimization.
diff --git a/src/build/android/docs/java_toolchain.md b/src/build/android/docs/java_toolchain.md
new file mode 100644
index 0000000..ef11548
--- /dev/null
+++ b/src/build/android/docs/java_toolchain.md
@@ -0,0 +1,284 @@
+# Chromium's Java Toolchain
+
+This doc aims to describe the Chrome build process that takes a set of `.java`
+files and turns them into a `classes.dex` file.
+
+[TOC]
+
+## Core GN Target Types
+
+The following have `supports_android` and `requires_android` set to false by
+default:
+* `java_library()`: Compiles `.java` -> `.jar`
+* `java_prebuilt()`: Imports a prebuilt `.jar` file.
+
+The following have `supports_android` and `requires_android` set to true. They
+also have a default `jar_excluded_patterns` set (more on that later):
+* `android_library()`
+* `android_java_prebuilt()`
+
+All target names must end with "_java" so that the build system can distinguish
+them from non-java targets (or [other variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+
+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+  * Host `.jar` files live in `lib.java/` so that they are archived in
+    builder/tester bots (which do not archive `obj/`).
+
+## From Source to Final Dex
+
+### Step 1: Create interface .jar with turbine or ijar
+
+For prebuilt `.jar` files, use [//third_party/ijar] to create interface `.jar`
+from prebuilt `.jar`.
+
+For non-prebuilt targets, use [//third_party/turbine] to create interface `.jar`
+from `.java` source files. Turbine is much faster than javac, so using it allows
+full compilation to proceed with greater concurrency.
+
+What are interface jars?
+
+* They contain `.class` files with all non-public symbols and function bodies
+  removed.
+* Dependent targets use interface `.jar` files to avoid being rebuilt
+  when only private implementation details change.
+
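+As a rough illustration (the class is invented, and the "after" form is shown
+as pseudo-source since interface jars are bytecode): given
+
+```java
+public class Greeter {
+    private static final String PREFIX = "Hello, ";
+
+    public String greet(String name) {
+        return PREFIX + name;
+    }
+
+    private String debugString() {
+        return "Greeter(" + PREFIX + ")";
+    }
+}
+```
+
+an interface `.jar` would retain only the publicly visible shape, roughly:
+
+```java
+public class Greeter {
+    public String greet(String name);
+}
+```
+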
+[//third_party/ijar]: /third_party/ijar/README.chromium
+[//third_party/turbine]: /third_party/turbine/README.chromium
+
+### Step 2a: Compile with javac
+
+This step is the only step that does not apply to prebuilt targets.
+
+* All `.java` files in a target are compiled by `javac` into `.class` files.
+  * This includes `.java` files that live within `.srcjar` files, referenced
+    through `srcjar_deps`.
+* The `classpath` used when compiling a target is composed of the `.jar` files
+  of its deps.
+  * When deps are library targets, the Step 1 `.jar` file is used.
+  * When deps are prebuilt targets, the original `.jar` file is used.
+  * All `.jar` processing done in subsequent steps does not impact compilation
+    classpath.
+* `.class` files are zipped into an output `.jar` file.
+* There is **no support** for incremental compilation at this level.
+  * If one source file changes within a library, then the entire library is
+    recompiled.
+  * Prefer smaller targets to avoid slow compiles.
+
+### Step 2b: Compile with ErrorProne
+
+This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
+
+* Concurrently with step 2a: [ErrorProne] compiles java files and checks for bug
+  patterns, including some [custom to Chromium][ep_plugins].
+* ErrorProne used to replace step 2a, but was changed to a concurrent step after
+  it was identified as the slower of the two.
+
+[ErrorProne]: https://errorprone.info/
+[ep_plugins]: /tools/android/errorprone_plugin/
+
+### Step 3: Desugaring (Device .jar Only)
+
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.
+
+* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
+  lambdas and default interface methods, into constructs that are compatible
+  with Java 7.
+
+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
+### Step 5: Filtering
+
+This step happens only for targets that have `jar_excluded_patterns` or
+`jar_included_patterns` set (e.g. all `android_` targets).
+
+* Remove `.class` files that match the filters from the `.jar`. These `.class`
+  files are generally those that are re-created with different implementations
+  further on in the build process.
+  * E.g.: `R.class` files - a part of [Android Resources].
+  * E.g.: `GEN_JNI.class` - a part of our [JNI] glue.
+  * E.g.: `AppHooksImpl.class` - how `chrome_java` wires up different
+    implementations for [non-public builds][apphooks].
+
+[JNI]: /base/android/jni_generator/README.md
+[Android Resources]: life_of_a_resource.md
+[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
+
+### Step 6: Per-Library Dexing
+
+This step happens only when targets have `supports_android = true`.
+
+* [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
+  containing `classes.dex` files.
+* Dexing is incremental - it will reuse dex'ed classes from a previous build if
+  the corresponding `.class` file is unchanged.
+* These per-library `.dex.jar` files are used directly by [incremental install],
+  and are inputs to the Apk step when `enable_proguard = false`.
+  * Even when `is_java_debug = false`, many apk targets do not enable ProGuard
+    (e.g. unit tests).
+
+[d8]: https://developer.android.com/studio/command-line/d8
+[incremental install]: /build/android/incremental_install/README.md
+
+### Step 7: Apk / Bundle Module Compile
+
+* Each `android_apk` and `android_bundle_module` template has a nested
+  `java_library` target. The nested library includes final copies of files
+  stripped out by prior filtering steps. These files include:
+  * Final `R.java` files, created by `compile_resources.py`.
+  * Final `GEN_JNI.java` for [JNI glue].
+  * `BuildConfig.java` and `NativeLibraries.java` (//base dependencies).
+
+[JNI glue]: /base/android/jni_generator/README.md
+
+### Step 8: Final Dexing
+
+This step is skipped when building using [Incremental Install].
+
+When `is_java_debug = true`:
+* [d8] merges all library `.dex.jar` files into a final `.mergeddex.jar`.
+
+When `is_java_debug = false`:
+* [R8] performs whole-program optimization on all library `lib.java` `.jar`
+  files and outputs a final `.r8dex.jar`.
+  * For App Bundles, R8 creates a `.r8dex.jar` for each module.
+
+[Incremental Install]: /build/android/incremental_install/README.md
+[R8]: https://r8.googlesource.com/r8
+
+## Test APKs with apk_under_test
+
+Test APKs are normal APKs that contain an `<instrumentation>` tag within their
+`AndroidManifest.xml`. If this tag specifies an `android:targetPackage`
+different from the test APK's own package, then Android will add that package's
+`classes.dex` to the test APK's Java classpath when run. In GN, you can enable
+this behavior using
+the `apk_under_test` parameter on `instrumentation_test_apk` targets. Using it
+is discouraged if APKs have `proguard_enabled=true`.
+
+### Difference in Final Dex
+
+When `enable_proguard=false`:
+* Any library depended on by the test APK that is also depended on by the
+  apk-under-test is excluded from the test APK's final dex step.
+
+When `enable_proguard=true`:
+* Test APKs cannot make use of the apk-under-test's dex because only symbols
+  explicitly kept by `-keep` directives are guaranteed to exist after
+  ProGuarding. As a work-around, test APKs include all of the apk-under-test's
+  libraries directly in its own final dex such that the under-test apk's Java
+  code is never used (because it is entirely shadowed by the test apk's dex).
+  * We've found this configuration to be fragile, and are trying to [move away
+    from it](https://bugs.chromium.org/p/chromium/issues/detail?id=890452).
+
+### Difference in GEN_JNI.java
+* Calling native methods using [JNI glue] requires that a `GEN_JNI.java` class
+  be generated that contains all native methods for an APK. There cannot be
+  conflicting `GEN_JNI` classes in both the test apk and the apk-under-test, so
+  only the apk-under-test has one generated for it. As a result,
+  instrumentation test APKs that use apk-under-test cannot use native methods
+  that aren't already part of the apk-under-test.
+
+## How to Generate Java Source Code
+
+There are two ways to go about generating source files: Annotation Processors
+and custom build steps.
+
+### Annotation Processors
+* These are run by `javac` as part of the compile step.
+* They **cannot** modify the source files that they apply to. They can only
+  generate new sources.
+* Use these when:
+  * an existing Annotation Processor does what you want
+    (E.g. Dagger, AutoService, etc.), or
+  * you need to understand Java types to do generation.
+
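+As a minimal sketch of the "generate new sources" flow (every name below is
+invented; a real Chromium target would wire a processor up through GN):
+
+```java
+import java.io.IOException;
+import java.io.Writer;
+import java.util.Set;
+import javax.annotation.processing.AbstractProcessor;
+import javax.annotation.processing.RoundEnvironment;
+import javax.annotation.processing.SupportedAnnotationTypes;
+import javax.annotation.processing.SupportedSourceVersion;
+import javax.lang.model.SourceVersion;
+import javax.lang.model.element.Element;
+import javax.lang.model.element.TypeElement;
+
+@SupportedAnnotationTypes("org.example.GenerateCompanion")
+@SupportedSourceVersion(SourceVersion.RELEASE_8)
+public class CompanionProcessor extends AbstractProcessor {
+    @Override
+    public boolean process(
+            Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
+        for (TypeElement annotation : annotations) {
+            for (Element e : roundEnv.getElementsAnnotatedWith(annotation)) {
+                String name = e.getSimpleName() + "Companion";
+                // Processors may only create new files, never edit inputs.
+                try (Writer w = processingEnv.getFiler()
+                                .createSourceFile("org.example." + name)
+                                .openWriter()) {
+                    w.write("package org.example;\n"
+                            + "public class " + name + " {}\n");
+                } catch (IOException ex) {
+                    throw new RuntimeException(ex);
+                }
+            }
+        }
+        return true; // The annotation is fully handled by this processor.
+    }
+}
+```
+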
+### Custom Build Steps
+* These use discrete build actions to generate source files.
+  * Some generate `.java` directly, but most generate a zip file of sources
+    (called a `.srcjar`) to reduce the number of inputs / outputs.
+* Examples of existing templates:
+  * `jinja_template`: Generates source files using [Jinja].
+  * `java_cpp_template`: Generates source files using the C preprocessor.
+  * `java_cpp_enum`: Generates `@IntDef`s based on enums within `.h` files.
+  * `java_cpp_strings`: Generates String constants based on strings defined in
+    `.cc` files.
+* Custom build steps are preferred over Annotation Processors because they are
+  generally easier to understand, and can run in parallel with other steps
+  (rather than being tied to compiles).
+
+[Jinja]: https://palletsprojects.com/p/jinja/
+
+## Static Analysis & Code Checks
+
+We use several tools for static analysis.
+
+### [ErrorProne](https://errorprone.info/)
+* Runs as part of normal compilation. Controlled by GN arg: `use_errorprone_java_compiler`.
+* Most useful check:
+  * Enforcement of `@GuardedBy` annotations.
+* The list of enabled / disabled checks lives [within javac.py](https://cs.chromium.org/chromium/src/build/android/gyp/javac.py?l=30)
+  * Many checks are currently disabled because there is work involved in fixing
+    violations they introduce. Please help!
+* Custom checks for Chrome:
+  * [//tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/](/tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/)
+* Use ErrorProne checks when you need something more sophisticated than pattern
+  matching.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+### [Android Lint](https://developer.android.com/studio/write/lint)
+* Runs as part of normal compilation. Controlled by GN arg: `disable_android_lint`.
+* Most useful check:
+  * Enforcing `@TargetApi` annotations (ensures you don't call a function that
+    does not exist on all versions of Android unless guarded by a version
+    check).
+* List of disabled checks:
+  * [//build/android/lint/suppressions.xml](/build/android/lint/suppressions.xml)
+* Custom lint checks [are possible][lint_plugins], but we don't have any.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+[lint_plugins]: http://tools.android.com/tips/lint-custom-rules
+
+### [Bytecode Processor](/build/android/bytecode/)
+* Performs a single check:
+  * That target `deps` are not missing any entries.
+  * In other words: Enforces that targets do not rely on indirect dependencies
+    to populate their classpath.
+* Checks run on the entire codebase, not only on changed lines.
+
+### [PRESUBMIT.py](/PRESUBMIT.py)
+* Checks for banned patterns via `_BANNED_JAVA_FUNCTIONS`.
+  * (These should likely be moved to Checkstyle).
+* Checks for a random set of things in `ChecksAndroidSpecificOnUpload()`.
+  * Including running Checkstyle.
+  * (Some of these other checks should likely also be moved to Checkstyle).
+* Checks run only on changed lines.
+
+### [Checkstyle](https://checkstyle.sourceforge.io/)
+* Checks Java style rules that are not covered by clang-format.
+  * E.g.: Unused imports and naming conventions.
+* Allows custom checks to be added via XML. Here [is ours].
+* Preferred over adding checks directly in PRESUBMIT.py because the tool
+  understands `@SuppressWarnings` annotations.
+* Checks run only on changed lines.
+
+[is ours]: /tools/android/checkstyle/chromium-style-5.0.xml
+
+### [clang-format](https://clang.llvm.org/docs/ClangFormat.html)
+* Formats `.java` files via `git cl format`.
+* Can be toggled on/off with code comments.
+  ```java
+  // clang-format off
+  ... non-formatted code here ...
+  // clang-format on
+  ```
+* Does not work well for multiple annotations or for some lambda expressions,
+  but it is generally agreed that having it is better than not.
diff --git a/src/build/android/docs/life_of_a_resource.md b/src/build/android/docs/life_of_a_resource.md
new file mode 100644
index 0000000..3aacd5e
--- /dev/null
+++ b/src/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,260 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+  * Including AndroidManifest.xml files from libraries, which get merged
+    together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+  target-specific resource dirs, no dependent resources here).
+* Target-specific R.txt
+  * Contains a list of resources and their ids (including of dependencies).
+* Target-specific R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc. are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+  * AndroidManifest.xml (as binary xml)
+  * resources.arsc
+  * res/**
+* Final R.txt
+  * Contains a list of resources and their ids (including of dependencies).
+* Final R.java .srcjar
+  * See [What are R.java files and how are they generated](
+  #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp for binary size (optional).
+* Move drawables in mdpi to non-mdpi directory ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+  other resources will now use the id rather than the name for faster lookup at
+  runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+  the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+  dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the order of dependencies supplied so that some resources clobber each
+  other.
+* Compile the AndroidManifest.xml to binary xml (references to resources are now
+  using ids rather than the string names).
+* Create a resources.arsc file that has the name and values of string
+  resources as well as the name and path of non-string resources (i.e. layouts
+  and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+  with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resource names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [allowlisted](#adding-resources-to-the-allowlist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (i.e. it goes through the
+entire process for each module). The modules are then combined to form a
+bundle. Moreover, during the "Finalizes apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help with
+debugging, the `R.txt` file is archived. The `R.txt` file contains a mapping
+from resource ids to resource names and can be used to get the original resource
+name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
+
+### Adding resources to the allowlist
+
+If a resource is accessed via `getIdentifier()`, it needs to be allowlisted in
+an aapt2 resources config file. The config file looks like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+e.g.:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_paths` variable. To add a resource to the allowlist, check
+where the config is for your target and add a new line for your resource. If
+none exist, create a new config file and pass its path in your target.
+
+### Webview resource ids
+
+The first byte of a resource id (the leading two hex digits) is the package id.
+For regular apks, this is `0x7f`. However, Webview is a shared library which
+gets loaded into other apks. The package id for webview resources is assigned
+dynamically at runtime. When webview is loaded, it calls the
+[R file's][Base Module R.java File] `onResourcesLoaded()` function so that its
+resources get the correct package id. When deobfuscating webview resource ids,
+disregard the package byte in the id when looking it up in the `R.txt` file.
+
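+A small Java sketch of that lookup (variable names invented for illustration):
+
+```java
+// resId is a resource id observed at runtime, e.g. 0x2c0a02ad; only the low
+// three bytes are stable across package ids.
+int typeAndEntry = resId & 0x00FFFFFF;
+int rTxtId = 0x7F000000 | typeAndEntry;  // R.txt entries use package 0x7f.
+```
+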
+Monochrome, when loaded as webview, rewrites the package ids of resources used
+by the webview portion to the correct value at runtime; otherwise, its resources
+have package id `0x7f` when run as a regular apk.
+
+[Base Module R.java File]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/system_webview_apk/generated_java/gen/base_module/R.java
+
+## How R.java files are generated
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in Java code to reference resources in the apk.
+
+There are three types of R.java files in Chrome.
+1. Base Module Root R.java Files
+2. DFM Root R.java Files
+3. Source R.java Files
+
+Example Base Module Root R.java File
+```java
+package gen.base_module;
+
+public final class R {
+    public static class anim  {
+        public static final int abc_fade_in = 0x7f010000;
+        public static final int abc_fade_out = 0x7f010001;
+        public static final int abc_slide_in_top = 0x7f010007;
+    }
+    public static class animator  {
+        public static final int design_appbar_state_list_animator = 0x7f020000;
+    }
+}
+```
+Base module root R.java files contain base Android resources. All R.java files
+can access base module resources through inheritance.
+
+Example DFM Root R.java File
+```java
+package gen.vr_module;
+
+public final class R {
+    public static class anim extends gen.base_module.R.anim {
+    }
+    public static class animator extends gen.base_module.R.animator  {
+        public static final int design_appbar_state_list_animator = 0x7f030000;
+    }
+}
+```
+DFM root R.java files extend base module root R.java files. This allows DFMs to
+access their own resources as well as the base module's resources.
+
+Example Source R.java File
+```java
+package org.chromium.chrome.vr;
+
+public final class R {
+    public static final class anim extends
+            gen.base_module.R.anim {}
+    public static final class animator extends
+            gen.base_module.R.animator {}
+}
+```
+Source R.java files extend root R.java files and have no resources of their own.
+Developers can import these R.java files to access resources in the apk.
+
+The R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the Java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/src/build/android/docs/lint.md b/src/build/android/docs/lint.md
new file mode 100644
index 0000000..4ba13d7
--- /dev/null
+++ b/src/build/android/docs/lint.md
@@ -0,0 +1,140 @@
+# Lint
+
+Android's [**lint**](https://developer.android.com/tools/help/lint.html) is a
+static analysis tool that Chromium uses to catch possible issues in Java code.
+
+This is a list of [**checks**](http://tools.android.com/tips/lint-checks) that
+you might encounter.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium only runs lint on apk or bundle targets that explicitly set
+`enable_lint = true`. Some example targets that have this set are:
+
+ - `//chrome/android:monochrome_public_bundle`
+ - `//android_webview/support_library/boundary_interfaces:boundary_interface_example_apk`
+ - `//remoting/android:remoting_apk`
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it locally
+
+Java provides an annotation,
+[`@SuppressWarnings`](https://developer.android.com/reference/java/lang/SuppressWarnings),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage in
+Chromium is typically limited to the first three. You do not need to import it
+since it is in the `java.lang` package.
+
+Like many suppression annotations, `@SuppressWarnings` takes a value that tells
+**lint** what to ignore. It can be a single `String`:
+
+```java
+@SuppressWarnings("NewApi")
+public void foo() {
+    a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressWarnings({
+        "NewApi",
+        "UseSparseArrays"
+        })
+public Map<Integer, FakeObject> bar() {
+    Map<Integer, FakeObject> shouldBeASparseArray = new HashMap<Integer, FakeObject>();
+    another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+    return shouldBeASparseArray;
+}
+```
+
+For resource xml files you can use `tools:ignore`:
+
+```xml
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:tools="http://schemas.android.com/tools">
+    <!-- TODO(crbug/###): remove tools:ignore once these colors are used -->
+    <color name="hi" tools:ignore="NewApi,UnusedResources">@color/unused</color>
+</resources>
+```
+
+The examples above are the recommended ways of suppressing lint warnings.
+
+### Suppress it in a `lint-suppressions.xml` file
+
+**lint** can be given a per-target XML configuration file containing warnings or
+errors that should be ignored. Each target defines its own configuration file
+via the `lint_suppressions_file` gn variable. It is usually defined near its
+`enable_lint` gn variable.
+
+These suppressions files should only be used for temporarily ignoring warnings
+that are too hard (or not possible) to suppress locally, and permanently
+ignoring warnings only for this target. To permanently ignore a warning for all
+targets, add the warning to the `_DISABLED_ALWAYS` list in
+[build/android/gyp/lint.py](https://source.chromium.org/chromium/chromium/src/+/master:build/android/gyp/lint.py).
+Disabling globally makes lint a bit faster.
+
+The exception to the above rule is for warnings that affect multiple languages.
+Feel free to suppress those in lint-suppressions.xml files since it is not
+practical to suppress them in each language file and it is a lot of extra bloat
+to list out every language for every violation in lint-baseline.xml files.
+
+Here is an example of how to structure a suppressions XML file:
+
+```xml
+<?xml version="1.0" encoding="utf-8" ?>
+<lint>
+  <!-- Chrome is a system app. -->
+  <issue id="ProtectedPermissions" severity="ignore"/>
+  <issue id="UnusedResources">
+    <!-- 1 raw resources are accessed by URL in various places. -->
+    <ignore regexp="gen/remoting/android/.*/res/raw/credits.*"/>
+    <!-- TODO(crbug.com/###): Remove the following line.  -->
+    <ignore regexp="The resource `R.string.soon_to_be_used` appears to be unused"/>
+  </issue>
+</lint>
+```
+
+## What are `lint-baseline.xml` files for?
+
+Baseline files are to help us introduce new lint warnings and errors without
+blocking on fixing all our existing code that violate these new errors. Since
+they are generated files, they should **not** be used to suppress lint warnings.
+One of the approaches above should be used instead. Eventually all the errors in
+baseline files should be either fixed or ignored permanently.
+
+The following are some common scenarios where you may need to update baseline
+files.
+
+### I updated `cmdline-tools` and now there are tons of new errors!
+
+This happens every time lint is updated, since lint is provided by
+`cmdline-tools`.
+
+Baseline files are defined via the `lint_baseline_file` gn variable. It is
+usually defined near a target's `enable_lint` gn variable. To regenerate the
+baseline file, delete it and re-run the lint target. The command will fail, but
+the baseline file will have been generated.
+
+This may need to be repeated for all targets that have set `enable_lint = true`,
+including downstream targets. Downstream baseline files should be updated
+first to avoid build breakages. Each target has its own `lint_baseline_file`
+defined, so all these files can be removed and regenerated as needed.
+
+### I updated `library X` and now there are tons of new errors!
+
+This is usually because `library X`'s aar contains custom lint checks and/or
+custom annotation definitions. Follow the same procedure as for updates to
+`cmdline-tools`.
diff --git a/src/build/android/download_doclava.py b/src/build/android/download_doclava.py
new file mode 100755
index 0000000..1982fdb
--- /dev/null
+++ b/src/build/android/download_doclava.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+  # Some Windows bots inadvertently have third_party/android_sdk installed,
+  # but are unable to run download_from_google_storage because depot_tools
+  # is not in their path, so avoid failure and bail.
+  if sys.platform == 'win32':
+    return 0
+  subprocess.check_call([
+      'download_from_google_storage',
+      '--no_resume',
+      '--no_auth',
+      '--bucket', 'chromium-doclava',
+      '--extract',
+      '-s',
+      os.path.join(os.path.dirname(__file__), '..', '..', 'buildtools',
+                   'android', 'doclava.tar.gz.sha1')])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/dump_apk_resource_strings.py b/src/build/android/dump_apk_resource_strings.py
new file mode 100755
index 0000000..8417e29
--- /dev/null
+++ b/src/build/android/dump_apk_resource_strings.py
@@ -0,0 +1,664 @@
+#!/usr/bin/env vpython
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A script to parse and dump localized strings in resource.arsc files."""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import cProfile
+import os
+import re
+import subprocess
+import sys
+import zipfile
+
+# pylint: disable=bare-except
+
+# Assuming this script is located under build/android, try to import
+# build/android/gyp/bundletool.py to get the default path to the bundletool
+# jar file. If this fails, using --bundletool-path will be required to parse
+# bundles, allowing this script to be relocated or reused somewhere else.
+try:
+  sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp'))
+  import bundletool
+
+  _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH
+except:
+  _DEFAULT_BUNDLETOOL_PATH = None
+
+# Try to get the path of the aapt build tool from catapult/devil.
+try:
+  import devil_chromium  # pylint: disable=unused-import
+  from devil.android.sdk import build_tools
+  _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt')
+except:
+  _AAPT_DEFAULT_PATH = None
+
+
+def AutoIndentStringList(lines, indentation=2):
+  """Auto-indents an input list of text lines, based on open/closed braces.
+
+  For example, the following input text:
+
+    'Foo {',
+    'Bar {',
+    'Zoo',
+    '}',
+    '}',
+
+  Will return the following:
+
+    'Foo {',
+    '  Bar {',
+    '    Zoo',
+    '  }',
+    '}',
+
+  The rules are pretty simple:
+    - A line that ends with an open brace ({) increments indentation.
+    - A line that starts with a closing brace (}) decrements it.
+
+  The main idea is to make outputting structured text data trivial,
+  since it can be assumed that the final output will be passed through
+  this function to make it human-readable.
+
+  Args:
+    lines: an iterator over input text lines. They should not contain
+      line terminators (e.g. '\n').
+  Returns:
+    A new list of text lines, properly auto-indented.
+  """
+  margin = ''
+  result = []
+  # NOTE: Intentional but significant speed optimizations in this function:
+  #   - |line and line[0] == <char>| instead of |line.startswith(<char>)|.
+  #   - |line and line[-1] == <char>| instead of |line.endswith(<char>)|.
+  for line in lines:
+    if line and line[0] == '}':
+      margin = margin[:-indentation]
+    result.append(margin + line)
+    if line and line[-1] == '{':
+      margin += ' ' * indentation
+
+  return result
+
+
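+# A sanity check added for illustration, mirroring the asserts used for
+# UnquoteString() and QuoteString() below.
+assert AutoIndentStringList(['Foo {', 'Bar {', 'Zoo', '}', '}']) == [
+    'Foo {', '  Bar {', '    Zoo', '  }', '}']
+
+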
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+  """Unquote a given string from aapt dump.
+
+  Args:
+    s: A UTF-8 encoded string that contains backslashes for quotes, as found
+      in the output of 'aapt dump resources --values'.
+  Returns:
+    The unquoted version of the input string.
+  """
+  if '\\' not in s:
+    return s
+
+  result = ''
+  start = 0
+  size = len(s)
+  while start < size:
+    pos = s.find('\\', start)
+    if pos < 0:
+      break
+
+    result += s[start:pos]
+    count = 1
+    while pos + count < size and s[pos + count] == '\\':
+      count += 1
+
+    result += '\\' * (count / 2)  # Floor division under Python 2.
+    start = pos + count
+    if count & 1:
+      if start < size:
+        ch = s[start]
+        if ch == 'n':  # \n is the only non-printable character supported.
+          ch = '\n'
+        result += ch
+        start += 1
+      else:
+        result += '\\'
+
+  result += s[start:]
+  return result
+
+
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+  """Quote a given string for external output.
+
+  Args:
+    s: An input UTF-8 encoded string.
+  Returns:
+    A quoted version of the string, using the same rules as 'aapt dump'.
+  """
+  # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+  # is undesirable.
+  return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+  """Read all string resource IDs and names from an R.txt file.
+
+  Args:
+    r_txt_path: Input file path.
+  Returns:
+    A {res_id -> res_name} dictionary corresponding to the string resources
+    from the input R.txt file.
+  """
+  # NOTE: Typical line of interest looks like:
+  # int string AllowedDomainsForAppsTitle 0x7f130001
+  result = {}
+  prefix = 'int string '
+  with open(r_txt_path) as f:
+    for line in f:
+      line = line.rstrip()
+      if line.startswith(prefix):
+        res_name, res_id = line[len(prefix):].split(' ')
+        result[int(res_id, 0)] = res_name
+  return result
+
+
+class ResourceStringValues(object):
+  """Models all possible values for a named string."""
+
+  def __init__(self):
+    self.res_name = None
+    self.res_values = {}
+
+  def AddValue(self, res_name, res_config, res_value):
+    """Add a new value to this entry.
+
+    Args:
+      res_name: Resource name. If this is not the first time this method
+        is called with the same resource name, then |res_name| should match
+        previous parameters for sanity checking.
+      res_config: Config associated with this value. This can actually be
+        anything that can be converted to a string.
+      res_value: UTF-8 encoded string value.
+    """
+    if res_name is not self.res_name and res_name != self.res_name:
+      if self.res_name is None:
+        self.res_name = res_name
+      else:
+        # Sanity check: the resource name should be the same for all chunks.
+        # Resource ID is redefined with a different name!!
+        print('WARNING: Resource key ignored (%s, should be %s)' %
+              (res_name, self.res_name))
+
+    if self.res_values.setdefault(res_config, res_value) is not res_value:
+      print('WARNING: Duplicate value definition for [config %s]: %s ' \
+            '(already has %s)' % (
+                res_config, res_value, self.res_values[res_config]))
+
+  def ToStringList(self, res_id):
+    """Convert entry to string list for human-friendly output."""
+    values = sorted(
+        [(str(config), value) for config, value in self.res_values.iteritems()])
+    if res_id is None:
+      # res_id will be None when the resource ID should not be part
+      # of the output.
+      result = ['name=%s count=%d {' % (self.res_name, len(values))]
+    else:
+      result = [
+          'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+                                                len(values))
+      ]
+    for config, value in values:
+      result.append('%-16s "%s"' % (config, QuoteString(value)))
+    result.append('}')
+    return result
+
+
+class ResourceStringMap(object):
+  """Convenience class to hold the set of all localized strings in a table.
+
+  Usage is the following:
+     1) Create new (empty) instance.
+     2) Call AddValue() repeatedly to add new values.
+     3) Eventually call RemapResourceNames() to remap resource names.
+     4) Call ToStringList() to convert the instance to a human-readable
+        list of strings that can later be used with AutoIndentStringList()
+        for example.
+  """
+
+  def __init__(self):
+    self._res_map = collections.defaultdict(ResourceStringValues)
+
+  def AddValue(self, res_id, res_name, res_config, res_value):
+    self._res_map[res_id].AddValue(res_name, res_config, res_value)
+
+  def RemapResourceNames(self, id_name_map):
+    """Rename all entries according to a given {res_id -> res_name} map."""
+    for res_id, res_name in id_name_map.iteritems():
+      if res_id in self._res_map:
+        self._res_map[res_id].res_name = res_name
+
+  def ToStringList(self, omit_ids=False):
+    """Dump content to a human-readable string list.
+
+    Note that the strings are ordered by their resource name first, and
+    resource id second.
+
+    Args:
+      omit_ids: If True, do not put resource IDs in the result. This might
+        be useful when comparing the outputs of two different builds of the
+        same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk)
+        where the resource IDs might be slightly different, but not the
+        string contents.
+    Returns:
+      A list of strings that can later be sent to AutoIndentStringList().
+    """
+    result = ['Resource strings (count=%d) {' % len(self._res_map)]
+    res_map = self._res_map
+
+    # A small function to compare two (res_id, values) tuples
+    # by resource name first, then resource ID.
+    def cmp_id_name(a, b):
+      result = cmp(a[1].res_name, b[1].res_name)
+      if result == 0:
+        result = cmp(a[0], b[0])
+      return result
+
+    for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name):
+      result += res_map[res_id].ToStringList(None if omit_ids else res_id)
+    result.append('}  # Resource strings')
+    return result
+
+
+@contextlib.contextmanager
+def ManagedOutput(output_file):
+  """Create an output File object that will be closed on exit if necessary.
+
+  Args:
+    output_file: Optional output file path.
+  Yields:
+    If |output_file| is empty, this simply yields sys.stdout. Otherwise, this
+    opens the file path for writing text, and yields its File object. The
+    context will ensure that the object is always closed on scope exit.
+  """
+  close_output = False
+  if output_file:
+    output = open(output_file, 'wt')
+    close_output = True
+  else:
+    output = sys.stdout
+  try:
+    yield output
+  finally:
+    if close_output:
+      output.close()
+
+
+@contextlib.contextmanager
+def ManagedPythonProfiling(enable_profiling, sort_key='tottime'):
+  """Enable Python profiling if needed.
+
+  Args:
+    enable_profiling: Boolean flag. True to enable python profiling.
+    sort_key: Sorting key for the final stats dump.
+  Yields:
+    If |enable_profiling| is False, this yields False. Otherwise, this
+    yields a new Profile instance just after enabling it. The manager
+    ensures that profiling stops and prints statistics on scope exit.
+  """
+  pr = None
+  if enable_profiling:
+    pr = cProfile.Profile()
+    pr.enable()
+  try:
+    yield pr
+  finally:
+    if pr:
+      pr.disable()
+      pr.print_stats(sort=sort_key)
+
+
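+# ManagedOutput and ManagedPythonProfiling are meant to be nested, as done in
+# main() at the bottom of this file:
+#
+#   with ManagedOutput(options.output) as output:
+#     with ManagedPythonProfiling(options.profile):
+#       output.write(...)
+
+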
+def IsFilePathABundle(input_file):
+  """Return True iff |input_file| holds an Android app bundle."""
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo('BundleConfig.pb')
+      return True
+  except (zipfile.BadZipfile, KeyError, IOError):
+    return False
+
+
+# Example output from 'bundletool dump resources --values' corresponding
+# to strings:
+#
+# 0x7F1200A0 - string/abc_action_menu_overflow_description
+#         (default) - [STR] "More options"
+#         locale: "ca" - [STR] "Més opcions"
+#         locale: "da" - [STR] "Flere muligheder"
+#         locale: "fa" - [STR] " گزینه<U+200C>های بیشتر"
+#         locale: "ja" - [STR] "その他のオプション"
+#         locale: "ta" - [STR] "மேலும் விருப்பங்கள்"
+#         locale: "nb" - [STR] "Flere alternativer"
+#         ...
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+#              for locales!
+#
+# Fun fact #2: The <U+200C> is terminal output for \u200c; the output is
+#              really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+    r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+    '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+    r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"').group(1) == "More options"
+
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+    r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+    u'        locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8'))
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+  """Use bundletool to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [
+      'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+      bundle_path, '--values'
+  ]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+  res_map = ResourceStringMap()
+  current_resource_id = None
+  current_resource_name = None
+  keep_parsing = True
+  need_value = False
+  while keep_parsing:
+    line = p.stdout.readline()
+    if not line:
+      break
+    # Do not use rstrip(), since this should only remove trailing newlines
+    # and not trailing whitespace that happens to be embedded in the string
+    # value for some reason.
+    line = line.rstrip('\n\r')
+    m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+    if m:
+      current_resource_id = int(m.group(1), 16)
+      current_resource_name = m.group(2)
+      need_value = True
+      continue
+
+    if not need_value:
+      continue
+
+    resource_config = None
+    m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+    if m:
+      resource_config = 'config (default)'
+      resource_value = m.group(1)
+    else:
+      m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+      if m:
+        resource_config = 'config %s' % m.group(1)
+        resource_value = m.group(2)
+
+    if resource_config is None:
+      need_value = False
+      continue
+
+    res_map.AddValue(current_resource_id, current_resource_name,
+                     resource_config, UnquoteString(resource_value))
+  return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+  """Returns True iff a ZipFile instance is for a regular APK."""
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo(RESOURCES_FILENAME)
+      return True
+  except (zipfile.BadZipfile, KeyError, IOError):
+    return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+#      config zh-rHK
+#        resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+#          (string8) "瀏覽首頁"
+#        resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+#          (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each
+# kind of line and extract the relevant information.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match('   config (default):')
+assert _RE_AAPT_CONFIG.match('   config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+    r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+    r'  resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r'       (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+  """Convert a locale name from 'aapt dump' to its BCP-47 form."""
+  if locale.startswith('b+'):
+    return '-'.join(locale[2:].split('+'))
+  lang, _, region = locale.partition('-r')
+  if region:
+    return '%s-%s' % (lang, region)
+  return lang
+
+
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+  """Use aapt to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+  res_map = ResourceStringMap()
+  current_locale = None
+  current_resource_id = None
+  current_resource_name = None
+  need_value = False
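+  # The loop below tracks the current 'config <locale>' section and otherwise
+  # alternates between two states: finding a resource header line (which sets
+  # |need_value|), then the '(string8) "..."' value line that should
+  # immediately follow it.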
+  while True:
+    line = p.stdout.readline().rstrip()
+    if not line:
+      break
+    m = _RE_AAPT_CONFIG.match(line)
+    if m:
+      locale = None
+      aapt_locale = m.group(1)
+      if aapt_locale == '(default)':
+        locale = aapt_locale
+      elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      if locale is not None:
+        current_locale = _ConvertAaptLocaleToBcp47(locale)
+      continue
+
+    if current_locale is None:
+      continue
+
+    if need_value:
+      m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+      if not m:
+        # Should not happen
+        sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"\n' %
+                         (current_resource_id, current_resource_name))
+        resource_value = '<MISSING_STRING_%08x>' % current_resource_id
+      else:
+        resource_value = UnquoteString(m.group(1))
+
+      res_map.AddValue(current_resource_id, current_resource_name,
+                       'config %s' % current_locale, resource_value)
+      need_value = False
+    else:
+      m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+      if m:
+        current_resource_id = int(m.group(1), 16)
+        current_resource_name = m.group(2)
+        need_value = True
+
+  return res_map
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      'input_file',
+      help='Input file path. This can be either an APK, or an app bundle.')
+  parser.add_argument('--output', help='Optional output file path.')
+  parser.add_argument(
+      '--omit-ids',
+      action='store_true',
+      help='Omit resource IDs in the output. This is useful '
+      'to compare the contents of two distinct builds of the '
+      'same APK.')
+  parser.add_argument(
+      '--aapt-path',
+      default=_AAPT_DEFAULT_PATH,
+      help='Path to aapt executable. Optional for APKs.')
+  parser.add_argument(
+      '--r-txt-path',
+      help='Path to an optional input R.txt file used to translate resource '
+      'IDs to string names. Useful when resources names in the input files '
+      'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, it will be used '
+      'automatically by this script.')
+  parser.add_argument(
+      '--bundletool-path',
+      default=_DEFAULT_BUNDLETOOL_PATH,
+      help='Path to alternate bundletool .jar file. Only used for bundles.')
+  parser.add_argument(
+      '--profile', action='store_true', help='Enable Python profiling.')
+
+  options = parser.parse_args(args)
+
+  # Create a {res_id -> res_name} map for unobfuscation, if needed.
+  res_id_name_map = {}
+  r_txt_path = options.r_txt_path
+  if not r_txt_path:
+    candidate_r_txt_path = options.input_file + '.R.txt'
+    if os.path.exists(candidate_r_txt_path):
+      r_txt_path = candidate_r_txt_path
+
+  if r_txt_path:
+    res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+  # Create a helper function that returns a new ResourceStringMap instance
+  # based on the input file's type.
+  if IsFilePathABundle(options.input_file):
+    if not options.bundletool_path:
+      parser.error(
+          '--bundletool-path <BUNDLETOOL_JAR> is required to parse bundles.')
+
+    # Use bundletool to parse the bundle resources.
+    def create_string_map():
+      return ParseBundleResources(options.bundletool_path, options.input_file)
+
+  elif IsFilePathAnApk(options.input_file):
+    if not options.aapt_path:
+      parser.error('--aapt-path <AAPT> is required to parse APKs.')
+
+    # Use aapt dump to parse the APK resources.
+    def create_string_map():
+      return ParseApkResources(options.aapt_path, options.input_file)
+
+  else:
+    parser.error('Unknown file format: %s' % options.input_file)
+
+  # Print everything now.
+  with ManagedOutput(options.output) as output:
+    with ManagedPythonProfiling(options.profile):
+      res_map = create_string_map()
+      res_map.RemapResourceNames(res_id_name_map)
+      lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+      for line in lines:
+        output.write(line)
+        output.write('\n')
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/src/build/android/emma_coverage_stats.py b/src/build/android/emma_coverage_stats.py
new file mode 100755
index 0000000..f45f4d4
--- /dev/null
+++ b/src/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+  build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
+    <EMMA file directory> --lines-for-coverage-file
+    <path to file containing lines for coverage>
+
+  Creates a JSON representation of the overall and file coverage stats and saves
+  this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
+NOT_EXECUTABLE = -1
+NOT_COVERED = 0
+COVERED = 1
+PARTIALLY_COVERED = 2
+
+# Coverage information about a single line of code.
+LineCoverage = collections.namedtuple(
+    'LineCoverage',
+    ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
+
+
+class _EmmaHtmlParser(object):
+  """Encapsulates HTML file parsing operations.
+
+  This class contains all operations related to parsing HTML files that were
+  produced using the EMMA code coverage tool.
+
+  Example HTML:
+
+  Package links:
+    <a href="_files/1.html">org.chromium.chrome</a>
+    This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
+
+  Class links:
+    <a href="1e.html">DoActivity.java</a>
+    This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
+
+  Line coverage data:
+    <tr class="p">
+       <td class="l" title="78% line coverage (7 out of 9)">108</td>
+       <td title="78% line coverage (7 out of 9 instructions)">
+         if (index < 0 || index = mSelectors.size()) index = 0;</td>
+    </tr>
+    <tr>
+       <td class="l">109</td>
+       <td> </td>
+    </tr>
+    <tr class="c">
+       <td class="l">110</td>
+       <td>        if (mSelectors.get(index) != null) {</td>
+    </tr>
+    <tr class="z">
+       <td class="l">111</td>
+       <td>            for (int i = 0; i < mSelectors.size(); i++) {</td>
+    </tr>
+    Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.
+
+    We can parse this to get:
+      1. Line number
+      2. Line of source code
+      3. Coverage status (c, z, or p)
+      4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
+  """
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different packages.
+  _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
+
+  # Selector to match all <a> elements within the rows that are in the table
+  # that displays all of the different classes within a package.
+  _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
+
+  # Selector to match all <tr> elements within the table containing Java source
+  # code in an EMMA HTML file.
+  _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
+
+  # Children of HTML elements are represented as a list in ElementTree. These
+  # constants represent list indices corresponding to relevant child elements.
+
+  # Child 1 contains percentage covered for a line.
+  _ELEMENT_PERCENT_COVERED = 1
+
+  # Child 1 contains the original line of source code.
+  _ELEMENT_CONTAINING_SOURCE_CODE = 1
+
+  # Child 0 contains the line number.
+  _ELEMENT_CONTAINING_LINENO = 0
+
+  # Maps CSS class names to corresponding coverage constants.
+  _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}
+
+  # UTF-8 no break space.
+  _NO_BREAK_SPACE = '\xc2\xa0'
+
+  def __init__(self, emma_file_base_dir):
+    """Initializes _EmmaHtmlParser.
+
+    Args:
+      emma_file_base_dir: Path to the location where EMMA report files are
+        stored. Should be where index.html is stored.
+    """
+    self._base_dir = emma_file_base_dir
+    self._emma_files_path = os.path.join(self._base_dir, '_files')
+    self._index_path = os.path.join(self._base_dir, 'index.html')
+
+  def GetLineCoverage(self, emma_file_path):
+    """Returns a list of LineCoverage objects for the given EMMA HTML file.
+
+    Args:
+      emma_file_path: String representing the path to the EMMA HTML file.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    line_tr_elements = self._FindElements(
+        emma_file_path, self._XPATH_SELECT_LOC)
+    line_coverage = []
+    for tr in line_tr_elements:
+      # Get the coverage status.
+      coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
+      # Get the fractional coverage value.
+      if coverage_status == PARTIALLY_COVERED:
+        title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
+        # Parse string that contains percent covered: "83% line coverage ...".
+        percent_covered = title_attribute.split('%')[0]
+        fractional_coverage = int(percent_covered) / 100.0
+      else:
+        fractional_coverage = 1.0
+
+      # Get the line number.
+      lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
+      # Handles oddly formatted HTML (where there is an extra <a> tag).
+      lineno = int(lineno_element.text or
+                   lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
+      # Get the original line of Java source code.
+      raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
+      utf8_source = raw_source.encode('UTF-8')
+      source = utf8_source.replace(self._NO_BREAK_SPACE, ' ')
+
+      line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
+      line_coverage.append(line)
+
+    return line_coverage
+
+  def GetPackageNameToEmmaFileDict(self):
+    """Returns a dict mapping Java packages to EMMA HTML coverage files.
+
+    Parses the EMMA index.html file to get a list of packages, then parses each
+    package HTML file to get a list of classes for that package, and creates
+    a dict with this info.
+
+    Returns:
+      A dict mapping string representation of Java packages (with class
+        names appended) to the corresponding file paths of EMMA HTML files.
+    """
+    # These <a> elements contain each package name and the path of the file
+    # where all classes within said package are listed.
+    package_link_elements = self._FindElements(
+        self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
+    # Maps file path of package directory (EMMA generated) to package name.
+    # Example: emma_dir/f.html: org.chromium.chrome.
+    package_links = {
+      os.path.join(self._base_dir, link.attrib['HREF']): link.text
+      for link in package_link_elements if 'HREF' in link.attrib
+    }
+
+    package_to_emma = {}
+    for package_emma_file_path, package_name in package_links.iteritems():
+      # These <a> elements contain each class name in the current package and
+      # the path of the file where the coverage info is stored for each class.
+      coverage_file_link_elements = self._FindElements(
+          package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)
+
+      for class_name_element in coverage_file_link_elements:
+        emma_coverage_file_path = os.path.join(
+            self._emma_files_path, class_name_element.attrib['HREF'])
+        full_package_name = '%s.%s' % (package_name, class_name_element.text)
+        package_to_emma[full_package_name] = emma_coverage_file_path
+
+    return package_to_emma
+
+  # pylint: disable=no-self-use
+  def _FindElements(self, file_path, xpath_selector):
+    """Reads a HTML file and performs an XPath match.
+
+    Args:
+      file_path: String representing the path to the HTML file.
+      xpath_selector: String representing xpath search pattern.
+
+    Returns:
+      A list of ElementTree.Elements matching the given XPath selector.
+        Returns an empty list if there is no match.
+    """
+    with open(file_path) as f:
+      file_contents = f.read().decode('ISO-8859-1').encode('UTF-8')
+      root = ElementTree.fromstring(file_contents)
+      return root.findall(xpath_selector)
+
+
+class _EmmaCoverageStats(object):
+  """Computes code coverage stats for Java code using the coverage tool EMMA.
+
+  This class provides an API that allows users to capture absolute code coverage
+  and code coverage on a subset of lines for each Java source file. Coverage
+  reports are generated in JSON format.
+  """
+  # Regular expression to get package name from Java package statement.
+  RE_PACKAGE_MATCH_GROUP = 'package'
+  RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)
+
+  def __init__(self, emma_file_base_dir, files_for_coverage):
+    """Initialize _EmmaCoverageStats.
+
+    Args:
+      emma_file_base_dir: String representing the path to the base directory
+        where EMMA HTML coverage files are stored, i.e. parent of index.html.
+      files_for_coverage: A list of Java source code file paths to get EMMA
+        coverage for.
+    """
+    self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
+    self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)
+
+  def GetCoverageDict(self, lines_for_coverage):
+    """Returns a dict containing detailed coverage information.
+
+    Gets detailed coverage stats for each file specified in the
+    |lines_for_coverage| dict and the total incremental number of lines covered
+    and executable for all files in |lines_for_coverage|.
+
+    Args:
+      lines_for_coverage: A dict mapping Java source file paths to lists of line
+        numbers.
+
+    Returns:
+      A dict containing coverage stats for the given dict of files and lines.
+        Contains absolute coverage stats for each file, coverage stats for each
+        file's lines specified in |lines_for_coverage|, line by line coverage
+        for each file, and overall coverage stats for the lines specified in
+        |lines_for_coverage|.
+    """
+    file_coverage = {}
+    for file_path, line_numbers in lines_for_coverage.iteritems():
+      file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
+      if file_coverage_dict:
+        file_coverage[file_path] = file_coverage_dict
+      else:
+        logging.warning(
+            'No code coverage data for %s, skipping.', file_path)
+
+    covered_statuses = [s['incremental'] for s in file_coverage.itervalues()]
+    num_covered_lines = sum(s['covered'] for s in covered_statuses)
+    num_total_lines = sum(s['total'] for s in covered_statuses)
+    return {
+      'files': file_coverage,
+      'patch': {
+        'incremental': {
+          'covered': num_covered_lines,
+          'total': num_total_lines
+        }
+      }
+    }
+
+  def GetCoverageDictForFile(self, file_path, line_numbers):
+    """Returns a dict containing detailed coverage info for the given file.
+
+    Args:
+      file_path: The path to the Java source file that we want to create the
+        coverage dict for.
+      line_numbers: A list of integer line numbers to retrieve additional stats
+        for.
+
+    Returns:
+      A dict containing absolute, incremental, and line by line coverage for
+        a file.
+    """
+    if file_path not in self._source_to_emma:
+      return None
+    emma_file = self._source_to_emma[file_path]
+    total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
+    incremental_line_coverage = [line for line in total_line_coverage
+                                 if line.lineno in line_numbers]
+    line_by_line_coverage = [
+      {
+        'line': line.source,
+        'coverage': line.covered_status,
+        'changed': line.lineno in line_numbers,
+        'fractional_coverage': line.fractional_line_coverage,
+      }
+      for line in total_line_coverage
+    ]
+    total_covered_lines, total_lines = (
+        self.GetSummaryStatsForLines(total_line_coverage))
+    incremental_covered_lines, incremental_total_lines = (
+        self.GetSummaryStatsForLines(incremental_line_coverage))
+
+    file_coverage_stats = {
+      'absolute': {
+        'covered': total_covered_lines,
+        'total': total_lines
+      },
+      'incremental': {
+        'covered': incremental_covered_lines,
+        'total': incremental_total_lines
+      },
+      'source': line_by_line_coverage,
+    }
+    return file_coverage_stats
+
+  # pylint: disable=no-self-use
+  def GetSummaryStatsForLines(self, line_coverage):
+    """Gets summary stats for a given list of LineCoverage objects.
+
+    Args:
+      line_coverage: A list of LineCoverage objects.
+
+    Returns:
+      A tuple containing the number of lines that are covered and the total
+        number of lines that are executable, respectively.
+    """
+    partially_covered_sum = 0
+    covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
+    for line in line_coverage:
+      status = line.covered_status
+      if status == NOT_EXECUTABLE:
+        continue
+      covered_status_totals[status] += 1
+      if status == PARTIALLY_COVERED:
+        partially_covered_sum += line.fractional_line_coverage
+
+    total_covered = covered_status_totals[COVERED] + partially_covered_sum
+    total_lines = sum(covered_status_totals.values())
+    return total_covered, total_lines
+
+  def _GetSourceFileToEmmaFileDict(self, files):
+    """Gets a dict used to correlate Java source files with EMMA HTML files.
+
+    This method gathers the information needed to correlate EMMA HTML
+    files with Java source files. EMMA XML and plain text reports do not provide
+    line by line coverage data, so HTML reports must be used instead.
+    Unfortunately, the HTML files that are created are given garbage names
+    (e.g. 1.html), so we need to manually correlate EMMA HTML files
+    with the original Java source files.
+
+    Args:
+      files: A list of file names for which coverage information is desired.
+
+    Returns:
+      A dict mapping Java source file paths to EMMA HTML file paths.
+    """
+    # Maps Java source file paths to package names.
+    # Example: /usr/code/file.java -> org.chromium.file.java.
+    source_to_package = {}
+    for file_path in files:
+      package = self.GetPackageNameFromFile(file_path)
+      if package:
+        source_to_package[file_path] = package
+      else:
+        logging.warning("Skipping %s because it doesn\'t have a package "
+                        "statement.", file_path)
+
+    # Maps package names to EMMA report HTML files.
+    # Example: org.chromium.file.java -> out/coverage/1a.html.
+    package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
+    # Finally, we have a dict mapping Java file paths to EMMA report files.
+    # Example: /usr/code/file.java -> out/coverage/1a.html.
+    source_to_emma = {source: package_to_emma[package]
+                      for source, package in source_to_package.iteritems()
+                      if package in package_to_emma}
+    return source_to_emma
+
+  @staticmethod
+  def NeedsCoverage(file_path):
+    """Checks to see if the file needs to be analyzed for code coverage.
+
+    Args:
+      file_path: A string representing path to the file.
+
+    Returns:
+      True for Java files that exist, False for all others.
+    """
+    if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
+      return True
+    else:
+      logging.info('Skipping file %s, cannot compute code coverage.', file_path)
+      return False
+
+  @staticmethod
+  def GetPackageNameFromFile(file_path):
+    """Gets the full package name including the file name for a given file path.
+
+    Args:
+      file_path: String representing the path to the Java source file.
+
+    Returns:
+      A string representing the full package name with file name appended or
+        None if there is no package statement in the file.
+    """
+    with open(file_path) as f:
+      file_content = f.read()
+      package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
+      if package_match:
+        package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
+        file_name = os.path.basename(file_path)
+        return '%s.%s' % (package, file_name)
+      else:
+        return None
+
+
+def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
+  """Generates a coverage report for a given set of lines.
+
+  Writes the results of the coverage analysis to the file specified by
+  |out_file_path|.
+
+  Args:
+    line_coverage_file: The path to a file which contains a dict mapping file
+      names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
+      that we should compute coverage information on lines 1 - 3 for file1.
+    out_file_path: A string representing the location to write the JSON report.
+    coverage_dir: A string representing the file path where the EMMA
+      HTML coverage files are located (i.e. folder where index.html is located).
+  """
+  with open(line_coverage_file) as f:
+    potential_files_for_coverage = json.load(f)
+
+  files_for_coverage = {f: lines
+                        for f, lines in potential_files_for_coverage.iteritems()
+                        if _EmmaCoverageStats.NeedsCoverage(f)}
+
+  coverage_results = {}
+  if files_for_coverage:
+    code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys())
+    coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
+  else:
+    logging.info('No Java files requiring coverage were included in %s.',
+                 line_coverage_file)
+
+  with open(out_file_path, 'w+') as out_status_file:
+    json.dump(coverage_results, out_status_file)
+
+
+def main():
+  argparser = argparse.ArgumentParser()
+  argparser.add_argument('--out', required=True, type=str,
+                         help='Report output file path.')
+  argparser.add_argument('--emma-dir', required=True, type=str,
+                         help='EMMA HTML report directory.')
+  argparser.add_argument('--lines-for-coverage-file', required=True, type=str,
+                         help='File containing a JSON object. Should contain a '
+                         'dict mapping file names to lists of line numbers of '
+                         'code for which coverage information is desired.')
+  argparser.add_argument('-v', '--verbose', action='count',
+                         help='Print verbose log information.')
+  args = argparser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+  devil_chromium.Initialize()
+  GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/emma_coverage_stats_test.py b/src/build/android/emma_coverage_stats_test.py
new file mode 100755
index 0000000..d53292c
--- /dev/null
+++ b/src/build/android/emma_coverage_stats_test.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+from xml.etree import ElementTree
+
+import emma_coverage_stats
+
+import mock  # pylint: disable=import-error
+
+EMPTY_COVERAGE_STATS_DICT = {
+  'files': {},
+  'patch': {
+    'incremental': {
+      'covered': 0, 'total': 0
+    }
+  }
+}
+
+
+class _EmmaHtmlParserTest(unittest.TestCase):
+  """Tests for _EmmaHtmlParser.
+
+  Uses modified EMMA report HTML that contains only the subset of tags needed
+  for test verification.
+  """
+
+  def setUp(self):
+    self.emma_dir = 'fake/dir/'
+    self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
+    self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
+    self.index_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CLASS="it" CELLSPACING="0">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="_files/0.html"'
+              '>org.chromium.chrome.browser</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="_files/1.html"'
+              '>org.chromium.chrome.browser.tabmodel</A></TD>'
+              '<TD CLASS="h">0%   (0/8)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_1_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
+              '<TD CLASS="h">0%   (0/3)</TD>'
+              '<TD CLASS="h">0%   (0/9)</TD>'
+              '<TD CLASS="h">0%   (0/97)</TD>'
+              '<TD CLASS="h">0%   (0/26)</TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.package_2_class_list_html = (
+      '<HTML>'
+        '<BODY>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+          '<TABLE CELLSPACING="0" WIDTH="100%">'
+            '<TR>'
+              '<TH CLASS="f">name</TH>'
+              '<TH>class, %</TH>'
+              '<TH>method, %</TH>'
+              '<TH>block, %</TH>'
+              '<TH>line, %</TH>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
+              '<TD CLASS="h">0%   (0/1)</TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
+            '</TR>'
+            '<TR CLASS="o">'
+              '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
+            '</TR>'
+            '<TR>'
+              '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
+            '</TR>'
+          '</TABLE>'
+          '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+          '</TABLE>'
+        '</BODY>'
+      '</HTML>'
+    )
+    self.partially_covered_tr_html = (
+      '<TR CLASS="p">'
+        '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
+        '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
+          'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
+      '</TR>'
+    )
+    self.covered_tr_html = (
+      '<TR CLASS="c">'
+        '<TD CLASS="l">110</TD>'
+        '<TD>        if (mSelectors.get(index) != null) {</TD>'
+      '</TR>'
+    )
+    self.not_executable_tr_html = (
+      '<TR>'
+        '<TD CLASS="l">109</TD>'
+        '<TD> </TD>'
+      '</TR>'
+    )
+    self.tr_with_extra_a_tag = (
+      '<TR CLASS="z">'
+        '<TD CLASS="l">'
+          '<A name="1f">54</A>'
+        '</TD>'
+        '<TD>            }</TD>'
+      '</TR>'
+    )
+
+  def testInit(self):
+    emma_dir = self.emma_dir
+    parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
+    self.assertEqual(parser._base_dir, emma_dir)
+    self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
+    self.assertEqual(parser._index_path, 'fake/dir/index.html')
+
+  def testFindElements_basic(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TD')
+    self.assertIs(type(found), list)
+    self.assertIs(type(found[0]), ElementTree.Element)
+    self.assertEqual(found[0].text, 'Test HTML')
+
+  def testFindElements_multipleElements(self):
+    multiple_trs = self.not_executable_tr_html + self.covered_tr_html
+    read_values = ['<div>' + multiple_trs + '</div>']
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(2, len(found))
+
+  def testFindElements_noMatch(self):
+    read_values = [self.simple_html]
+    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+                                   file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(found, [])
+
+  def testFindElements_badFilePath(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        self.parser._FindElements('fake', xpath_selector='//tr')
+
+  def testGetPackageNameToEmmaFileDict_basic(self):
+    expected_dict = {
+      'org.chromium.chrome.browser.AccessibilityUtil.java':
+      'fake/dir/_files/23.html',
+      'org.chromium.chrome.browser.ContextualMenuBar.java':
+      'fake/dir/_files/22.html',
+      'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
+      'fake/dir/_files/1e.html',
+      'org.chromium.chrome.browser.ContentSetting.java':
+      'fake/dir/_files/1f.html',
+      'org.chromium.chrome.browser.DevToolsServer.java':
+      'fake/dir/_files/20.html',
+      'org.chromium.chrome.browser.NavigationPopup.java':
+      'fake/dir/_files/24.html',
+      'org.chromium.chrome.browser.FileProviderHelper.java':
+      'fake/dir/_files/21.html'}
+
+    read_values = [self.index_html, self.package_1_class_list_html,
+                   self.package_2_class_list_html]
+    return_dict, mock_open = MockOpenForFunction(
+        self.parser.GetPackageNameToEmmaFileDict, read_values)
+
+    self.assertDictEqual(return_dict, expected_dict)
+    self.assertEqual(mock_open.call_count, 3)
+    calls = [mock.call('fake/dir/index.html'),
+             mock.call('fake/dir/_files/1.html'),
+             mock.call('fake/dir/_files/0.html')]
+    mock_open.assert_has_calls(calls)
+
+  def testGetPackageNameToEmmaFileDict_noPackageElements(self):
+    self.parser._FindElements = mock.Mock(return_value=[])
+    return_dict = self.parser.GetPackageNameToEmmaFileDict()
+    self.assertDictEqual({}, return_dict)
+
+  def testGetLineCoverage_status_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.COVERED)
+
+  def testGetLineCoverage_status_statusMissing(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.not_executable_tr_html])
+    self.assertEqual(line_coverage[0].covered_status,
+                     emma_coverage_stats.NOT_EXECUTABLE)
+
+  def testGetLineCoverage_fractionalCoverage_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)
+
+  def testGetLineCoverage_fractionalCoverage_partial(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.partially_covered_tr_html])
+    self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)
+
+  def testGetLineCoverage_lineno_basic(self):
+    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+    self.assertEqual(line_coverage[0].lineno, 110)
+
+  def testGetLineCoverage_lineno_withAlternativeHtml(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.tr_with_extra_a_tag])
+    self.assertEqual(line_coverage[0].lineno, 54)
+
+  def testGetLineCoverage_source(self):
+    self.parser._FindElements = mock.Mock(
+        return_value=[ElementTree.fromstring(self.covered_tr_html)])
+    line_coverage = self.parser.GetLineCoverage('fake_path')
+    self.assertEqual(line_coverage[0].source,
+                     '        if (mSelectors.get(index) != null) {')
+
+  def testGetLineCoverage_multipleElements(self):
+    line_coverage = self.GetLineCoverageWithFakeElements(
+        [self.covered_tr_html, self.partially_covered_tr_html,
+         self.tr_with_extra_a_tag])
+    self.assertEqual(len(line_coverage), 3)
+
+  def GetLineCoverageWithFakeElements(self, html_elements):
+    """Wraps GetLineCoverage so mock HTML can easily be used.
+
+    Args:
+      html_elements: List of strings each representing an HTML element.
+
+    Returns:
+      A list of LineCoverage objects.
+    """
+    elements = [ElementTree.fromstring(string) for string in html_elements]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=elements):
+      return self.parser.GetLineCoverage('fake_path')
+
+
+class _EmmaCoverageStatsTest(unittest.TestCase):
+  """Tests for _EmmaCoverageStats."""
+
+  def setUp(self):
+    self.good_source_to_emma = {
+      '/path/to/1/File1.java': '/emma/1.html',
+      '/path/2/File2.java': '/emma/2.html',
+      '/path/2/File3.java': '/emma/3.html'
+    }
+    self.line_coverage = [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
+        emma_coverage_stats.LineCoverage(
+            6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
+    ]
+    self.lines_for_coverage = [1, 3, 5, 6]
+    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+                    return_value=[]):
+      self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
+          'fake_dir', {})
+
+  def testInit(self):
+    coverage_stats = self.simple_coverage
+    self.assertIsInstance(coverage_stats._emma_parser,
+                          emma_coverage_stats._EmmaHtmlParser)
+    self.assertIsInstance(coverage_stats._source_to_emma, dict)
+
+  def testNeedsCoverage_withExistingJavaFile(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertTrue(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_withNonJavaFile(self):
+    test_file = '/path/to/file/File.c'
+    with mock.patch('os.path.exists', return_value=True):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testNeedsCoverage_fileDoesNotExist(self):
+    test_file = '/path/to/file/File.java'
+    with mock.patch('os.path.exists', return_value=False):
+      self.assertFalse(
+          emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+  def testGetPackageNameFromFile_basic(self):
+    test_file_text = """// Test Copyright
+    package org.chromium.chrome.browser;
+    import android.graphics.RectF;"""
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        [test_file_text], file_path='/path/to/file/File.java')
+    self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
+
+  def testGetPackageNameFromFile_noPackageStatement(self):
+    result_package, _ = MockOpenForFunction(
+        emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+        ['not a package statement'], file_path='/path/to/file/File.java')
+    self.assertIsNone(result_package)
+
+  def testGetSummaryStatsForLines_basic(self):
+    covered, total = self.simple_coverage.GetSummaryStatsForLines(
+        self.line_coverage)
+    self.assertEqual(covered, 3.05)
+    self.assertEqual(total, 5)
+
+  def testGetSourceFileToEmmaFileDict(self):
+    package_names = {
+      '/path/to/1/File1.java': 'org.fake.one.File1.java',
+      '/path/2/File2.java': 'org.fake.File2.java',
+      '/path/2/File3.java': 'org.fake.File3.java'
+    }
+    package_to_emma = {
+      'org.fake.one.File1.java': '/emma/1.html',
+      'org.fake.File2.java': '/emma/2.html',
+      'org.fake.File3.java': '/emma/3.html'
+    }
+    with mock.patch('os.path.exists', return_value=True):
+      coverage_stats = self.simple_coverage
+      coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
+          return_value=package_to_emma)
+      coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
+      result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
+          package_names.keys())
+    self.assertDictEqual(result_dict, self.good_source_to_emma)
+
+  def testGetCoverageDictForFile(self):
+    line_coverage = self.line_coverage
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
+    self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
+    lines = self.lines_for_coverage
+    expected_dict = {
+      'absolute': {
+        'covered': 3.05,
+        'total': 5
+      },
+      'incremental': {
+        'covered': 2.05,
+        'total': 3
+      },
+      'source': [
+        {
+          'line': line_coverage[0].source,
+          'coverage': line_coverage[0].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[0].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[1].source,
+          'coverage': line_coverage[1].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[1].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[2].source,
+          'coverage': line_coverage[2].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[2].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[3].source,
+          'coverage': line_coverage[3].covered_status,
+          'changed': False,
+          'fractional_coverage': line_coverage[3].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[4].source,
+          'coverage': line_coverage[4].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[4].fractional_line_coverage,
+        },
+        {
+          'line': line_coverage[5].source,
+          'coverage': line_coverage[5].covered_status,
+          'changed': True,
+          'fractional_coverage': line_coverage[5].fractional_line_coverage,
+        }
+      ]
+    }
+    result_dict = self.simple_coverage.GetCoverageDictForFile(
+        '/fake/src', lines)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_emptyCoverage(self):
+    expected_dict = {
+      'absolute': {'covered': 0, 'total': 0},
+      'incremental': {'covered': 0, 'total': 0},
+      'source': []
+    }
+    self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
+    self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_missingCoverage(self):
+    self.simple_coverage._source_to_emma = {}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
+    self.assertIsNone(result_dict)
+
+  def testGetCoverageDict_basic(self):
+    files_for_coverage = {
+      '/path/to/1/File1.java': [1, 3, 4],
+      '/path/2/File2.java': [1, 2]
+    }
+    self.simple_coverage._source_to_emma = {
+      '/path/to/1/File1.java': 'emma_1',
+      '/path/2/File2.java': 'emma_2'
+    }
+    coverage_info = {
+      'emma_1': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
+        emma_coverage_stats.LineCoverage(
+            3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+        emma_coverage_stats.LineCoverage(
+            4, '', emma_coverage_stats.COVERED, 1.0)
+      ],
+      'emma_2': [
+        emma_coverage_stats.LineCoverage(
+            1, '', emma_coverage_stats.NOT_COVERED, 1.0),
+        emma_coverage_stats.LineCoverage(
+            2, '', emma_coverage_stats.COVERED, 1.0)
+      ]
+    }
+    expected_dict = {
+      'files': {
+        '/path/2/File2.java': {
+          'absolute': {'covered': 1, 'total': 2},
+          'incremental': {'covered': 1, 'total': 2},
+          'source': [{'changed': True, 'coverage': 0,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        },
+        '/path/to/1/File1.java': {
+          'absolute': {'covered': 2.5, 'total': 3},
+          'incremental': {'covered': 2, 'total': 2},
+          'source': [{'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': False, 'coverage': 2,
+                      'line': '', 'fractional_coverage': 0.5},
+                     {'changed': True, 'coverage': -1,
+                      'line': '', 'fractional_coverage': 1.0},
+                     {'changed': True, 'coverage': 1,
+                      'line': '', 'fractional_coverage': 1.0}]
+        }
+      },
+      'patch': {'incremental': {'covered': 3, 'total': 4}}
+    }
+    # Return the relevant coverage info for each file.
+    self.simple_coverage._emma_parser.GetLineCoverage = (
+        lambda x: coverage_info[x])
+    result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDict_noCoverage(self):
+    result_dict = self.simple_coverage.GetCoverageDict({})
+    self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
+
+
+class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
+  """Tests for GenerateCoverageReport."""
+
+  def testGenerateCoverageReport_missingJsonFile(self):
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        emma_coverage_stats.GenerateCoverageReport('', '', '')
+
+  def testGenerateCoverageReport_invalidJsonFile(self):
+    with self.assertRaises(ValueError):
+      with mock.patch('os.path.exists', return_value=True):
+        MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
+                            line_coverage_file='', out_file_path='',
+                            coverage_dir='')
+
+
+def MockOpenForFunction(func, side_effects, **kwargs):
+  """Allows easy mock open and read for callables that open multiple files.
+
+  Will mock the python open function in a way such that each time read() is
+  called on an open file, the next element in |side_effects| is returned. This
+  makes it easier to test functions that call open() multiple times.
+
+  Args:
+    func: The callable to invoke once mock files are setup.
+    side_effects: A list of return values for each file to return once read.
+      Length of list should be equal to the number of calls to open in
+      |func|.
+    **kwargs: Keyword arguments to be passed to |func|.
+
+  Returns:
+    A tuple containing the return value of |func| and the MagicMock object used
+      to mock all calls to open respectively.
+  """
+  mock_open = mock.mock_open()
+  mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
+                           for side_effect in side_effects]
+  with mock.patch('__builtin__.open', mock_open):
+    return func(**kwargs), mock_open
+
+
+if __name__ == '__main__':
+  # Suppress logging messages.
+  unittest.main(buffer=True)
diff --git a/src/build/android/envsetup.sh b/src/build/android/envsetup.sh
new file mode 100755
index 0000000..7f549d9
--- /dev/null
+++ b/src/build/android/envsetup.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+  local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+
+  # Some tools expect these environment variables.
+  export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+  # ANDROID_HOME is deprecated, but generally means the same thing as
+  # ANDROID_SDK_ROOT, so it shouldn't hurt to set it.
+  export ANDROID_HOME="$ANDROID_SDK_ROOT"
+
+  # Set up PATH to point to SDK-provided (and other) tools, such as 'adb'.
+  export PATH=${CHROME_SRC}/build/android:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/tools/:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/platform-tools:$PATH
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/src/build/android/fast_local_dev_server.py b/src/build/android/fast_local_dev_server.py
new file mode 100755
index 0000000..a35c500
--- /dev/null
+++ b/src/build/android/fast_local_dev_server.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an server to offload non-critical-path GN targets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import queue
+import shutil
+import socket
+import subprocess
+import sys
+import threading
+from typing import Callable, Dict, List, Optional, Tuple
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import server_utils
+
+
+def log(msg: str, *, end: str = ''):
+  # Shrink the message (keeping a 2-char prefix and using the rest of the room
+  # for the suffix) according to the terminal size so it is always one line.
+  width = shutil.get_terminal_size().columns
+  prefix = f'[{TaskStats.prefix()}] '
+  max_msg_width = width - len(prefix)
+  if len(msg) > max_msg_width:
+    length_to_show = max_msg_width - 5  # 2-char prefix + 3-char ellipsis.
+    msg = f'{msg[:2]}...{msg[-length_to_show:]}'
+  # \r to return the carriage to the beginning of line.
+  # \033[K to replace the normal \n to erase until the end of the line.
+  # Avoid the default line ending so the next \r overwrites the same line just
+  #     like ninja's output.
+  print(f'\r{prefix}{msg}\033[K', end=end, flush=True)
+
+
+class TaskStats:
+  """Class to keep track of aggregate stats for all tasks across threads."""
+  _num_processes = 0
+  _completed_tasks = 0
+  _total_tasks = 0
+  _lock = threading.Lock()
+
+  @classmethod
+  def no_running_processes(cls):
+    return cls._num_processes == 0
+
+  @classmethod
+  def add_task(cls):
+    # Only the main thread calls this, so there is no need for locking.
+    cls._total_tasks += 1
+
+  @classmethod
+  def add_process(cls):
+    with cls._lock:
+      cls._num_processes += 1
+
+  @classmethod
+  def remove_process(cls):
+    with cls._lock:
+      cls._num_processes -= 1
+
+  @classmethod
+  def complete_task(cls):
+    with cls._lock:
+      cls._completed_tasks += 1
+
+  @classmethod
+  def prefix(cls):
+    # Ninja's prefix is: [205 processes, 6/734 @ 6.5/s : 0.922s ]
+    # Time taken and task completion rate are not important for the build server
+    # since it is always running in the background and uses idle priority for
+    # its tasks.
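+    # This server's prefix looks like: [2 processes, 6/734]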
+    with cls._lock:
+      word = 'process' if cls._num_processes == 1 else 'processes'
+      return (f'{cls._num_processes} {word}, '
+              f'{cls._completed_tasks}/{cls._total_tasks}')
+
+
+class TaskManager:
+  """Class to encapsulate a threadsafe queue and handle deactivating it."""
+
+  def __init__(self):
+    self._queue: queue.SimpleQueue[Task] = queue.SimpleQueue()
+    self._deactivated = False
+
+  def add_task(self, task: Task):
+    assert not self._deactivated
+    TaskStats.add_task()
+    self._queue.put(task)
+    log(f'QUEUED {task.name}')
+    self._maybe_start_tasks()
+
+  def deactivate(self):
+    self._deactivated = True
+    while not self._queue.empty():
+      try:
+        task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      task.terminate()
+
+  @staticmethod
+  def _num_running_processes():
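+    # On Linux, /proc/stat contains a line like 'procs_running 3'.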
+    with open('/proc/stat') as f:
+      for line in f:
+        if line.startswith('procs_running'):
+          return int(line.rstrip().split()[1])
+    assert False, 'Could not read /proc/stat'
+
+  def _maybe_start_tasks(self):
+    if self._deactivated:
+      return
+    # Include load avg so that a small dip in the number of currently running
+    # processes will not cause new tasks to be started while the overall load is
+    # heavy.
+    cur_load = max(self._num_running_processes(), os.getloadavg()[0])
+    num_started = 0
+    # Always start a task if we don't have any running, so that all tasks are
+    # eventually finished. Try starting up tasks when the overall load is light.
+    # Limit to at most 2 new tasks to prevent ramping up too fast. There is a
+    # chance where multiple threads call _maybe_start_tasks and each gets to
+    # spawn up to 2 new tasks, but since the only downside is some build tasks
+    # get worked on earlier rather than later, it is not worth mitigating.
+    while num_started < 2 and (TaskStats.no_running_processes()
+                               or num_started + cur_load < os.cpu_count()):
+      try:
+        next_task = self._queue.get_nowait()
+      except queue.Empty:
+        return
+      num_started += next_task.start(self._maybe_start_tasks)
+
+
+# TODO(wnwen): Break this into Request (encapsulating what ninja sends) and Task
+#              when a Request starts to be run. This would eliminate ambiguity
+#              about when and whether _proc/_thread are initialized.
+class Task:
+  """Class to represent one task and operations on it."""
+
+  def __init__(self, name: str, cwd: str, cmd: List[str], stamp_file: str):
+    self.name = name
+    self.cwd = cwd
+    self.cmd = cmd
+    self.stamp_file = stamp_file
+    self._terminated = False
+    self._lock = threading.Lock()
+    self._proc: Optional[subprocess.Popen] = None
+    self._thread: Optional[threading.Thread] = None
+    self._return_code: Optional[int] = None
+
+  @property
+  def key(self):
+    return (self.cwd, self.name)
+
+  def start(self, on_complete_callback: Callable[[], None]) -> int:
+    """Starts the task if it has not already been terminated.
+
+    Returns the number of processes that have been started. This is called at
+    most once when the task is popped off the task queue."""
+
+    # The environment variable forces the script to actually run in order to
+    # avoid infinite recursion.
+    env = os.environ.copy()
+    env[server_utils.BUILD_SERVER_ENV_VARIABLE] = '1'
+
+    with self._lock:
+      if self._terminated:
+        return 0
+      # Use os.nice(19) to ensure the lowest priority (idle) for these analysis
+      # tasks since we want to avoid slowing down the actual build.
+      # TODO(wnwen): Use ionice to reduce resource consumption.
+      TaskStats.add_process()
+      log(f'STARTING {self.name}')
+      self._proc = subprocess.Popen(
+          self.cmd,
+          stdout=subprocess.PIPE,
+          stderr=subprocess.STDOUT,
+          cwd=self.cwd,
+          env=env,
+          text=True,
+          preexec_fn=lambda: os.nice(19),
+      )
+      self._thread = threading.Thread(
+          target=self._complete_when_process_finishes,
+          args=(on_complete_callback, ))
+      self._thread.start()
+      return 1
+
+  def terminate(self):
+    """Can be called multiple times to cancel and ignore the task's output."""
+
+    with self._lock:
+      if self._terminated:
+        return
+      self._terminated = True
+    # It is safe to access _proc and _thread outside of _lock since they are
+    # only changed by self.start, which holds _lock while self._terminated is
+    # false. Since we have just set self._terminated to true inside of _lock,
+    # we know that neither _proc nor _thread will be changed from this point
+    # onwards.
+    if self._proc:
+      self._proc.terminate()
+      self._proc.wait()
+    # Ensure that self._complete is called either by the thread or by us.
+    if self._thread:
+      self._thread.join()
+    else:
+      self._complete()
+
+  def _complete_when_process_finishes(self,
+                                      on_complete_callback: Callable[[], None]):
+    assert self._proc
+    # We know Popen.communicate will return a str and not bytes since the
+    # process is constructed with text=True.
+    stdout: str = self._proc.communicate()[0]
+    self._return_code = self._proc.returncode
+    TaskStats.remove_process()
+    self._complete(stdout)
+    on_complete_callback()
+
+  def _complete(self, stdout: str = ''):
+    """Update the user and ninja after the task has run or been terminated.
+
+    This method should only be run once per task. Avoid modifying the task so
+    that this method does not need locking."""
+
+    TaskStats.complete_task()
+    failed = False
+    if self._terminated:
+      log(f'TERMINATED {self.name}')
+      # Ignore stdout as it is now outdated.
+      failed = True
+    else:
+      log(f'FINISHED {self.name}')
+      if stdout or self._return_code != 0:
+        failed = True
+        # An extra new line is needed since we want to preserve the previous
+        # log line. Use a single print so that it is threadsafe.
+        # TODO(wnwen): Improve stdout display by parsing over it and moving the
+        #              actual error to the bottom. Otherwise long command lines
+        #              in the Traceback section obscure the actual error(s).
+        print('\n' + '\n'.join([
+            f'FAILED: {self.name}',
+            f'Return code: {self._return_code}',
+            ' '.join(self.cmd),
+            stdout,
+        ]))
+
+    if failed:
+      # Force ninja to consider failed targets as dirty.
+      try:
+        os.unlink(os.path.join(self.cwd, self.stamp_file))
+      except FileNotFoundError:
+        pass
+    else:
+      # Ninja will rebuild targets when their inputs change even if their stamp
+      # file has a later modified time. Thus we do not need to worry about the
+      # script being run by the build server updating the mtime incorrectly.
+      pass
+
+
+def _listen_for_request_data(sock: socket.socket):
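+  # Each connection delivers one JSON-encoded request of the form
+  #   {"name": ..., "cwd": ..., "cmd": [...], "stamp_file": ...}
+  # (the keys consumed by _process_requests below).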
+  while True:
+    conn = sock.accept()[0]
+    received = []
+    with conn:
+      while True:
+        data = conn.recv(4096)
+        if not data:
+          break
+        received.append(data)
+    if received:
+      yield json.loads(b''.join(received))
+
+
+def _process_requests(sock: socket.socket):
+  # Since dicts in Python can contain anything, explicitly type tasks to help
+  # make static type checking more useful.
+  tasks: Dict[Tuple[str, str], Task] = {}
+  task_manager = TaskManager()
+  try:
+    for data in _listen_for_request_data(sock):
+      task = Task(name=data['name'],
+                  cwd=data['cwd'],
+                  cmd=data['cmd'],
+                  stamp_file=data['stamp_file'])
+      existing_task = tasks.get(task.key)
+      if existing_task:
+        existing_task.terminate()
+      tasks[task.key] = task
+      task_manager.add_task(task)
+  except KeyboardInterrupt:
+    log('STOPPING SERVER...', end='\n')
+    # Gracefully shut down the task manager, terminating all queued tasks.
+    task_manager.deactivate()
+    # Terminate all currently running tasks.
+    for task in tasks.values():
+      task.terminate()
+    log('STOPPED', end='\n')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.parse_args()
+  with socket.socket(socket.AF_UNIX) as sock:
+    sock.bind(server_utils.SOCKET_ADDRESS)
+    sock.listen()
+    _process_requests(sock)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/generate_jacoco_report.py b/src/build/android/generate_jacoco_report.py
new file mode 100755
index 0000000..d0a9987
--- /dev/null
+++ b/src/build/android/generate_jacoco_report.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env vpython
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates Jacoco coverage files to produce output."""
+
+from __future__ import print_function
+
+import argparse
+import fnmatch
+import json
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+
+# Source paths should be passed to Jacoco in a way that the relative file paths
+# reflect the class package name.
+_PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium']
+
+# The sources_json_file is generated by jacoco_instr.py with source directories
+# and input path to non-instrumented jars.
+# e.g.
+# 'source_dirs': [
+#   "chrome/android/java/src/org/chromium/chrome/browser/toolbar/bottom",
+#   "chrome/android/java/src/org/chromium/chrome/browser/ui/system",
+# ...]
+# 'input_path':
+#   '$CHROMIUM_OUTPUT_DIR/\
+#    obj/chrome/android/features/tab_ui/java__process_prebuilt-filtered.jar'
+
+_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
+
+# These should match the jar class files generated in internal_rules.gni
+_DEVICE_CLASS_EXCLUDE_SUFFIX = 'host_filter.jar'
+_HOST_CLASS_EXCLUDE_SUFFIX = 'device_filter.jar'
+
+
+def _CreateClassfileArgs(class_files, exclude_suffix=None):
+  """Returns a list of files that don't have a given suffix.
+
+  Args:
+    class_files: A list of class files.
+    exclude_suffix: Suffix to look for to exclude.
+
+  Returns:
+    A list of files that don't use the suffix.
+  """
+  result_class_files = []
+  for f in class_files:
+    if exclude_suffix:
+      if not f.endswith(exclude_suffix):
+        result_class_files += ['--classfiles', f]
+    else:
+      result_class_files += ['--classfiles', f]
+
+  return result_class_files
+
+
+def _GenerateReportOutputArgs(args, class_files, report_type):
+  class_jar_exclude = None
+  if report_type == 'device':
+    class_jar_exclude = _DEVICE_CLASS_EXCLUDE_SUFFIX
+  elif report_type == 'host':
+    class_jar_exclude = _HOST_CLASS_EXCLUDE_SUFFIX
+
+  cmd = _CreateClassfileArgs(class_files, class_jar_exclude)
+  if args.format == 'html':
+    report_dir = os.path.join(args.output_dir, report_type)
+    if not os.path.exists(report_dir):
+      os.makedirs(report_dir)
+    cmd += ['--html', report_dir]
+  elif args.format == 'xml':
+    cmd += ['--xml', args.output_file]
+  elif args.format == 'csv':
+    cmd += ['--csv', args.output_file]
+
+  return cmd
+
+
+def _GetFilesWithSuffix(root_dir, suffix):
+  """Gets all files with a given suffix.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    suffix: Suffix to look for.
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*' + suffix)
+    files.extend([os.path.join(root, basename) for basename in basenames])
+
+  return files
+
+
+def _GetExecFiles(root_dir, exclude_substr=None):
+  """ Gets all .exec files
+
+  Args:
+    root_dir: Root directory in which to search for files.
+    exclude_substr: Substring which should be absent in filename. If None, all
+      files are selected.
+
+  Returns:
+    A list of absolute paths to .exec files.
+  """
+  all_exec_files = _GetFilesWithSuffix(root_dir, ".exec")
+  valid_exec_files = []
+  for exec_file in all_exec_files:
+    if not exclude_substr or exclude_substr not in exec_file:
+      valid_exec_files.append(exec_file)
+  return valid_exec_files
+
+
+def _ParseArguments(parser):
+  """Parses the command line arguments.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    The parsed arguments.
+  """
+  parser.add_argument(
+      '--format',
+      required=True,
+      choices=['html', 'xml', 'csv'],
+      help='Output report format. Choose one from html, xml and csv.')
+  parser.add_argument(
+      '--device-or-host',
+      choices=['device', 'host'],
+      help='Selection on whether to use the device classpath files or the '
+      'host classpath files. Host would typically be used for junit tests '
+      'and device for tests that run on the device. Only used for xml and csv'
+      ' reports.')
+  parser.add_argument('--output-dir', help='html report output directory.')
+  parser.add_argument('--output-file',
+                      help='xml file to write device coverage results.')
+  parser.add_argument(
+      '--coverage-dir',
+      required=True,
+      help='Root of the directory in which to search for '
+      'coverage data (.exec) files.')
+  parser.add_argument('--exec-filename-excludes',
+                      required=False,
+                      help='Excludes .exec files which contain a particular '
+                      'substring in their name')
+  parser.add_argument(
+      '--sources-json-dir',
+      help='Root of the directory in which to search for '
+      '*__jacoco_sources.json files.')
+  parser.add_argument(
+      '--class-files',
+      nargs='+',
+      help='Location of Java non-instrumented class files. '
+      'Use non-instrumented jars instead of instrumented jars. '
+      'e.g. use chrome_java__process_prebuilt_(host/device)_filter.jar instead '
+      'of chrome_java__process_prebuilt-instrumented.jar')
+  parser.add_argument(
+      '--sources',
+      nargs='+',
+      help='Location of the source files. '
+      'Specified source folders must be the direct parent of the folders '
+      'that define the Java packages. '
+      'e.g. <src_dir>/chrome/android/java/src/')
+  parser.add_argument(
+      '--cleanup',
+      action='store_true',
+      help='If set, removes coverage files generated at '
+      'runtime.')
+  args = parser.parse_args()
+
+  if args.format == 'html' and not args.output_dir:
+    parser.error('--output-dir needed for html report.')
+  if args.format in ('csv', 'xml'):
+    if not args.output_file:
+      parser.error('--output-file needed for xml/csv reports.')
+    if not args.device_or_host and args.sources_json_dir:
+      parser.error('--device-or-host selection needed with --sources-json-dir')
+  if not (args.sources_json_dir or args.class_files):
+    parser.error('At least either --sources-json-dir or --class-files needed.')
+  return args
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  args = _ParseArguments(parser)
+
+  devil_chromium.Initialize()
+
+  coverage_files = _GetExecFiles(args.coverage_dir, args.exec_filename_excludes)
+  if not coverage_files:
+    parser.error('No coverage file found under %s' % args.coverage_dir)
+  print('Found coverage files: %s' % str(coverage_files))
+
+  class_files = []
+  source_dirs = []
+  if args.sources_json_dir:
+    sources_json_files = _GetFilesWithSuffix(args.sources_json_dir,
+                                             _SOURCES_JSON_FILES_SUFFIX)
+    for f in sources_json_files:
+      with open(f, 'r') as json_file:
+        data = json.load(json_file)
+        class_files.extend(data['input_path'])
+        source_dirs.extend(data['source_dirs'])
+
+  # Fix source directories as direct parent of Java packages.
+  fixed_source_dirs = set()
+  for path in source_dirs:
+    for partial in _PARTIAL_PACKAGE_NAMES:
+      if partial in path:
+        fixed_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 path[:path.index(partial)])
+        fixed_source_dirs.add(fixed_dir)
+        break
+
+  if args.class_files:
+    class_files += args.class_files
+  if args.sources:
+    fixed_source_dirs.update(args.sources)
+
+  cmd = [
+      'java', '-jar',
+      os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'jacoco', 'lib',
+                   'jacococli.jar'), 'report'
+  ] + coverage_files
+
+  for source in fixed_source_dirs:
+    cmd += ['--sourcefiles', source]
+
+  if args.format == 'html':
+    # Both reports are generated for html as the cq bot generates an html
+    # report and we wouldn't know which one a developer needed.
+    device_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'device')
+    host_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'host')
+    device_exit_code = cmd_helper.RunCmd(device_cmd)
+    host_exit_code = cmd_helper.RunCmd(host_cmd)
+    exit_code = device_exit_code or host_exit_code
+  else:
+    cmd = cmd + _GenerateReportOutputArgs(args, class_files,
+                                          args.device_or_host)
+    exit_code = cmd_helper.RunCmd(cmd)
+
+  if args.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  # Command tends to exit with status 0 even when it actually failed.
+  if not exit_code:
+    if args.format == 'html':
+      if not os.path.isdir(args.output_dir) or not os.listdir(args.output_dir):
+        print('No report generated at %s' % args.output_dir)
+        exit_code = 1
+    elif not os.path.isfile(args.output_file):
+      print('No device coverage report generated at %s' % args.output_file)
+      exit_code = 1
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gradle/AndroidManifest.xml b/src/build/android/gradle/AndroidManifest.xml
new file mode 100644
index 0000000..f3e50e0
--- /dev/null
+++ b/src/build/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright 2018 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by Android Studio's _all target.
+  No <uses-sdk> is allowed due to https://crbug.com/841529.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.dummy">
+</manifest>
diff --git a/src/build/android/gradle/android.jinja b/src/build/android/gradle/android.jinja
new file mode 100644
index 0000000..40d4506
--- /dev/null
+++ b/src/build/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+        {{ prefix }} {
+{% if variables.android_manifest is defined %}
+            manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+            java.srcDirs = [
+{% for path in variables.java_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+            java.filter.exclude([
+{% for path in variables.java_excludes %}
+                "{{ path }}",
+{% endfor %}
+            ])
+{% endif %}
+{% if variables.jni_libs is defined %}
+            jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+            res.srcDirs = [
+{% for path in variables.res_dirs %}
+                "{{ path }}",
+{% endfor %}
+            ]
+{% endif %}
+        }
+{% endif %}
+{% endmacro %}
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+    compileSdkVersion "{{ compile_sdk_version }}"
+
+    defaultConfig {
+        vectorDrawables.useSupportLibrary = true
+        minSdkVersion 21
+        targetSdkVersion {{ target_sdk_version }}
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
+{% if native is defined %}
+    externalNativeBuild {
+        cmake {
+            path "CMakeLists.txt"
+        }
+    }
+{% endif %}
+
+    sourceSets {
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+        {{ name }} {
+            aidl.srcDirs = []
+            assets.srcDirs = []
+            java.srcDirs = []
+            jni.srcDirs = []
+            renderscript.srcDirs = []
+            res.srcDirs = []
+            resources.srcDirs = []
+        }
+{% endfor %}
+
+{{ expand_sourceset(main, 'main') }}
+{{ expand_sourceset(test, 'test') }}
+{% if android_test is defined %}
+{% for t in android_test %}
+{{ expand_sourceset(t, 'androidTest') }}
+{% endfor %}
+{% endif %}
+    }
+}
+
+{% include 'dependencies.jinja' %}
+
+afterEvaluate {
+    def tasksToDisable = tasks.findAll {
+        return (it.name.equals('generateDebugSources')  // causes unwanted AndroidManifest.java
+                || it.name.equals('generateReleaseSources')
+                || it.name.endsWith('BuildConfig')  // causes unwanted BuildConfig.java
+                || it.name.equals('preDebugAndroidTestBuild')
+{% if not use_gradle_process_resources %}
+                || it.name.endsWith('Assets')
+                || it.name.endsWith('Resources')
+                || it.name.endsWith('ResValues')
+{% endif %}
+                || it.name.endsWith('Aidl')
+                || it.name.endsWith('Renderscript')
+                || it.name.endsWith('Shaders'))
+    }
+    tasksToDisable.each { Task task ->
+      task.enabled = false
+    }
+}
diff --git a/src/build/android/gradle/cmake.jinja b/src/build/android/gradle/cmake.jinja
new file mode 100644
index 0000000..b727388
--- /dev/null
+++ b/src/build/android/gradle/cmake.jinja
@@ -0,0 +1,25 @@
+{# Copyright 2018 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+# Generated by //build/android/generate_gradle.py
+
+cmake_minimum_required(VERSION 3.4.1)
+
+project(chrome C CXX)
+
+{% if native.includes is defined %}
+include_directories(
+{% for path in native.includes %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
+
+# Android Studio will index faster when adding all sources into one library.
+{% if native.sources is defined %}
+add_library("chromium"
+{% for path in native.sources %}
+    {{ path }}
+{% endfor %}
+)
+{% endif %}
diff --git a/src/build/android/gradle/dependencies.jinja b/src/build/android/gradle/dependencies.jinja
new file mode 100644
index 0000000..87bc312
--- /dev/null
+++ b/src/build/android/gradle/dependencies.jinja
@@ -0,0 +1,28 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_deps(variables, prefix) %}
+{% if variables is defined %}
+{% if variables.prebuilts is defined %}
+{% for path in variables.prebuilts %}
+    {{ prefix }} files("{{ path }}")
+{% endfor %}
+{% endif %}
+{% if variables.java_project_deps is defined %}
+{% for proj in variables.java_project_deps %}
+    {{ prefix }} project(":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% if variables.android_project_deps is defined %}
+{% for proj in variables.android_project_deps %}
+    {{ prefix }} project(path: ":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+dependencies {
+{{ expand_deps(main, 'implementation') }}
+{{ expand_deps(test, 'testImplementation') }}
+{{ expand_deps(android_test, 'androidTestImplementation') }}
+}
diff --git a/src/build/android/gradle/generate_gradle.py b/src/build/android/gradle/generate_gradle.py
new file mode 100755
index 0000000..80d0b0a
--- /dev/null
+++ b/src/build/android/gradle/generate_gradle.py
@@ -0,0 +1,932 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import collections
+import glob
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.dirname(_BUILD_ANDROID))
+import gn_helpers
+
+_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
+                                 'depot_tools')
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle',
+    'AndroidManifest.xml')
+_FILE_DIR = os.path.dirname(__file__)
+_GENERATED_JAVA_SUBDIR = 'generated_java'
+_JNI_LIBS_SUBDIR = 'symlinked-libs'
+_ARMEABI_SUBDIR = 'armeabi'
+_GRADLE_BUILD_FILE = 'build.gradle'
+_CMAKE_FILE = 'CMakeLists.txt'
+# This needs to come first alphabetically among all modules.
+_MODULE_ALL = '_all'
+_SRC_INTERNAL = os.path.join(
+    os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk'
+
+_DEFAULT_TARGETS = [
+    '//android_webview/test/embedded_test_server:aw_net_test_support_apk',
+    '//android_webview/test:webview_instrumentation_apk',
+    '//android_webview/test:webview_instrumentation_test_apk',
+    '//base:base_junit_tests',
+    '//chrome/android:chrome_junit_tests',
+    '//chrome/android:chrome_public_apk',
+    '//chrome/android:chrome_public_test_apk',
+    '//content/public/android:content_junit_tests',
+    '//content/shell/android:content_shell_apk',
+    # Below must be included even with --all since they are libraries.
+    '//base/android/jni_generator:jni_processor',
+    '//tools/android/errorprone_plugin:errorprone_plugin_java',
+]
+
+_EXCLUDED_PREBUILT_JARS = [
+    # Android Studio already provides Desugar runtime.
+    # Including it would cause linking error because of a duplicate class.
+    'lib.java/third_party/bazel/desugar/Desugar-runtime.jar'
+]
+
+
+def _TemplatePath(name):
+  return os.path.join(_FILE_DIR, '{}.jinja'.format(name))
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+  """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+  If new_cwd is not specified, absolute paths are returned.
+  If old_cwd is not specified, constants.GetOutDirectory() is assumed.
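+
+  e.g. _RebasePath('gen/foo', new_cwd=<out_dir>/gradle) -> '../gen/foo'
+  (illustrative paths).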
+  """
+  if path_or_list is None:
+    return []
+  if not isinstance(path_or_list, basestring):
+    return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+  if old_cwd is None:
+    old_cwd = constants.GetOutDirectory()
+  old_cwd = os.path.abspath(old_cwd)
+  if new_cwd:
+    new_cwd = os.path.abspath(new_cwd)
+    return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+  return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
+
+def _IsSubpathOf(child, parent):
+  """Returns whether |child| is a subpath of |parent|."""
+  return not os.path.relpath(child, parent).startswith(os.pardir)
+
+
+def _WriteFile(path, data):
+  """Writes |data| to |path|, constucting parent directories if necessary."""
+  logging.info('Writing %s', path)
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    os.makedirs(dirname)
+  with codecs.open(path, 'w', 'utf-8') as output_file:
+    output_file.write(data)
+
+
+def _RunGnGen(output_dir, args=None):
+  cmd = [os.path.join(_DEPOT_TOOLS_PATH, 'gn'), 'gen', output_dir]
+  if args:
+    cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _RunNinja(output_dir, args):
+  # Don't use the version within _DEPOT_TOOLS_PATH, since most devs don't use
+  # that one when building.
+  cmd = ['autoninja', '-C', output_dir]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _QueryForAllGnTargets(output_dir):
+  cmd = [
+      os.path.join(_BUILD_ANDROID, 'list_java_targets.py'), '--gn-labels',
+      '--nested', '--build', '--output-directory', output_dir
+  ]
+  logging.info('Running: %r', cmd)
+  return subprocess.check_output(cmd).splitlines()
+
+
+class _ProjectEntry(object):
+  """Helper class for project entries."""
+
+  _cached_entries = {}
+
+  def __init__(self, gn_target):
+    # Use _ProjectEntry.FromGnTarget instead for caching.
+    self._gn_target = gn_target
+    self._build_config = None
+    self._java_files = None
+    self._all_entries = None
+    self.android_test_entries = []
+
+  @classmethod
+  def FromGnTarget(cls, gn_target):
+    assert gn_target.startswith('//'), gn_target
+    if ':' not in gn_target:
+      gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+    if gn_target not in cls._cached_entries:
+      cls._cached_entries[gn_target] = cls(gn_target)
+    return cls._cached_entries[gn_target]
+
+  @classmethod
+  def FromBuildConfigPath(cls, path):
+    prefix = 'gen/'
+    suffix = '.build_config'
+    assert path.startswith(prefix) and path.endswith(suffix), path
+    subdir = path[len(prefix):-len(suffix)]
+    gn_target = '//%s:%s' % (os.path.split(subdir))
+    return cls.FromGnTarget(gn_target)
+
+  def __hash__(self):
+    return hash(self._gn_target)
+
+  def __eq__(self, other):
+    return self._gn_target == other.GnTarget()
+
+  def GnTarget(self):
+    return self._gn_target
+
+  def NinjaTarget(self):
+    return self._gn_target[2:]
+
+  def GnBuildConfigTarget(self):
+    return '%s__build_config_crbug_908819' % self._gn_target
+
+  def GradleSubdir(self):
+    """Returns the output subdirectory."""
+    ninja_target = self.NinjaTarget()
+    # Support targets at the root level. e.g. //:foo
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    return ninja_target.replace(':', os.path.sep)
+
+  def GeneratedJavaSubdir(self):
+    return _RebasePath(
+        os.path.join('gen', self.GradleSubdir(), _GENERATED_JAVA_SUBDIR))
+
+  def ProjectName(self):
+    """Returns the Gradle project name."""
+    return self.GradleSubdir().replace(os.path.sep, '.')
+
+  def BuildConfig(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+      with open(_RebasePath(path)) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def DepsInfo(self):
+    return self.BuildConfig()['deps_info']
+
+  def Gradle(self):
+    return self.BuildConfig()['gradle']
+
+  def Javac(self):
+    return self.BuildConfig()['javac']
+
+  def GetType(self):
+    """Returns the target type from its .build_config."""
+    return self.DepsInfo()['type']
+
+  def IsValid(self):
+    return self.GetType() in (
+        'android_apk',
+        'android_app_bundle_module',
+        'java_library',
+        'java_annotation_processor',
+        'java_binary',
+        'junit_binary',
+    )
+
+  def ResSources(self):
+    return self.DepsInfo().get('lint_resource_sources', [])
+
+  def JavaFiles(self):
+    if self._java_files is None:
+      java_sources_file = self.DepsInfo().get('java_sources_file')
+      java_files = []
+      if java_sources_file:
+        java_sources_file = _RebasePath(java_sources_file)
+        java_files = build_utils.ReadSourcesList(java_sources_file)
+      self._java_files = java_files
+    return self._java_files
+
+  def PrebuiltJars(self):
+    all_jars = self.Gradle().get('dependent_prebuilt_jars', [])
+    return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS]
+
+  def AllEntries(self):
+    """Returns a list of all entries that the current entry depends on.
+
+    This includes the entry itself to make iterating simpler."""
+    if self._all_entries is None:
+      logging.debug('Generating entries for %s', self.GnTarget())
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_android_projects']]
+      deps.extend(_ProjectEntry.FromBuildConfigPath(p)
+          for p in self.Gradle()['dependent_java_projects'])
+      all_entries = set()
+      for dep in deps:
+        all_entries.update(dep.AllEntries())
+      all_entries.add(self)
+      self._all_entries = list(all_entries)
+    return self._all_entries
+
+
+class _ProjectContextGenerator(object):
+  """Helper class to generate gradle build files"""
+  def __init__(self, project_dir, build_vars, use_gradle_process_resources,
+               jinja_processor, split_projects, channel):
+    self.project_dir = project_dir
+    self.build_vars = build_vars
+    self.use_gradle_process_resources = use_gradle_process_resources
+    self.jinja_processor = jinja_processor
+    self.split_projects = split_projects
+    self.channel = channel
+    self.processed_java_dirs = set()
+    self.processed_prebuilts = set()
+    self.processed_res_dirs = set()
+
+  def _GenJniLibs(self, root_entry):
+    libraries = []
+    for entry in self._GetEntries(root_entry):
+      libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
+    if libraries:
+      return _CreateJniLibsDir(constants.GetOutDirectory(),
+          self.EntryOutputDir(root_entry), libraries)
+    return []
+
+  def _GenJavaDirs(self, root_entry):
+    java_files = []
+    for entry in self._GetEntries(root_entry):
+      java_files += entry.JavaFiles()
+    java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes(
+        constants.GetOutDirectory(), java_files)
+    return java_dirs, excludes
+
+  def _GenCustomManifest(self, entry):
+    """Returns the path to the generated AndroidManifest.xml.
+
+    Gradle uses package id from manifest when generating R.class. So, we need
+    to generate a custom manifest if we let gradle process resources. We cannot
+    simply set android.defaultConfig.applicationId because it is not supported
+    for library targets."""
+    resource_packages = entry.Javac().get('resource_packages')
+    if not resource_packages:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'an unknown package. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+    elif len(resource_packages) > 1:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+          'multiple packages. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+
+    variables = {'package': resource_packages[0]}
+    data = self.jinja_processor.Render(_TemplatePath('manifest'), variables)
+    output_file = os.path.join(
+        self.EntryOutputDir(entry), 'AndroidManifest.xml')
+    _WriteFile(output_file, data)
+
+    return output_file
+
+  def _Relativize(self, entry, paths):
+    return _RebasePath(paths, self.EntryOutputDir(entry))
+
+  def _GetEntries(self, entry):
+    if self.split_projects:
+      return [entry]
+    return entry.AllEntries()
+
+  def EntryOutputDir(self, entry):
+    return os.path.join(self.project_dir, entry.GradleSubdir())
+
+  def GeneratedInputs(self, root_entry):
+    generated_inputs = set()
+    for entry in self._GetEntries(root_entry):
+      generated_inputs.update(entry.PrebuiltJars())
+    return generated_inputs
+
+  def GenerateManifest(self, root_entry):
+    android_manifest = root_entry.DepsInfo().get('android_manifest')
+    if not android_manifest:
+      android_manifest = self._GenCustomManifest(root_entry)
+    return self._Relativize(root_entry, android_manifest)
+
+  def Generate(self, root_entry):
+    # TODO(agrieve): Add an option to use interface jars and see if that speeds
+    # things up at all.
+    variables = {}
+    java_dirs, excludes = self._GenJavaDirs(root_entry)
+    java_dirs.extend(
+        e.GeneratedJavaSubdir() for e in self._GetEntries(root_entry))
+    self.processed_java_dirs.update(java_dirs)
+    java_dirs.sort()
+    variables['java_dirs'] = self._Relativize(root_entry, java_dirs)
+    variables['java_excludes'] = excludes
+    variables['jni_libs'] = self._Relativize(
+        root_entry, set(self._GenJniLibs(root_entry)))
+    prebuilts = set(
+        p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
+    self.processed_prebuilts.update(prebuilts)
+    variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
+    res_sources_files = _RebasePath(
+        set(p for e in self._GetEntries(root_entry) for p in e.ResSources()))
+    res_sources = []
+    for res_sources_file in res_sources_files:
+      res_sources.extend(build_utils.ReadSourcesList(res_sources_file))
+    res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources)
+    # Do not add generated resources for the all module since it creates many
+    # duplicates, and currently resources are only used for editing.
+    self.processed_res_dirs.update(res_dirs)
+    variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
+    if self.split_projects:
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_android_projects']]
+      variables['android_project_deps'] = [d.ProjectName() for d in deps]
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_java_projects']]
+      variables['java_project_deps'] = [d.ProjectName() for d in deps]
+    return variables
+
+
+def _ComputeJavaSourceDirs(java_files):
+  """Returns a dictionary of source dirs with each given files in one."""
+  found_roots = {}
+  for path in java_files:
+    path_root = path
+    # Recognize these tokens as top-level.
+    while True:
+      path_root = os.path.dirname(path_root)
+      basename = os.path.basename(path_root)
+      assert basename, 'Failed to find source dir for ' + path
+      if basename in ('java', 'src'):
+        break
+      if basename in ('javax', 'org', 'com'):
+        path_root = os.path.dirname(path_root)
+        break
+    if path_root not in found_roots:
+      found_roots[path_root] = []
+    found_roots[path_root].append(path)
+  return found_roots
+
+
+def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
+  """Returns exclude patters to exclude unwanted files but keep wanted files.
+
+  - Shortens exclude list by globbing if possible.
+  - Exclude patterns are relative paths from the parent directory.
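+  - e.g. if dir/ contains Wanted.java and Unwanted.java, the excludes are
+    ['dir/Unwanted.java']; if nothing in dir/ is wanted, ['dir/*.java'].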
+  """
+  excludes = []
+  files_to_include = set(wanted_files)
+  files_to_exclude = set(unwanted_files)
+  while files_to_exclude:
+    unwanted_file = files_to_exclude.pop()
+    target_exclude = os.path.join(
+        os.path.dirname(unwanted_file), '*.java')
+    found_files = set(glob.glob(target_exclude))
+    valid_files = found_files & files_to_include
+    if valid_files:
+      excludes.append(os.path.relpath(unwanted_file, parent_dir))
+    else:
+      excludes.append(os.path.relpath(target_exclude, parent_dir))
+      files_to_exclude -= found_files
+  return excludes
+
+
+def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+  """Computes the list of java source directories and exclude patterns.
+
+  1. Computes the root java source directories from the list of files.
+  2. Computes exclude patterns that match only the extra files.
+  3. Returns the list of java source directories and exclude patterns.
+  """
+  java_dirs = []
+  excludes = []
+  if java_files:
+    java_files = _RebasePath(java_files)
+    computed_dirs = _ComputeJavaSourceDirs(java_files)
+    java_dirs = computed_dirs.keys()
+    all_found_java_files = set()
+
+    for directory, files in computed_dirs.iteritems():
+      found_java_files = build_utils.FindInDirectory(directory, '*.java')
+      all_found_java_files.update(found_java_files)
+      unwanted_java_files = set(found_java_files) - set(files)
+      if unwanted_java_files:
+        logging.debug('Directory requires excludes: %s', directory)
+        excludes.extend(
+            _ComputeExcludeFilters(files, unwanted_java_files, directory))
+
+    missing_java_files = set(java_files) - all_found_java_files
+    # Warn only about non-generated files that are missing.
+    missing_java_files = [p for p in missing_java_files
+                          if not p.startswith(output_dir)]
+    if missing_java_files:
+      logging.warning(
+          'Some java files were not found: %s', missing_java_files)
+
+  return java_dirs, excludes
+
+
+def _CreateRelativeSymlink(target_path, link_path):
+  link_dir = os.path.dirname(link_path)
+  relpath = os.path.relpath(target_path, link_dir)
+  logging.debug('Creating symlink %s -> %s', link_path, relpath)
+  os.symlink(relpath, link_path)
+
+
+def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
+  """Creates directory with symlinked .so files if necessary.
+
+  Returns list of JNI libs directories."""
+
+  if so_files:
+    symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR)
+    shutil.rmtree(symlink_dir, True)
+    abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR)
+    if not os.path.exists(abi_dir):
+      os.makedirs(abi_dir)
+    for so_file in so_files:
+      target_path = os.path.join(output_dir, so_file)
+      symlinked_path = os.path.join(abi_dir, so_file)
+      _CreateRelativeSymlink(target_path, symlinked_path)
+
+    return [symlink_dir]
+
+  return []
+
+
+def _GenerateLocalProperties(sdk_dir):
+  """Returns the data for local.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      'sdk.dir=%s' % sdk_dir,
+      '',
+  ])
+
+
+def _GenerateGradleWrapperPropertiesCanary():
+  """Returns the data for gradle-wrapper.properties as a string."""
+  # Before May 2020, this wasn't necessary. Might not be necessary at some point
+  # in the future?
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      ('distributionUrl=https\\://services.gradle.org/distributions/'
+       'gradle-6.5-rc-1-all.zip\n'),
+      '',
+  ])
+
+
+def _GenerateGradleProperties():
+  """Returns the data for gradle.properties as a string."""
+  return '\n'.join([
+      '# Generated by //build/android/gradle/generate_gradle.py',
+      '',
+      '# Tells Gradle to show warnings during project sync.',
+      'org.gradle.warning.mode=all',
+      '',
+  ])
+
+
+def _GenerateBaseVars(generator, build_vars):
+  variables = {}
+  variables['compile_sdk_version'] = (
+      'android-%s' % build_vars['compile_sdk_version'])
+  target_sdk_version = build_vars['android_sdk_version']
+  if target_sdk_version.isalpha():
+    target_sdk_version = '"{}"'.format(target_sdk_version)
+  variables['target_sdk_version'] = target_sdk_version
+  variables['use_gradle_process_resources'] = (
+      generator.use_gradle_process_resources)
+  variables['channel'] = generator.channel
+  return variables
+
+
+def _GenerateGradleFile(entry, generator, build_vars, jinja_processor):
+  """Returns the data for a project's build.gradle."""
+  deps_info = entry.DepsInfo()
+  variables = _GenerateBaseVars(generator, build_vars)
+  sourceSetName = 'main'
+
+  if deps_info['type'] == 'android_apk':
+    target_type = 'android_apk'
+  elif deps_info['type'] in ('java_library', 'java_annotation_processor'):
+    is_prebuilt = deps_info.get('is_prebuilt', False)
+    gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False)
+    if is_prebuilt or gradle_treat_as_prebuilt:
+      return None
+    elif deps_info['requires_android']:
+      target_type = 'android_library'
+    else:
+      target_type = 'java_library'
+  elif deps_info['type'] == 'java_binary':
+    target_type = 'java_binary'
+    variables['main_class'] = deps_info.get('main_class')
+  elif deps_info['type'] == 'junit_binary':
+    target_type = 'android_junit'
+    sourceSetName = 'test'
+  else:
+    return None
+
+  variables['target_name'] = os.path.splitext(deps_info['name'])[0]
+  variables['template_type'] = target_type
+  variables['main'] = {}
+  variables[sourceSetName] = generator.Generate(entry)
+  variables['main']['android_manifest'] = generator.GenerateManifest(entry)
+
+  if entry.android_test_entries:
+    variables['android_test'] = []
+    for e in entry.android_test_entries:
+      test_entry = generator.Generate(e)
+      test_entry['android_manifest'] = generator.GenerateManifest(e)
+      variables['android_test'].append(test_entry)
+      for key, value in test_entry.iteritems():
+        if isinstance(value, list):
+          test_entry[key] = sorted(set(value) - set(variables['main'][key]))
+
+  return jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+
+
+# Example: //chrome/android:monochrome
+def _GetNative(relative_func, target_names):
+  """Returns an object containing native c++ sources list and its included path
+
+  Iterate through all target_names and their deps to get the list of included
+  paths and sources."""
+  out_dir = constants.GetOutDirectory()
+  with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
+    projects = json.load(project_file)
+  project_targets = projects['targets']
+  root_dir = projects['build_settings']['root_path']
+  includes = set()
+  processed_target = set()
+  targets_stack = list(target_names)
+  sources = []
+
+  while targets_stack:
+    target_name = targets_stack.pop()
+    if target_name in processed_target:
+      continue
+    processed_target.add(target_name)
+    target = project_targets[target_name]
+    includes.update(target.get('include_dirs', []))
+    targets_stack.extend(target.get('deps', []))
+    # Ignore generated files
+    sources.extend(f for f in target.get('sources', [])
+                   if f.endswith('.cc') and not f.startswith('//out'))
+
+  def process_paths(paths):
+    # Ignores leading //
+    return relative_func(
+        sorted(os.path.join(root_dir, path[2:]) for path in paths))
+
+  return {
+      'sources': process_paths(sources),
+      'includes': process_paths(includes),
+  }
+
+
+def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
+                       jinja_processor, native_targets):
+  """Returns the data for a pseudo build.gradle of all dirs.
+
+  See //docs/android_studio.md for more details."""
+  variables = _GenerateBaseVars(generator, build_vars)
+  target_type = 'android_apk'
+  variables['target_name'] = _MODULE_ALL
+  variables['template_type'] = target_type
+  java_dirs = sorted(generator.processed_java_dirs)
+  prebuilts = sorted(generator.processed_prebuilts)
+  res_dirs = sorted(generator.processed_res_dirs)
+  def Relativize(paths):
+    return _RebasePath(paths, os.path.join(gradle_output_dir, _MODULE_ALL))
+
+  # After clank modularization, the java and javatests code will live side by
+  # side in the same module, so we list both of them in the main target here.
+  main_java_dirs = [d for d in java_dirs if 'junit/' not in d]
+  junit_test_java_dirs = [d for d in java_dirs if 'junit/' in d]
+  variables['main'] = {
+      'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH),
+      'java_dirs': Relativize(main_java_dirs),
+      'prebuilts': Relativize(prebuilts),
+      'java_excludes': ['**/*.java'],
+      'res_dirs': Relativize(res_dirs),
+  }
+  variables['android_test'] = [{
+      'java_dirs': Relativize(junit_test_java_dirs),
+      'java_excludes': ['**/*.java'],
+  }]
+  if native_targets:
+    variables['native'] = _GetNative(
+        relative_func=Relativize, target_names=native_targets)
+  data = jinja_processor.Render(
+      _TemplatePath(target_type.split('_')[0]), variables)
+  _WriteFile(
+      os.path.join(gradle_output_dir, _MODULE_ALL, _GRADLE_BUILD_FILE), data)
+  if native_targets:
+    cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables)
+    _WriteFile(
+        os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data)
+
+
+def _GenerateRootGradle(jinja_processor, channel):
+  """Returns the data for the root project's build.gradle."""
+  return jinja_processor.Render(_TemplatePath('root'), {'channel': channel})
+
+
+def _GenerateSettingsGradle(project_entries):
+  """Returns the data for settings.gradle."""
+  project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT))
+  lines = []
+  lines.append('// Generated by //build/android/gradle/generate_gradle.py')
+  lines.append('rootProject.name = "%s"' % project_name)
+  lines.append('rootProject.projectDir = settingsDir')
+  lines.append('')
+  for name, subdir in project_entries:
+    # Example target:
+    # android_webview:android_webview_java__build_config_crbug_908819
+    lines.append('include ":%s"' % name)
+    lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' %
+                 (name, subdir))
+  return '\n'.join(lines)
+
+
+def _FindAllProjectEntries(main_entries):
+  """Returns the list of all _ProjectEntry instances given the root project."""
+  found = set()
+  to_scan = list(main_entries)
+  while to_scan:
+    cur_entry = to_scan.pop()
+    if cur_entry in found:
+      continue
+    found.add(cur_entry)
+    sub_config_paths = cur_entry.DepsInfo()['deps_configs']
+    to_scan.extend(
+        _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths)
+  return list(found)
+
+
+def _CombineTestEntries(entries):
+  """Combines test apks into the androidTest source set of their target.
+
+  - Speeds up android studio
+  - Adds proper dependency between test and apk_under_test
+  - Doesn't work for junit yet due to resulting circular dependencies
+    - e.g. base_junit_tests > base_junit_test_support > base_java
+  """
+  combined_entries = []
+  android_test_entries = collections.defaultdict(list)
+  for entry in entries:
+    target_name = entry.GnTarget()
+    if (target_name.endswith(_INSTRUMENTATION_TARGET_SUFFIX)
+        and 'apk_under_test' in entry.Gradle()):
+      apk_name = entry.Gradle()['apk_under_test']
+      android_test_entries[apk_name].append(entry)
+    else:
+      combined_entries.append(entry)
+  for entry in combined_entries:
+    target_name = entry.DepsInfo()['name']
+    if target_name in android_test_entries:
+      entry.android_test_entries = android_test_entries[target_name]
+      del android_test_entries[target_name]
+  # Add unmatched test entries as individual targets.
+  combined_entries.extend(e for l in android_test_entries.values() for e in l)
+  return combined_entries
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('-v',
+                      '--verbose',
+                      dest='verbose_count',
+                      default=0,
+                      action='count',
+                      help='Verbose level')
+  parser.add_argument('--target',
+                      dest='targets',
+                      action='append',
+                      help='GN target to generate project for. Replaces set of '
+                           'default targets. May be repeated.')
+  parser.add_argument('--extra-target',
+                      dest='extra_targets',
+                      action='append',
+                      help='GN target to generate project for, in addition to '
+                           'the default ones. May be repeated.')
+  parser.add_argument('--project-dir',
+                      help='Root of the output project.',
+                      default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
+  parser.add_argument('--all',
+                      action='store_true',
+                      help='Include all .java files reachable from any '
+                           'apk/test/binary target. On by default unless '
+                           '--split-projects is used (--split-projects can '
+                           'slow down Studio given too many targets).')
+  parser.add_argument('--use-gradle-process-resources',
+                      action='store_true',
+                      help='Have gradle generate R.java rather than ninja')
+  parser.add_argument('--split-projects',
+                      action='store_true',
+                      help='Split projects by their gn deps rather than '
+                           'combining all the dependencies of each target')
+  parser.add_argument('--native-target',
+                      dest='native_targets',
+                      action='append',
+                      help='GN native targets to generate for. May be '
+                           'repeated.')
+  parser.add_argument('--compile-sdk-version',
+                      type=int,
+                      default=0,
+                      help='Override compileSdkVersion for android sdk docs. '
+                           'Useful when sources for android_sdk_version is '
+                           'not available in Android Studio.')
+  parser.add_argument(
+      '--sdk-path',
+      default=os.path.expanduser('~/Android/Sdk'),
+      help='The path to use as the SDK root, overrides the '
+      'default at ~/Android/Sdk.')
+  version_group = parser.add_mutually_exclusive_group()
+  version_group.add_argument('--beta',
+                      action='store_true',
+                      help='Generate a project that is compatible with '
+                           'Android Studio Beta.')
+  version_group.add_argument('--canary',
+                      action='store_true',
+                      help='Generate a project that is compatible with '
+                           'Android Studio Canary.')
+  args = parser.parse_args()
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  constants.CheckOutputDirectory()
+  output_dir = constants.GetOutDirectory()
+  devil_chromium.Initialize(output_directory=output_dir)
+  run_tests_helper.SetLogLevel(args.verbose_count)
+
+  if args.use_gradle_process_resources:
+    assert args.split_projects, (
+        'Gradle resources does not work without --split-projects.')
+
+  _gradle_output_dir = os.path.abspath(
+      args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
+  logging.warning('Creating project at: %s', _gradle_output_dir)
+
+  # Generate for "all targets" by default when not using --split-projects (too
+  # slow), and when no --target has been explicitly set. "all targets" means all
+  # java targets that are depended on by an apk or java_binary (leaf
+  # java_library targets will not be included).
+  args.all = args.all or (not args.split_projects and not args.targets)
+
+  targets_from_args = set(args.targets or _DEFAULT_TARGETS)
+  if args.extra_targets:
+    targets_from_args.update(args.extra_targets)
+
+  if args.all:
+    if args.native_targets:
+      _RunGnGen(output_dir, ['--ide=json'])
+    elif not os.path.exists(os.path.join(output_dir, 'build.ninja')):
+      _RunGnGen(output_dir)
+    else:
+      # Faster than running "gn gen" in the no-op case.
+      _RunNinja(output_dir, ['build.ninja'])
+    # Query ninja for all __build_config_crbug_908819 targets.
+    targets = _QueryForAllGnTargets(output_dir)
+  else:
+    assert not args.native_targets, 'Native editing requires --all.'
+    targets = [
+        re.sub(r'_test_apk$', _INSTRUMENTATION_TARGET_SUFFIX, t)
+        for t in targets_from_args
+    ]
+    # Necessary after "gn clean"
+    if not os.path.exists(
+        os.path.join(output_dir, gn_helpers.BUILD_VARS_FILENAME)):
+      _RunGnGen(output_dir)
+
+  build_vars = gn_helpers.ReadBuildVars(output_dir)
+  jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
+  if args.beta:
+    channel = 'beta'
+  elif args.canary:
+    channel = 'canary'
+  else:
+    channel = 'stable'
+  if args.compile_sdk_version:
+    build_vars['compile_sdk_version'] = args.compile_sdk_version
+  else:
+    build_vars['compile_sdk_version'] = build_vars['android_sdk_version']
+  generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
+      args.use_gradle_process_resources, jinja_processor, args.split_projects,
+      channel)
+
+  main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]
+
+  if args.all:
+    # There are many unused libraries, so restrict to those that are actually
+    # used by apks/bundles/binaries/tests or that are explicitly mentioned in
+    # --targets.
+    BASE_TYPES = ('android_apk', 'android_app_bundle_module', 'java_binary',
+                  'junit_binary')
+    main_entries = [
+        e for e in main_entries
+        if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args
+            or e.GnTarget().endswith(_INSTRUMENTATION_TARGET_SUFFIX))
+    ]
+
+  if args.split_projects:
+    main_entries = _FindAllProjectEntries(main_entries)
+
+  logging.info('Generating for %d targets.', len(main_entries))
+
+  entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
+  logging.info('Creating %d projects for targets.', len(entries))
+
+  logging.warning('Writing .gradle files...')
+  project_entries = []
+  # When only one entry will be generated we want it to have a valid
+  # build.gradle file with its own AndroidManifest.
+  for entry in entries:
+    data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor)
+    if data and not args.all:
+      project_entries.append((entry.ProjectName(), entry.GradleSubdir()))
+      _WriteFile(
+          os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE),
+          data)
+  if args.all:
+    project_entries.append((_MODULE_ALL, _MODULE_ALL))
+    _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
+                       jinja_processor, args.native_targets)
+
+  _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
+             _GenerateRootGradle(jinja_processor, channel))
+
+  _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
+             _GenerateSettingsGradle(project_entries))
+
+  # Ensure the Android Studio sdk is correctly initialized.
+  if not os.path.exists(args.sdk_path):
+    # Help first-time users avoid Android Studio forcibly reverting to the
+    # previous default because it could not find a valid sdk under this dir.
+    shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path)
+  _WriteFile(
+      os.path.join(generator.project_dir, 'local.properties'),
+      _GenerateLocalProperties(args.sdk_path))
+  _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'),
+             _GenerateGradleProperties())
+
+  wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper',
+                                    'gradle-wrapper.properties')
+  if os.path.exists(wrapper_properties):
+    os.unlink(wrapper_properties)
+  if args.canary:
+    _WriteFile(wrapper_properties, _GenerateGradleWrapperPropertiesCanary())
+
+  generated_inputs = set()
+  for entry in entries:
+    entries_to_gen = [entry]
+    entries_to_gen.extend(entry.android_test_entries)
+    for entry_to_gen in entries_to_gen:
+      # Build all paths referenced by .gradle that exist within output_dir.
+      generated_inputs.update(generator.GeneratedInputs(entry_to_gen))
+  if generated_inputs:
+    targets = _RebasePath(generated_inputs, output_dir)
+    _RunNinja(output_dir, targets)
+
+  logging.warning('Generated files will only appear once you\'ve built them.')
+  logging.warning('Generated projects for Android Studio %s', channel)
+  logging.warning('For more tips: https://chromium.googlesource.com/chromium'
+                  '/src.git/+/master/docs/android_studio.md')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gradle/gn_to_cmake.py b/src/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 0000000..d3e80ae
--- /dev/null
+++ b/src/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,689 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+from __future__ import print_function
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
+def CMakeStringEscape(a):
+  """Escapes the string 'a' for use inside a CMake string.
+
+  This means escaping
+  '\' otherwise it may be seen as modifying the next character
+  '"' otherwise it will end the string
+  ';' otherwise the string becomes a list
+
+  The following do not need to be escaped
+  '#' when the lexer is in string state, this does not start a comment
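+
+  For example, 'path\to;x' becomes 'path\\to\;x'.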
+  """
+  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def CMakeTargetEscape(a):
+  """Escapes the string 'a' for use as a CMake target name.
+
+  CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
+  The ':' is only allowed for imported targets.
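+
+  For example, 'base/test:x' becomes 'base__test__x'.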
+  """
+  def Escape(c):
+    if c in string.ascii_letters or c in string.digits or c in '_.+-':
+      return c
+    else:
+      return '__'
+  return ''.join([Escape(c) for c in a])
+
+
+def SetVariable(out, variable_name, value):
+  """Sets a CMake variable."""
+  out.write('set("')
+  out.write(CMakeStringEscape(variable_name))
+  out.write('" "')
+  out.write(CMakeStringEscape(value))
+  out.write('")\n')
+
+
+def SetVariableList(out, variable_name, values):
+  """Sets a CMake variable to a list."""
+  if not values:
+    return SetVariable(out, variable_name, "")
+  if len(values) == 1:
+    return SetVariable(out, variable_name, values[0])
+  out.write('list(APPEND "')
+  out.write(CMakeStringEscape(variable_name))
+  out.write('"\n  "')
+  out.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
+  out.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+  """Given a set of source files, sets the given property on them."""
+  output.write('set_source_files_properties(')
+  WriteVariable(output, variable)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetCurrentTargetProperty(out, property_name, values, sep=''):
+  """Given a target, sets the given property."""
+  out.write('set_target_properties("${target}" PROPERTIES ')
+  out.write(property_name)
+  out.write(' "')
+  for value in values:
+    out.write(CMakeStringEscape(value))
+    out.write(sep)
+  out.write('")\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+  if prepend:
+    output.write(prepend)
+  output.write('${')
+  output.write(variable_name)
+  output.write('}')
+
+
+# See GetSourceFileType in gn
+source_file_types = {
+  '.cc': 'cxx',
+  '.cpp': 'cxx',
+  '.cxx': 'cxx',
+  '.c': 'c',
+  '.s': 'asm',
+  '.S': 'asm',
+  '.asm': 'asm',
+  '.o': 'obj',
+  '.obj': 'obj',
+}
+
+
+class CMakeTargetType(object):
+  def __init__(self, command, modifier, property_modifier, is_linkable):
+    self.command = command
+    self.modifier = modifier
+    self.property_modifier = property_modifier
+    self.is_linkable = is_linkable
+CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
+                                         None, False)
+
+# See GetStringForOutputType in gn
+cmake_target_types = {
+  'unknown': CMakeTargetType.custom,
+  'group': CMakeTargetType.custom,
+  'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
+  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
+  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
+  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
+  'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
+  'copy': CMakeTargetType.custom,
+  'action': CMakeTargetType.custom,
+  'action_foreach': CMakeTargetType.custom,
+  'bundle_data': CMakeTargetType.custom,
+  'create_bundle': CMakeTargetType.custom,
+}
+
+
+def FindFirstOf(s, a):
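+  # Index of the earliest separator from 'a' found in 's'. Assumes at least
+  # one separator is present; raises ValueError otherwise.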
+  return min(s.find(i) for i in a if i in s)
+
+
+def GetCMakeTargetName(gn_target_name):
+  # See <chromium>/src/tools/gn/label.cc#Resolve
+  # //base/test:test_support(//build/toolchain/win:msvc)
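+  # e.g. the label above maps to
+  # 'base__test_test_support--____build__toolchain__win__msvc'.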
+  path_separator = FindFirstOf(gn_target_name, (':', '('))
+  location = None
+  name = None
+  toolchain = None
+  if not path_separator:
+    location = gn_target_name[2:]
+  else:
+    location = gn_target_name[2:path_separator]
+    toolchain_separator = gn_target_name.find('(', path_separator)
+    if toolchain_separator == -1:
+      name = gn_target_name[path_separator + 1:]
+    else:
+      if toolchain_separator > path_separator:
+        name = gn_target_name[path_separator + 1:toolchain_separator]
+      assert gn_target_name.endswith(')')
+      toolchain = gn_target_name[toolchain_separator + 1:-1]
+  assert location or name
+
+  cmake_target_name = None
+  if location.endswith('/' + name):
+    cmake_target_name = location
+  elif location:
+    cmake_target_name = location + '_' + name
+  else:
+    cmake_target_name = name
+  if toolchain:
+    cmake_target_name += '--' + toolchain
+  return CMakeTargetEscape(cmake_target_name)
+
+
+class Project(object):
+  def __init__(self, project_json):
+    self.targets = project_json['targets']
+    build_settings = project_json['build_settings']
+    self.root_path = build_settings['root_path']
+    self.build_path = posixpath.join(self.root_path,
+                                     build_settings['build_dir'][2:])
+    self.object_source_deps = {}
+
+  def GetAbsolutePath(self, path):
+    if path.startswith("//"):
+      return self.root_path + "/" + path[2:]
+    else:
+      return path
+
+  def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
+    """All OBJECT libraries whose sources have not been absorbed."""
+    if gn_target_name in self.object_source_deps:
+      object_dependencies.update(self.object_source_deps[gn_target_name])
+      return
+    target_deps = set()
+    dependencies = self.targets[gn_target_name].get('deps', [])
+    for dependency in dependencies:
+      dependency_type = self.targets[dependency].get('type', None)
+      if dependency_type == 'source_set':
+        target_deps.add(dependency)
+      if dependency_type not in gn_target_types_that_absorb_objects:
+        self.GetObjectSourceDependencies(dependency, target_deps)
+    self.object_source_deps[gn_target_name] = target_deps
+    object_dependencies.update(target_deps)
+
+  def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
+    """All OBJECT libraries whose libraries have not been absorbed."""
+    dependencies = self.targets[gn_target_name].get('deps', [])
+    for dependency in dependencies:
+      dependency_type = self.targets[dependency].get('type', None)
+      if dependency_type == 'source_set':
+        object_dependencies.add(dependency)
+        self.GetObjectLibraryDependencies(dependency, object_dependencies)
+
+
+class Target(object):
+  def __init__(self, gn_target_name, project):
+    self.gn_name = gn_target_name
+    self.properties = project.targets[self.gn_name]
+    self.cmake_name = GetCMakeTargetName(self.gn_name)
+    self.gn_type = self.properties.get('type', None)
+    self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
+def WriteAction(out, target, project, sources, synthetic_dependencies):
+  outputs = []
+  output_directories = set()
+  for output in target.properties.get('outputs', []):
+    output_abs_path = project.GetAbsolutePath(output)
+    outputs.append(output_abs_path)
+    output_directory = posixpath.dirname(output_abs_path)
+    if output_directory:
+      output_directories.add(output_directory)
+  outputs_name = '${target}__output'
+  SetVariableList(out, outputs_name, outputs)
+
+  out.write('add_custom_command(OUTPUT ')
+  WriteVariable(out, outputs_name)
+  out.write('\n')
+
+  if output_directories:
+    out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
+    out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+    out.write('"\n')
+
+  script = target.properties['script']
+  arguments = target.properties['args']
+  out.write('  COMMAND python "')
+  out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+  out.write('"')
+  if arguments:
+    out.write('\n    "')
+    out.write('"\n    "'.join([CMakeStringEscape(a) for a in arguments]))
+    out.write('"')
+  out.write('\n')
+
+  out.write('  DEPENDS ')
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  out.write('\n')
+
+  #TODO: CMake 3.7 is introducing DEPFILE
+
+  out.write('  WORKING_DIRECTORY "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('"\n')
+
+  out.write('  COMMENT "Action: ${target}"\n')
+
+  out.write('  VERBATIM)\n')
+
+  synthetic_dependencies.add(outputs_name)
+
+
+def ExpandPlaceholders(source, a):
+  source_dir, source_file_part = posixpath.split(source)
+  source_name_part, _ = posixpath.splitext(source_file_part)
+  #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
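+  # e.g. for source 'gen/foo/bar.idl': {{source_file_part}} -> 'bar.idl',
+  # {{source_name_part}} -> 'bar', {{source_dir}} -> 'gen/foo'.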
+  return a.replace('{{source}}', source) \
+          .replace('{{source_file_part}}', source_file_part) \
+          .replace('{{source_name_part}}', source_name_part) \
+          .replace('{{source_dir}}', source_dir) \
+          .replace('{{source_root_relative_dir}}', source_dir)
+
+
+def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
+  all_outputs = target.properties.get('outputs', [])
+  inputs = target.properties.get('sources', [])
+  # TODO: consider expanding 'output_patterns' instead.
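+  # Outputs are assumed to be produced in fixed-size groups per input; use
+  # integer division so the result can be used as a slice index (true
+  # division yields a float under Python 3).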
+  outputs_per_input = len(all_outputs) // len(inputs)
+  for count, source in enumerate(inputs):
+    source_abs_path = project.GetAbsolutePath(source)
+
+    outputs = []
+    output_directories = set()
+    for output in all_outputs[outputs_per_input *  count:
+                              outputs_per_input * (count+1)]:
+      output_abs_path = project.GetAbsolutePath(output)
+      outputs.append(output_abs_path)
+      output_directory = posixpath.dirname(output_abs_path)
+      if output_directory:
+        output_directories.add(output_directory)
+    outputs_name = '${target}__output_' + str(count)
+    SetVariableList(out, outputs_name, outputs)
+
+    out.write('add_custom_command(OUTPUT ')
+    WriteVariable(out, outputs_name)
+    out.write('\n')
+
+    if output_directories:
+      out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
+      out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+      out.write('"\n')
+
+    script = target.properties['script']
+    # TODO: need to expand {{xxx}} in arguments
+    arguments = target.properties['args']
+    out.write('  COMMAND python "')
+    out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+    out.write('"')
+    if arguments:
+      out.write('\n    "')
+      expand = functools.partial(ExpandPlaceholders, source_abs_path)
+      out.write('"\n    "'.join(
+          [CMakeStringEscape(expand(a)) for a in arguments]))
+      out.write('"')
+    out.write('\n')
+
+    out.write('  DEPENDS')
+    if 'input' in sources:
+      WriteVariable(out, sources['input'], ' ')
+    out.write(' "')
+    out.write(CMakeStringEscape(source_abs_path))
+    out.write('"\n')
+
+    #TODO: CMake 3.7 is introducing DEPFILE
+
+    out.write('  WORKING_DIRECTORY "')
+    out.write(CMakeStringEscape(project.build_path))
+    out.write('"\n')
+
+    out.write('  COMMENT "Action ${target} on ')
+    out.write(CMakeStringEscape(source_abs_path))
+    out.write('"\n')
+
+    out.write('  VERBATIM)\n')
+
+    synthetic_dependencies.add(outputs_name)
+
+
+def WriteCopy(out, target, project, sources, synthetic_dependencies):
+  inputs = target.properties.get('sources', [])
+  raw_outputs = target.properties.get('outputs', [])
+
+  # TODO: consider expanding 'output_patterns' instead.
+  outputs = []
+  for output in raw_outputs:
+    output_abs_path = project.GetAbsolutePath(output)
+    outputs.append(output_abs_path)
+  outputs_name = '${target}__output'
+  SetVariableList(out, outputs_name, outputs)
+
+  out.write('add_custom_command(OUTPUT ')
+  WriteVariable(out, outputs_name)
+  out.write('\n')
+
+  for src, dst in zip(inputs, outputs):
+    out.write('  COMMAND ${CMAKE_COMMAND} -E copy "')
+    out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
+    out.write('" "')
+    out.write(CMakeStringEscape(dst))
+    out.write('"\n')
+
+  out.write('  DEPENDS ')
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  out.write('\n')
+
+  out.write('  WORKING_DIRECTORY "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('"\n')
+
+  out.write('  COMMENT "Copy ${target}"\n')
+
+  out.write('  VERBATIM)\n')
+
+  synthetic_dependencies.add(outputs_name)
+
+
+def WriteCompilerFlags(out, target, project, sources):
+  # Hack: set the linker language to C if no 'c' or 'cxx' sources are present.
+  if 'c' not in sources and 'cxx' not in sources:
+    SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])
+
+  # Mark uncompiled sources as uncompiled.
+  if 'input' in sources:
+    SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
+  if 'other' in sources:
+    SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')
+
+  # Mark object sources as linkable.
+  if 'obj' in sources:
+    SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')
+
+  # TODO: 'output_name', 'output_dir', 'output_extension'
+  # This includes using 'source_outputs' to direct compiler output.
+
+  # Includes
+  includes = target.properties.get('include_dirs', [])
+  if includes:
+    out.write('set_property(TARGET "${target}" ')
+    out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
+    for include_dir in includes:
+      out.write('\n  "')
+      out.write(project.GetAbsolutePath(include_dir))
+      out.write('"')
+    out.write(')\n')
+
+  # Defines
+  defines = target.properties.get('defines', [])
+  if defines:
+    SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')
+
+  # Compile flags
+  # "arflags", "asmflags", "cflags",
+  # "cflags_c", "clfags_cc", "cflags_objc", "clfags_objcc"
+  # CMake does not have per target lang compile flags.
+  # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
+  #       http://public.kitware.com/Bug/view.php?id=14857
+  flags = []
+  flags.extend(target.properties.get('cflags', []))
+  cflags_asm = target.properties.get('asmflags', [])
+  cflags_c = target.properties.get('cflags_c', [])
+  cflags_cxx = target.properties.get('cflags_cc', [])
+  if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
+    flags.extend(cflags_c)
+  elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
+    flags.extend(cflags_cxx)
+  else:
+    # TODO: This is broken, one cannot generally set properties on files,
+    # as other targets may require different properties on the same files.
+    if 'asm' in sources and cflags_asm:
+      SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
+    if 'c' in sources and cflags_c:
+      SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
+    if 'cxx' in sources and cflags_cxx:
+      SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
+  if flags:
+    SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')
+
+  # Linker flags
+  ldflags = target.properties.get('ldflags', [])
+  if ldflags:
+    SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
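+# Target types that pull transitive source_set (OBJECT library) sources
+# directly into themselves; GetObjectSourceDependencies stops recursing at
+# these.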
+gn_target_types_that_absorb_objects = (
+  'executable',
+  'loadable_module',
+  'shared_library',
+  'static_library'
+)
+
+
+def WriteSourceVariables(out, target, project):
+  # gn separates the sheep from the goats based on file extensions.
+  # A full separation is done here because of flag handling (see Compile flags).
+  source_types = {'cxx':[], 'c':[], 'asm':[],
+                  'obj':[], 'obj_target':[], 'input':[], 'other':[]}
+
+  # TODO .def files on Windows
+  for source in target.properties.get('sources', []):
+    _, ext = posixpath.splitext(source)
+    source_abs_path = project.GetAbsolutePath(source)
+    source_types[source_file_types.get(ext, 'other')].append(source_abs_path)
+
+  for input_path in target.properties.get('inputs', []):
+    input_abs_path = project.GetAbsolutePath(input_path)
+    source_types['input'].append(input_abs_path)
+
+  # OBJECT library dependencies need to be listed as sources.
+  # Only executables and non-OBJECT libraries may reference an OBJECT library.
+  # https://gitlab.kitware.com/cmake/cmake/issues/14778
+  if target.gn_type in gn_target_types_that_absorb_objects:
+    object_dependencies = set()
+    project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
+    for dependency in object_dependencies:
+      cmake_dependency_name = GetCMakeTargetName(dependency)
+      obj_target_sources = '$<TARGET_OBJECTS:' + cmake_dependency_name + '>'
+      source_types['obj_target'].append(obj_target_sources)
+
+  sources = {}
+  for source_type, sources_of_type in source_types.items():
+    if sources_of_type:
+      sources[source_type] = '${target}__' + source_type + '_srcs'
+      SetVariableList(out, sources[source_type], sources_of_type)
+  return sources
+
+
+def WriteTarget(out, target, project):
+  out.write('\n#')
+  out.write(target.gn_name)
+  out.write('\n')
+
+  if target.cmake_type is None:
+    print('Target {} has unknown target type {}, skipping.'.format(
+        target.gn_name, target.gn_type))
+    return
+
+  SetVariable(out, 'target', target.cmake_name)
+
+  sources = WriteSourceVariables(out, target, project)
+
+  synthetic_dependencies = set()
+  if target.gn_type == 'action':
+    WriteAction(out, target, project, sources, synthetic_dependencies)
+  if target.gn_type == 'action_foreach':
+    WriteActionForEach(out, target, project, sources, synthetic_dependencies)
+  if target.gn_type == 'copy':
+    WriteCopy(out, target, project, sources, synthetic_dependencies)
+
+  out.write(target.cmake_type.command)
+  out.write('("${target}"')
+  if target.cmake_type.modifier is not None:
+    out.write(' ')
+    out.write(target.cmake_type.modifier)
+  for sources_type_name in sources.values():
+    WriteVariable(out, sources_type_name, ' ')
+  if synthetic_dependencies:
+    out.write(' DEPENDS')
+    for synthetic_dependency in synthetic_dependencies:
+      WriteVariable(out, synthetic_dependency, ' ')
+  out.write(')\n')
+
+  if target.cmake_type.command != 'add_custom_target':
+    WriteCompilerFlags(out, target, project, sources)
+
+  libraries = set()
+  nonlibraries = set()
+
+  dependencies = set(target.properties.get('deps', []))
+  # Transitive OBJECT libraries are in sources.
+  # Those sources are dependent on the OBJECT library dependencies.
+  # Those sources cannot bring in library dependencies.
+  object_dependencies = set()
+  if target.gn_type != 'source_set':
+    project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
+  for object_dependency in object_dependencies:
+    dependencies.update(project.targets.get(object_dependency).get('deps', []))
+
+  for dependency in dependencies:
+    gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
+    cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
+    cmake_dependency_name = GetCMakeTargetName(dependency)
+    if cmake_dependency_type.command != 'add_library':
+      nonlibraries.add(cmake_dependency_name)
+    elif cmake_dependency_type.modifier != 'OBJECT':
+      if target.cmake_type.is_linkable:
+        libraries.add(cmake_dependency_name)
+      else:
+        nonlibraries.add(cmake_dependency_name)
+
+  # Non-library dependencies.
+  if nonlibraries:
+    out.write('add_dependencies("${target}"')
+    for nonlibrary in nonlibraries:
+      out.write('\n  "')
+      out.write(nonlibrary)
+      out.write('"')
+    out.write(')\n')
+
+  # Non-OBJECT library dependencies.
+  external_libraries = target.properties.get('libs', [])
+  if target.cmake_type.is_linkable and (external_libraries or libraries):
+    library_dirs = target.properties.get('lib_dirs', [])
+    if library_dirs:
+      SetVariableList(out, '${target}__library_directories', library_dirs)
+
+    system_libraries = []
+    for external_library in external_libraries:
+      if '/' in external_library:
+        libraries.add(project.GetAbsolutePath(external_library))
+      else:
+        if external_library.endswith('.framework'):
+          external_library = external_library[:-len('.framework')]
+        system_library = 'library__' + external_library
+        if library_dirs:
+          system_library = system_library + '__for_${target}'
+        out.write('find_library("')
+        out.write(CMakeStringEscape(system_library))
+        out.write('" "')
+        out.write(CMakeStringEscape(external_library))
+        out.write('"')
+        if library_dirs:
+          out.write(' PATHS "')
+          WriteVariable(out, '${target}__library_directories')
+          out.write('"')
+        out.write(')\n')
+        system_libraries.append(system_library)
+    out.write('target_link_libraries("${target}"')
+    for library in libraries:
+      out.write('\n  "')
+      out.write(CMakeStringEscape(library))
+      out.write('"')
+    for system_library in system_libraries:
+      WriteVariable(out, system_library, '\n  "')
+      out.write('"')
+    out.write(')\n')
+
+
+def WriteProject(project):
+  out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+')
+  out.write('# Generated by gn_to_cmake.py.\n')
+  out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  out.write('cmake_policy(VERSION 2.8.8)\n\n')
+
+  # Update the gn generated ninja build.
+  # If a build file has changed, this will update CMakeLists.ext if
+  # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+  # style was used to create this config.
+  out.write('execute_process(COMMAND ninja -C "')
+  out.write(CMakeStringEscape(project.build_path))
+  out.write('" build.ninja)\n')
+
+  out.write('include(CMakeLists.ext)\n')
+  out.close()
+
+  out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+')
+  out.write('# Generated by gn_to_cmake.py.\n')
+  out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  out.write('cmake_policy(VERSION 2.8.8)\n')
+
+  # The following appears to be as-yet undocumented.
+  # http://public.kitware.com/Bug/view.php?id=8392
+  out.write('enable_language(ASM)\n\n')
+  # ASM-ATT does not support .S files.
+  # output.write('enable_language(ASM-ATT)\n')
+
+  # Current issues with automatic re-generation:
+  # The gn generated build.ninja target uses build.ninja.d
+  #   but build.ninja.d does not contain the ide or gn.
+  # Currently the ide is not run if the project.json file is not changed
+  #   but the ide needs to be run anyway if it has itself changed.
+  #   This can be worked around by deleting the project.json file.
+  out.write('file(READ "')
+  gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
+  out.write(CMakeStringEscape(gn_deps_file))
+  out.write('" "gn_deps_string" OFFSET ')
+  out.write(str(len('build.ninja: ')))
+  out.write(')\n')
+  # One would think this would need to worry about escaped spaces
+  # but gn doesn't escape spaces here (it generates invalid .d files).
+  out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
+  out.write('foreach("gn_dep" ${gn_deps})\n')
+  out.write('  configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
+  out.write('endforeach("gn_dep")\n')
+
+  for target_name in project.targets.keys():
+    out.write('\n')
+    WriteTarget(out, Target(target_name, project), project)
+
+
+def main():
+  if len(sys.argv) != 2:
+    print('Usage: ' + sys.argv[0] + ' <json_file_name>')
+    exit(1)
+
+  json_path = sys.argv[1]
+  project = None
+  with open(json_path, 'r') as json_file:
+    project = json.loads(json_file.read())
+
+  WriteProject(Project(project))
+
+
+if __name__ == "__main__":
+  main()
diff --git a/src/build/android/gradle/java.jinja b/src/build/android/gradle/java.jinja
new file mode 100644
index 0000000..7626f61
--- /dev/null
+++ b/src/build/android/gradle/java.jinja
@@ -0,0 +1,41 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+apply plugin: "java"
+{% if template_type == 'java_binary' %}
+apply plugin: "application"
+{% endif %}
+
+sourceSets {
+    main {
+        java.srcDirs = [
+{% for path in main.java_dirs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% if main.java_excludes is defined %}
+        java.filter.exclude([
+{% for path in main.java_excludes %}
+            "{{ path }}",
+{% endfor %}
+        ])
+{% endif %}
+    }
+}
+
+sourceCompatibility = JavaVersion.VERSION_1_8
+targetCompatibility = JavaVersion.VERSION_1_8
+
+{% if template_type == 'java_binary' %}
+applicationName = "{{ target_name }}"
+{% if main_class %}
+mainClassName = "{{ main_class }}"
+{% endif %}
+{% endif %}
+{% if template_type in ('java_binary', 'java_library') %}
+archivesBaseName = "{{ target_name }}"
+{% endif %}
+
+{% include 'dependencies.jinja' %}
diff --git a/src/build/android/gradle/manifest.jinja b/src/build/android/gradle/manifest.jinja
new file mode 100644
index 0000000..dea7071
--- /dev/null
+++ b/src/build/android/gradle/manifest.jinja
@@ -0,0 +1,7 @@
+{# Copyright 2017 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="{{ package }}">
+</manifest>
diff --git a/src/build/android/gradle/root.jinja b/src/build/android/gradle/root.jinja
new file mode 100644
index 0000000..15b5e10
--- /dev/null
+++ b/src/build/android/gradle/root.jinja
@@ -0,0 +1,26 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+buildscript {
+    repositories {
+        google()
+        jcenter()
+{% if channel == 'canary' %}
+        // Workaround for http://b/144885480.
+        //maven() {
+        //  url "http://dl.bintray.com/kotlin/kotlin-eap"
+        //}
+{% endif %}
+    }
+    dependencies {
+{% if channel == 'canary' %}
+        classpath "com.android.tools.build:gradle:4.1.0-beta01"
+{% elif channel == 'beta' %}
+        classpath "com.android.tools.build:gradle:4.0.0-rc01"
+{% else %}
+        classpath "com.android.tools.build:gradle:4.0.1"
+{% endif %}
+    }
+}
diff --git a/src/build/android/gtest_apk/BUILD.gn b/src/build/android/gtest_apk/BUILD.gn
new file mode 100644
index 0000000..2a72bc4
--- /dev/null
+++ b/src/build/android/gtest_apk/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("native_test_instrumentation_test_runner_java") {
+  testonly = true
+  sources = [
+    "java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java",
+    "java/src/org/chromium/build/gtest_apk/NativeTestIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusIntent.java",
+    "java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java",
+  ]
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
new file mode 100644
index 0000000..652333b
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java
@@ -0,0 +1,281 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.ActivityManager;
+import android.app.Instrumentation;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.Process;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.SparseArray;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Queue;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ *  An Instrumentation that runs tests based on NativeTest.
+ */
+public class NativeTestInstrumentationTestRunner extends Instrumentation {
+    private static final String EXTRA_NATIVE_TEST_ACTIVITY =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.NativeTestActivity";
+    private static final String EXTRA_SHARD_NANO_TIMEOUT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardNanoTimeout";
+    private static final String EXTRA_SHARD_SIZE_LIMIT =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardSizeLimit";
+    private static final String EXTRA_STDOUT_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.StdoutFile";
+    private static final String EXTRA_TEST_LIST_FILE =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.TestList";
+    private static final String EXTRA_TEST =
+            "org.chromium.native_test.NativeTestInstrumentationTestRunner.Test";
+
+    private static final String TAG = "NativeTest";
+
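+    // Default per-shard timeout: 60 seconds, expressed in nanoseconds.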
+    private static final long DEFAULT_SHARD_NANO_TIMEOUT = 60 * 1000000000L;
+    // Default to no size limit.
+    private static final int DEFAULT_SHARD_SIZE_LIMIT = 0;
+
+    private Handler mHandler = new Handler();
+    private Bundle mLogBundle = new Bundle();
+    private SparseArray<ShardMonitor> mMonitors = new SparseArray<ShardMonitor>();
+    private String mNativeTestActivity;
+    private TestStatusReceiver mReceiver;
+    private Queue<String> mShards = new ArrayDeque<String>();
+    private long mShardNanoTimeout = DEFAULT_SHARD_NANO_TIMEOUT;
+    private int mShardSizeLimit = DEFAULT_SHARD_SIZE_LIMIT;
+    private File mStdoutFile;
+    private Bundle mTransparentArguments;
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        Context context = getContext();
+        mTransparentArguments = new Bundle(arguments);
+
+        mNativeTestActivity = arguments.getString(EXTRA_NATIVE_TEST_ACTIVITY);
+        if (mNativeTestActivity == null) {
+            Log.e(TAG,
+                    "Unable to find org.chromium.native_test.NativeUnitTestActivity extra on "
+                            + "NativeTestInstrumentationTestRunner launch intent.");
+            finish(Activity.RESULT_CANCELED, new Bundle());
+            return;
+        }
+        mTransparentArguments.remove(EXTRA_NATIVE_TEST_ACTIVITY);
+
+        String shardNanoTimeout = arguments.getString(EXTRA_SHARD_NANO_TIMEOUT);
+        if (shardNanoTimeout != null) mShardNanoTimeout = Long.parseLong(shardNanoTimeout);
+        mTransparentArguments.remove(EXTRA_SHARD_NANO_TIMEOUT);
+
+        String shardSizeLimit = arguments.getString(EXTRA_SHARD_SIZE_LIMIT);
+        if (shardSizeLimit != null) mShardSizeLimit = Integer.parseInt(shardSizeLimit);
+        mTransparentArguments.remove(EXTRA_SHARD_SIZE_LIMIT);
+
+        String stdoutFile = arguments.getString(EXTRA_STDOUT_FILE);
+        if (stdoutFile != null) {
+            mStdoutFile = new File(stdoutFile);
+        } else {
+            try {
+                mStdoutFile = File.createTempFile(
+                        ".temp_stdout_", ".txt", Environment.getExternalStorageDirectory());
+                Log.i(TAG, "stdout file created: " + mStdoutFile.getAbsolutePath());
+            } catch (IOException e) {
+                Log.e(TAG, "Unable to create temporary stdout file.", e);
+                finish(Activity.RESULT_CANCELED, new Bundle());
+                return;
+            }
+        }
+
+        mTransparentArguments.remove(EXTRA_STDOUT_FILE);
+
+        String singleTest = arguments.getString(EXTRA_TEST);
+        if (singleTest != null) {
+            mShards.add(singleTest);
+        }
+
+        String testListFilePath = arguments.getString(EXTRA_TEST_LIST_FILE);
+        if (testListFilePath != null) {
+            File testListFile = new File(testListFilePath);
+            try {
+                BufferedReader testListFileReader =
+                        new BufferedReader(new FileReader(testListFile));
+
+                String test;
+                ArrayList<String> workingShard = new ArrayList<String>();
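+                // Pack tests into ':'-joined gtest-filter shards of at most
+                // mShardSizeLimit tests each (0 means one unbounded shard).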
+                while ((test = testListFileReader.readLine()) != null) {
+                    workingShard.add(test);
+                    if (workingShard.size() == mShardSizeLimit) {
+                        mShards.add(TextUtils.join(":", workingShard));
+                        workingShard = new ArrayList<String>();
+                    }
+                }
+
+                if (!workingShard.isEmpty()) {
+                    mShards.add(TextUtils.join(":", workingShard));
+                }
+
+                testListFileReader.close();
+            } catch (IOException e) {
+                Log.e(TAG, "Error reading " + testListFile.getAbsolutePath(), e);
+            }
+        }
+        mTransparentArguments.remove(EXTRA_TEST_LIST_FILE);
+
+        start();
+    }
+
+    @Override
+    @SuppressLint("DefaultLocale")
+    public void onStart() {
+        super.onStart();
+
+        mReceiver = new TestStatusReceiver();
+        mReceiver.register(getContext());
+        mReceiver.registerCallback(new TestStatusReceiver.TestRunCallback() {
+            @Override
+            public void testRunStarted(int pid) {
+                if (pid != Process.myPid()) {
+                    ShardMonitor m = new ShardMonitor(pid, System.nanoTime() + mShardNanoTimeout);
+                    mMonitors.put(pid, m);
+                    mHandler.post(m);
+                }
+            }
+
+            @Override
+            public void testRunFinished(int pid) {
+                ShardMonitor m = mMonitors.get(pid);
+                if (m != null) {
+                    m.stopped();
+                    mMonitors.remove(pid);
+                }
+                mHandler.post(new ShardEnder(pid));
+            }
+
+            @Override
+            public void uncaughtException(int pid, String stackTrace) {
+                mLogBundle.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
+                        String.format("Uncaught exception in test process (pid: %d)%n%s%n", pid,
+                                stackTrace));
+                sendStatus(0, mLogBundle);
+            }
+        });
+
+        mHandler.post(new ShardStarter());
+    }
+
+    /** Monitors a test shard's execution. */
+    private class ShardMonitor implements Runnable {
+        private static final int MONITOR_FREQUENCY_MS = 1000;
+
+        private long mExpirationNanoTime;
+        private int mPid;
+        private AtomicBoolean mStopped;
+
+        public ShardMonitor(int pid, long expirationNanoTime) {
+            mPid = pid;
+            mExpirationNanoTime = expirationNanoTime;
+            mStopped = new AtomicBoolean(false);
+        }
+
+        public void stopped() {
+            mStopped.set(true);
+        }
+
+        @Override
+        public void run() {
+            if (mStopped.get()) {
+                return;
+            }
+
+            if (isAppProcessAlive(getContext(), mPid)) {
+                if (System.nanoTime() > mExpirationNanoTime) {
+                    Log.e(TAG, String.format("Test process %d timed out.", mPid));
+                    mHandler.post(new ShardEnder(mPid));
+                    return;
+                } else {
+                    mHandler.postDelayed(this, MONITOR_FREQUENCY_MS);
+                    return;
+                }
+            }
+
+            Log.e(TAG, String.format("Test process %d died unexpectedly.", mPid));
+            mHandler.post(new ShardEnder(mPid));
+        }
+    }
+
+    private static boolean isAppProcessAlive(Context context, int pid) {
+        ActivityManager activityManager =
+                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        for (ActivityManager.RunningAppProcessInfo processInfo :
+                activityManager.getRunningAppProcesses()) {
+            if (processInfo.pid == pid) return true;
+        }
+        return false;
+    }
+
+    protected Intent createShardMainIntent() {
+        Intent i = new Intent(Intent.ACTION_MAIN);
+        i.setComponent(new ComponentName(getContext().getPackageName(), mNativeTestActivity));
+        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+        i.putExtras(mTransparentArguments);
+        if (mShards != null && !mShards.isEmpty()) {
+            String gtestFilter = mShards.remove();
+            i.putExtra(NativeTestIntent.EXTRA_GTEST_FILTER, gtestFilter);
+        }
+        i.putExtra(NativeTestIntent.EXTRA_STDOUT_FILE, mStdoutFile.getAbsolutePath());
+        return i;
+    }
+
+    /**
+     * Starts the NativeTest Activity.
+     */
+    private class ShardStarter implements Runnable {
+        @Override
+        public void run() {
+            getContext().startActivity(createShardMainIntent());
+        }
+    }
+
+    private class ShardEnder implements Runnable {
+        private static final int WAIT_FOR_DEATH_MILLIS = 10;
+
+        private int mPid;
+
+        public ShardEnder(int pid) {
+            mPid = pid;
+        }
+
+        @Override
+        public void run() {
+            if (mPid != Process.myPid()) {
+                Process.killProcess(mPid);
+                try {
+                    while (isAppProcessAlive(getContext(), mPid)) {
+                        Thread.sleep(WAIT_FOR_DEATH_MILLIS);
+                    }
+                } catch (InterruptedException e) {
+                    Log.e(TAG, String.format("%d may still be alive.", mPid), e);
+                }
+            }
+            if (mShards != null && !mShards.isEmpty()) {
+                mHandler.post(new ShardStarter());
+            } else {
+                finish(Activity.RESULT_OK, new Bundle());
+            }
+        }
+    }
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
new file mode 100644
index 0000000..a875e97
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java
@@ -0,0 +1,22 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Extras for intent sent by NativeTestInstrumentationTestRunner.
+ */
+public class NativeTestIntent {
+    public static final String EXTRA_COMMAND_LINE_FILE =
+            "org.chromium.native_test.NativeTest.CommandLineFile";
+    public static final String EXTRA_COMMAND_LINE_FLAGS =
+            "org.chromium.native_test.NativeTest.CommandLineFlags";
+    public static final String EXTRA_RUN_IN_SUB_THREAD =
+            "org.chromium.native_test.NativeTest.RunInSubThread";
+    public static final String EXTRA_GTEST_FILTER =
+            "org.chromium.native_test.NativeTest.GtestFilter";
+    public static final String EXTRA_STDOUT_FILE = "org.chromium.native_test.NativeTest.StdoutFile";
+    public static final String EXTRA_COVERAGE_DEVICE_FILE =
+            "org.chromium.native_test.NativeTest.CoverageDeviceFile";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
new file mode 100644
index 0000000..520b748
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java
@@ -0,0 +1,21 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+/**
+ * Intent action and extras of broadcasts intercepted by TestStatusReceiver.
+ */
+public class TestStatusIntent {
+    public static final String ACTION_TEST_RUN_STARTED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_STARTED";
+    public static final String ACTION_TEST_RUN_FINISHED =
+            "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_FINISHED";
+    public static final String ACTION_UNCAUGHT_EXCEPTION =
+            "org.chromium.test.reporter.TestStatusReporter.UNCAUGHT_EXCEPTION";
+    public static final String DATA_TYPE_RESULT = "org.chromium.test.reporter/result";
+    public static final String EXTRA_PID = "org.chromium.test.reporter.TestStatusReporter.PID";
+    public static final String EXTRA_STACK_TRACE =
+            "org.chromium.test.reporter.TestStatusReporter.STACK_TRACE";
+}
diff --git a/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
new file mode 100644
index 0000000..e539009
--- /dev/null
+++ b/src/build/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java
@@ -0,0 +1,89 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.gtest_apk;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Receives test status broadcasts sent from
+ * {@link org.chromium.test.reporter.TestStatusReporter}.
+ */
+public class TestStatusReceiver extends BroadcastReceiver {
+    private static final String TAG = "test_reporter";
+
+    private final List<TestRunCallback> mTestRunCallbacks = new ArrayList<TestRunCallback>();
+
+    /** An IntentFilter that matches the intents that this class can receive. */
+    private static final IntentFilter INTENT_FILTER;
+    static {
+        IntentFilter filter = new IntentFilter();
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_STARTED);
+        filter.addAction(TestStatusIntent.ACTION_TEST_RUN_FINISHED);
+        filter.addAction(TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION);
+        try {
+            filter.addDataType(TestStatusIntent.DATA_TYPE_RESULT);
+        } catch (IntentFilter.MalformedMimeTypeException e) {
+            Log.wtf(TAG, "Invalid MIME type", e);
+        }
+        INTENT_FILTER = filter;
+    }
+
+    /** A callback used when a test run has started or finished. */
+    public interface TestRunCallback {
+        void testRunStarted(int pid);
+        void testRunFinished(int pid);
+        void uncaughtException(int pid, String stackTrace);
+    }
+
+    /** Register a callback for when a test run has started or finished. */
+    public void registerCallback(TestRunCallback c) {
+        mTestRunCallbacks.add(c);
+    }
+
+    /** Register this receiver using the provided context. */
+    public void register(Context c) {
+        c.registerReceiver(this, INTENT_FILTER);
+    }
+
+    /**
+     * Receive a broadcast intent.
+     *
+     * @param context The Context in which the receiver is running.
+     * @param intent The intent received.
+     */
+    @Override
+    public void onReceive(Context context, Intent intent) {
+        int pid = intent.getIntExtra(TestStatusIntent.EXTRA_PID, 0);
+        String stackTrace = intent.getStringExtra(TestStatusIntent.EXTRA_STACK_TRACE);
+
+        switch (intent.getAction()) {
+            case TestStatusIntent.ACTION_TEST_RUN_STARTED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunStarted(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_TEST_RUN_FINISHED:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.testRunFinished(pid);
+                }
+                break;
+            case TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION:
+                for (TestRunCallback c : mTestRunCallbacks) {
+                    c.uncaughtException(pid, stackTrace);
+                }
+                break;
+            default:
+                Log.e(TAG, "Unrecognized intent received: " + intent.toString());
+                break;
+        }
+    }
+}
diff --git a/src/build/android/gyp/aar.py b/src/build/android/gyp/aar.py
new file mode 100755
index 0000000..b157cd8
--- /dev/null
+++ b/src/build/android/gyp/aar.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             os.pardir, os.pardir)))
+import gn_helpers
+
+
+_PROGUARD_TXT = 'proguard.txt'
+
+
+def _GetManifestPackage(doc):
+  """Returns the package specified in the manifest.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    String representing the package name.
+  """
+  return doc.attrib['package']
+
+
+def _IsManifestEmpty(doc):
+  """Decides whether the given manifest has merge-worthy elements.
+
+  E.g.: <activity>, <service>, etc.
+
+  Args:
+    doc: an XML tree parsed by ElementTree
+
+  Returns:
+    True if the manifest contains no merge-worthy elements.
+  """
+  for node in doc:
+    if node.tag == 'application':
+      if list(node):
+        return False
+    elif node.tag != 'uses-sdk':
+      return False
+
+  return True
+
+
+def _CreateInfo(aar_file):
+  """Extracts and return .info data from an .aar file.
+
+  Args:
+    aar_file: Path to an input .aar file.
+
+  Returns:
+    A dict containing .info data.
+  """
+  data = {}
+  data['aidl'] = []
+  data['assets'] = []
+  data['resources'] = []
+  data['subjars'] = []
+  data['subjar_tuples'] = []
+  data['has_classes_jar'] = False
+  data['has_proguard_flags'] = False
+  data['has_native_libraries'] = False
+  data['has_r_text_file'] = False
+  with zipfile.ZipFile(aar_file) as z:
+    manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml'))
+    data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml)
+    manifest_package = _GetManifestPackage(manifest_xml)
+    if manifest_package:
+      data['manifest_package'] = manifest_package
+
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if name.startswith('aidl/'):
+        data['aidl'].append(name)
+      elif name.startswith('res/'):
+        data['resources'].append(name)
+      elif name.startswith('libs/') and name.endswith('.jar'):
+        label = posixpath.basename(name)[:-4]
+        label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
+        data['subjars'].append(name)
+        data['subjar_tuples'].append([label, name])
+      elif name.startswith('assets/'):
+        data['assets'].append(name)
+      elif name.startswith('jni/'):
+        data['has_native_libraries'] = True
+        if 'native_libraries' in data:
+          data['native_libraries'].append(name)
+        else:
+          data['native_libraries'] = [name]
+      elif name == 'classes.jar':
+        data['has_classes_jar'] = True
+      elif name == _PROGUARD_TXT:
+        data['has_proguard_flags'] = True
+      elif name == 'R.txt':
+        # Some AARs, e.g. gvr_controller_java, have an empty R.txt. Such AARs
+        # have no resources either. We treat an empty R.txt as having no R.txt.
+        data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+  return data
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist):
+  with build_utils.TempDir() as tmp_dir:
+    tmp_dir = os.path.join(tmp_dir, 'staging')
+    os.mkdir(tmp_dir)
+    build_utils.ExtractAll(
+        aar_file, path=tmp_dir, predicate=name_allowlist.__contains__)
+    # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
+    with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
+      f.write('source={}\n'.format(aar_file))
+
+    shutil.rmtree(output_dir, ignore_errors=True)
+    shutil.move(tmp_dir, output_dir)
+
+
+def _AddCommonArgs(parser):
+  parser.add_argument(
+      'aar_file', help='Path to the AAR file.', type=os.path.normpath)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  command_parsers = parser.add_subparsers(dest='command')
+  subp = command_parsers.add_parser(
+      'list', help='Output a GN scope describing the contents of the .aar.')
+  _AddCommonArgs(subp)
+  subp.add_argument('--output', help='Output file.', default='-')
+
+  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
+  _AddCommonArgs(subp)
+  subp.add_argument(
+      '--output-dir',
+      help='Output directory for the extracted files.',
+      required=True,
+      type=os.path.normpath)
+  subp.add_argument(
+      '--assert-info-file',
+      help='Path to .info file. Asserts that it matches what '
+      '"list" would output.',
+      type=argparse.FileType('r'))
+  subp.add_argument(
+      '--ignore-resources',
+      action='store_true',
+      help='Whether to skip extraction of res/')
+
+  args = parser.parse_args()
+
+  aar_info = _CreateInfo(args.aar_file)
+  formatted_info = """\
+# Generated by //build/android/gyp/aar.py
+# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
+
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
+
+  if args.command == 'extract':
+    if args.assert_info_file:
+      cached_info = args.assert_info_file.read()
+      if formatted_info != cached_info:
+        raise Exception('android_aar_prebuilt() cached .info file is '
+                        'out-of-date. Run gn gen with '
+                        'update_android_aar_prebuilts=true to update it.')
+
+    with zipfile.ZipFile(args.aar_file) as zf:
+      names = zf.namelist()
+      if args.ignore_resources:
+        names = [n for n in names if not n.startswith('res')]
+
+    _PerformExtract(args.aar_file, args.output_dir, set(names))
+
+  elif args.command == 'list':
+    aar_output_present = args.output != '-' and os.path.isfile(args.output)
+    if aar_output_present:
+      # Some .info files are read-only, for example the cipd-controlled ones
+      # under third_party/android_deps/repository. To deal with these, first
+      # check that the existing content is correct, and if it is, exit
+      # without touching the file system.
+      file_info = open(args.output, 'r').read()
+      if file_info == formatted_info:
+        return
+
+    # Try to write the file. This may fail for read-only ones that were
+    # not updated.
+    try:
+      with open(args.output, 'w') as f:
+        f.write(formatted_info)
+    except IOError as e:
+      if not aar_output_present:
+        raise e
+      raise Exception('Could not update output file: %s\n%s\n' %
+                      (args.output, e))
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/aar.pydeps b/src/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000..7e2924b
--- /dev/null
+++ b/src/build/android/gyp/aar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/aidl.py b/src/build/android/gyp/aidl.py
new file mode 100755
index 0000000..b8099aa
--- /dev/null
+++ b/src/build/android/gyp/aidl.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  options.includes = build_utils.ParseGnList(options.includes)
+
+  with build_utils.TempDir() as temp_dir:
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGnList(options.imports)
+      ]
+      aidl_cmd += ['-I' + s for s in options.includes]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    with build_utils.AtomicOutput(options.srcjar) as f:
+      with zipfile.ZipFile(f, 'w') as srcjar:
+        for path in build_utils.FindInDirectory(temp_dir, '*.java'):
+          with open(path) as fileobj:
+            data = fileobj.read()
+          pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
+          arcname = '%s/%s' % (
+              pkg_name.replace('.', '/'), os.path.basename(path))
+          build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+
+  if options.depfile:
+    include_files = []
+    for include_dir in options.includes:
+      include_files += build_utils.FindInDirectory(include_dir, '*.java')
+    build_utils.WriteDepfile(options.depfile, options.srcjar, include_files)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/aidl.pydeps b/src/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000..11c55ed
--- /dev/null
+++ b/src/build/android/gyp/aidl.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/allot_native_libraries.py b/src/build/android/gyp/allot_native_libraries.py
new file mode 100755
index 0000000..978b173
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Allots libraries to modules to be packaged into.
+
+All libraries that are depended on by a single module will be allotted to this
+module. All other libraries will be allotted to the closest ancestor.
+
+Example:
+  Given the module dependency structure
+
+        c
+       / \
+      b   d
+     /     \
+    a       e
+
+  and libraries assignment
+
+    a: ['lib1.so']
+    e: ['lib2.so', 'lib1.so']
+
+  will make the allotment decision
+
+    c: ['lib1.so']
+    e: ['lib2.so']
+
+  The above example is invoked via:
+
+    ./allot_native_libraries \
+      --libraries 'a,["lib1.so"]' \
+      --libraries 'e,["lib2.so", "lib1.so"]' \
+      --dep c:b \
+      --dep b:a \
+      --dep c:d \
+      --dep d:e \
+      --output <output JSON>
+"""
+
+import argparse
+import collections
+import json
+import sys
+
+from util import build_utils
+
+
+def _ModuleLibrariesPair(arg):
+  pos = arg.find(',')
+  assert pos > 0
+  return (arg[:pos], arg[pos + 1:])
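+
+# E.g. (hypothetical input; the GN list is parsed later in main()):
+#   >>> _ModuleLibrariesPair('a,["lib1.so"]')
+#   ('a', '["lib1.so"]')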
+
+
+def _DepPair(arg):
+  parent, child = arg.split(':')
+  return (parent, child)
+
+
+def _PathFromRoot(module_tree, module):
+  """Computes path from root to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    module: Module to which to compute the path.
+
+  Returns:
+    Path from root to the module.
+  """
+  path = [module]
+  while module_tree.get(module):
+    module = module_tree[module]
+    path = [module] + path
+  return path
+
+
+def _ClosestCommonAncestor(module_tree, modules):
+  """Computes the common ancestor of a set of modules.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent.
+    modules: Set of modules for which to find the closest common ancestor.
+
+  Returns:
+    The closest common ancestor.
+  """
+  paths = [_PathFromRoot(module_tree, m) for m in modules]
+  assert len(paths) > 0
+  ancestor = None
+  for level in zip(*paths):
+    if len(set(level)) != 1:
+      return ancestor
+    ancestor = level[0]
+  return ancestor
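+
+# A doctest-style sketch (illustration only, not executed) of how the two
+# helpers above combine, using the tree from the module docstring:
+#   >>> module_tree = {'a': 'b', 'b': 'c', 'c': None, 'd': 'c', 'e': 'd'}
+#   >>> _PathFromRoot(module_tree, 'a')
+#   ['c', 'b', 'a']
+#   >>> _ClosestCommonAncestor(module_tree, {'a', 'e'})
+#   'c'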
+
+
+def _AllotLibraries(module_tree, libraries_map):
+  """Allot all libraries to a module.
+
+  Parameters:
+    module_tree: Dictionary mapping each module to its parent. Modules can map
+      to None, which is considered the root of the tree.
+    libraries_map: Dictionary mapping each library to a set of modules, which
+      depend on the library.
+
+  Returns:
+    A dictionary mapping each module name to a set of libraries allotted
+    to the module such that libraries with multiple dependees are allotted to
+    the closest ancestor.
+
+  Raises:
+    Exception if some libraries can only be allotted to the None root.
+  """
+  allotment_map = collections.defaultdict(set)
+  for library, modules in libraries_map.items():
+    ancestor = _ClosestCommonAncestor(module_tree, modules)
+    if not ancestor:
+      raise Exception('Cannot allot libraries for given dependency tree')
+    allotment_map[ancestor].add(library)
+  return allotment_map
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--libraries',
+      action='append',
+      type=_ModuleLibrariesPair,
+      required=True,
+      help='A pair of module name and GN list of libraries a module depends '
+      'on. Can be specified multiple times.')
+  parser.add_argument(
+      '--output',
+      required=True,
+      help='A JSON file with a key for each module mapping to the list of '
+      'libraries that should be packaged into that module.')
+  parser.add_argument(
+      '--dep',
+      action='append',
+      type=_DepPair,
+      dest='deps',
+      default=[],
+      help='A pair of parent module name and child module name '
+      '(format: "<parent>:<child>"). Can be specified multiple times.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  options.libraries = [(m, build_utils.ParseGnList(l))
+                       for m, l in options.libraries]
+
+  # Parse input creating libraries and dependency tree.
+  # Maps each library to its dependee modules.
+  libraries_map = collections.defaultdict(set)
+  module_tree = {}  # Maps each module name to its parent.
+  for module, libraries in options.libraries:
+    module_tree[module] = None
+    for library in libraries:
+      libraries_map[library].add(module)
+  for parent, child in options.deps:
+    if module_tree.get(child):
+      raise Exception('%s cannot have multiple parents' % child)
+    module_tree[child] = parent
+    module_tree[parent] = module_tree.get(parent)
+
+  # Allot all libraries to a module such that libraries with multiple dependees
+  # are allotted to the closest ancestor.
+  allotment_map = _AllotLibraries(module_tree, libraries_map)
+
+  # The build system expects there to be a set of libraries even for the
+  # modules that don't have any libraries allotted.
+  for module in module_tree:
+    # Creates missing sets because of defaultdict.
+    allotment_map[module] = allotment_map[module]
+
+  with open(options.output, 'w') as f:
+    # Write native libraries config and ensure the output is deterministic.
+    json.dump({m: sorted(l)
+               for m, l in allotment_map.items()},
+              f,
+              sort_keys=True,
+              indent=2)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/allot_native_libraries.pydeps b/src/build/android/gyp/allot_native_libraries.pydeps
new file mode 100644
index 0000000..d8b10cd
--- /dev/null
+++ b/src/build/android/gyp/allot_native_libraries.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py
+../../gn_helpers.py
+allot_native_libraries.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/apkbuilder.py b/src/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000..f1e6563
--- /dev/null
+++ b/src/build/android/gyp/apkbuilder.py
@@ -0,0 +1,560 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import finalize_apk
+
+from util import build_utils
+from util import diff_utils
+from util import zipalign
+
+# Input dex.jar files are zipaligned.
+zipalign.ApplyZipFileZipAlignFix()
+
+
+# Taken from aapt's Package.cpp:
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--assets',
+      help='GYP-list of files to add as assets in the form '
+      '"srcPath:zipPath", where ":zipPath" is optional.')
+  parser.add_argument(
+      '--java-resources', help='GYP-list of java_resources JARs to include.')
+  parser.add_argument('--write-asset-list',
+                      action='store_true',
+                      help='Whether to create an assets/assets_list file.')
+  parser.add_argument(
+      '--uncompressed-assets',
+      help='Same as --assets, except disables compression.')
+  parser.add_argument('--resource-apk',
+                      help='An .ap_ file built using aapt',
+                      required=True)
+  parser.add_argument('--output-apk',
+                      help='Path to the output file',
+                      required=True)
+  parser.add_argument('--format', choices=['apk', 'bundle-module'],
+                      default='apk', help='Specify output format.')
+  parser.add_argument('--dex-file',
+                      help='Path to the classes.dex to use')
+  parser.add_argument(
+      '--jdk-libs-dex-file',
+      help='Path to classes.dex created by dex_jdk_libs.py')
+  parser.add_argument('--uncompress-dex', action='store_true',
+                      help='Store .dex files uncompressed in the APK')
+  parser.add_argument('--native-libs',
+                      action='append',
+                      help='GYP-list of native libraries to include. '
+                           'Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--secondary-native-libs',
+                      action='append',
+                      help='GYP-list of native libraries for secondary '
+                           'android-abi. Can be specified multiple times.',
+                      default=[])
+  parser.add_argument('--android-abi',
+                      help='Android architecture to use for native libraries')
+  parser.add_argument('--secondary-android-abi',
+                      help='The secondary Android architecture to use for '
+                           'secondary native libraries')
+  parser.add_argument(
+      '--is-multi-abi',
+      action='store_true',
+      help='Will add a placeholder for the missing ABI if no native libs or '
+      'placeholders are set for either the primary or secondary ABI. Can only '
+      'be set if both --android-abi and --secondary-android-abi are set.')
+  parser.add_argument(
+      '--native-lib-placeholders',
+      help='GYP-list of native library placeholders to add.')
+  parser.add_argument(
+      '--secondary-native-lib-placeholders',
+      help='GYP-list of native library placeholders to add '
+      'for the secondary ABI')
+  parser.add_argument('--uncompress-shared-libraries', default='False',
+      choices=['true', 'True', 'false', 'False'],
+      help='Whether to uncompress native shared libraries. Argument must be '
+           'a boolean value.')
+  parser.add_argument(
+      '--apksigner-jar', help='Path to the apksigner executable.')
+  parser.add_argument('--zipalign-path',
+                      help='Path to the zipalign executable.')
+  parser.add_argument('--key-path',
+                      help='Path to keystore for signing.')
+  parser.add_argument('--key-passwd',
+                      help='Keystore password')
+  parser.add_argument('--key-name',
+                      help='Keystore name')
+  parser.add_argument(
+      '--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
+  parser.add_argument(
+      '--best-compression',
+      action='store_true',
+      help='Use zip -9 rather than zip -1')
+  parser.add_argument(
+      '--library-always-compress',
+      action='append',
+      help='The list of library files that we always compress.')
+  parser.add_argument(
+      '--library-renames',
+      action='append',
+      help='The list of library files whose names we prefix with "crazy.".')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+  options.native_lib_placeholders = build_utils.ParseGnList(
+      options.native_lib_placeholders)
+  options.secondary_native_lib_placeholders = build_utils.ParseGnList(
+      options.secondary_native_lib_placeholders)
+  options.java_resources = build_utils.ParseGnList(options.java_resources)
+  options.native_libs = build_utils.ParseGnList(options.native_libs)
+  options.secondary_native_libs = build_utils.ParseGnList(
+      options.secondary_native_libs)
+  options.library_always_compress = build_utils.ParseGnList(
+      options.library_always_compress)
+  options.library_renames = build_utils.ParseGnList(options.library_renames)
+
+  # --apksigner-jar, --zipalign-path, --key-xxx arguments are
+  # required when building an APK, but not a bundle module.
+  if options.format == 'apk':
+    required_args = [
+        'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
+    ]
+    for required in required_args:
+      if not vars(options)[required]:
+        raise Exception('Argument --%s is required for APKs.' % (
+            required.replace('_', '-')))
+
+  options.uncompress_shared_libraries = (
+      options.uncompress_shared_libraries in ('true', 'True'))
+
+  if not options.android_abi and (options.native_libs or
+                                  options.native_lib_placeholders):
+    raise Exception('Must specify --android-abi with --native-libs')
+  if not options.secondary_android_abi and (options.secondary_native_libs or
+      options.secondary_native_lib_placeholders):
+    raise Exception('Must specify --secondary-android-abi with'
+                    ' --secondary-native-libs')
+  if options.is_multi_abi and not (options.android_abi
+                                   and options.secondary_android_abi):
+    raise Exception('Must specify --is-multi-abi with both --android-abi '
+                    'and --secondary-android-abi.')
+  return options
+
+
+def _SplitAssetPath(path):
+  """Returns (src, dest) given an asset path in the form src[:dest]."""
+  path_parts = path.split(':')
+  src_path = path_parts[0]
+  if len(path_parts) > 1:
+    dest_path = path_parts[1]
+  else:
+    dest_path = os.path.basename(src_path)
+  return src_path, dest_path
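+
+# Doctest-style illustration (hypothetical paths):
+#   >>> _SplitAssetPath('foo/bar.png:images/bar.png')
+#   ('foo/bar.png', 'images/bar.png')
+#   >>> _SplitAssetPath('foo/bar.png')
+#   ('foo/bar.png', 'bar.png')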
+
+
+def _ExpandPaths(paths):
+  """Converts src:dst into tuples and enumerates files within directories.
+
+  Args:
+    paths: Paths in the form "src_path:dest_path"
+
+  Returns:
+    A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+    ordering within output .apk).
+  """
+  ret = []
+  for path in paths:
+    src_path, dest_path = _SplitAssetPath(path)
+    if os.path.isdir(src_path):
+      for f in build_utils.FindInDirectory(src_path, '*'):
+        ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+    else:
+      ret.append((src_path, dest_path))
+  ret.sort(key=lambda t: t[1])
+  return ret
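+
+# Sketch of the expansion (hypothetical layout where icons/ holds a.png and
+# b.png):
+#   >>> _ExpandPaths(['icons:assets/icons', 'LICENSE'])
+#   [('LICENSE', 'LICENSE'), ('icons/a.png', 'assets/icons/a.png'),
+#    ('icons/b.png', 'assets/icons/b.png')]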
+
+
+def _GetAssetsToAdd(path_tuples,
+                    fast_align,
+                    disable_compression=False,
+                    allow_reads=True):
+  """Returns the list of file_detail tuples for assets in the apk.
+
+  Args:
+    path_tuples: List of src_path, dest_path tuples to add.
+    fast_align: Whether to perform alignment in python zipfile (alternatively
+                alignment can be done using the zipalign utility out of band).
+    disable_compression: Whether to disable compression.
+    allow_reads: If false, we do not try to read the files from disk (to find
+                 their size for example).
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuples
+  representing what and how assets are added.
+  """
+  assets_to_add = []
+
+  # Group all uncompressed assets together in the hope that it will increase
+  # locality of mmap'ed files.
+  for target_compress in (False, True):
+    for src_path, dest_path in path_tuples:
+      compress = not disable_compression and (
+          os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
+
+      if target_compress == compress:
+        # AddToZipHermetic() uses this logic to avoid growing small files.
+        # We need it here in order to set alignment correctly.
+        if allow_reads and compress and os.path.getsize(src_path) < 16:
+          compress = False
+
+        apk_path = 'assets/' + dest_path
+        alignment = 0 if compress and not fast_align else 4
+        assets_to_add.append((apk_path, src_path, compress, alignment))
+  return assets_to_add
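+
+# For instance (hypothetical paths), _GetAssetsToAdd([('a.png', 'a.png')],
+# fast_align=True) would yield [('assets/a.png', 'a.png', False, 4)]: .png is
+# in _NO_COMPRESS_EXTENSIONS, and uncompressed entries get 4-byte alignment.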
+
+
+def _AddFiles(apk, details):
+  """Adds files to the apk.
+
+  Args:
+    apk: path to APK to add to.
+    details: A list of file detail tuples (apk_path, src_path, compress,
+    alignment) representing what and how files are added to the APK.
+  """
+  for apk_path, src_path, compress, alignment in details:
+    # This check is only relevant for assets, but it should not matter if it is
+    # checked for the whole list of files.
+    try:
+      apk.getinfo(apk_path)
+      # Should never happen since write_build_config.py handles merging.
+      raise Exception(
+          'Multiple targets specified the asset path: %s' % apk_path)
+    except KeyError:
+      zipalign.AddToZipHermetic(
+          apk,
+          apk_path,
+          src_path=src_path,
+          compress=compress,
+          alignment=alignment)
+
+
+def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
+                             lib_always_compress, lib_renames):
+  """Returns the list of file_detail tuples for native libraries in the apk.
+
+  Returns: A list of (apk_path, src_path, compress, alignment) tuples
+  representing what and how native libraries are added.
+  """
+  libraries_to_add = []
+
+  for path in native_libs:
+    basename = os.path.basename(path)
+    compress = not uncompress or any(lib_name in basename
+                                     for lib_name in lib_always_compress)
+    rename = any(lib_name in basename for lib_name in lib_renames)
+    if rename:
+      basename = 'crazy.' + basename
+
+    lib_android_abi = android_abi
+    if path.startswith('android_clang_arm64_hwasan/'):
+      lib_android_abi = 'arm64-v8a-hwasan'
+
+    apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
+    alignment = 0 if compress and not fast_align else 0x1000
+    libraries_to_add.append((apk_path, path, compress, alignment))
+
+  return libraries_to_add
+
+
+def _CreateExpectationsData(native_libs, assets):
+  """Creates list of native libraries and assets."""
+  native_libs = sorted(native_libs)
+  assets = sorted(assets)
+
+  ret = []
+  for apk_path, _, compress, alignment in native_libs + assets:
+    ret.append('apk_path=%s, compress=%s, alignment=%s\n' %
+               (apk_path, compress, alignment))
+  return ''.join(ret)
+
+
+def main(args):
+  build_utils.InitLogging('APKBUILDER_DEBUG')
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Until Python 3.7, there's no better way to set compression level.
+  # The default is 6.
+  if options.best_compression:
+    # Compresses about twice as slow as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 9
+  else:
+    # Compresses about twice as fast as the default.
+    zlib.Z_DEFAULT_COMPRESSION = 1
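+  # On Python 3.7+, this could instead be expressed per archive (a sketch,
+  # not what this script does):
+  #   level = 9 if options.best_compression else 1
+  #   zipfile.ZipFile(f, 'w', compresslevel=level)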
+
+  # Manually align only when alignment is necessary.
+  # Python's zip implementation duplicates file comments in the central
+  # directory, whereas zipalign does not, so use zipalign for official builds.
+  fast_align = options.format == 'apk' and not options.best_compression
+
+  native_libs = sorted(options.native_libs)
+
+  # Include native libs in the depfile_deps since GN doesn't know about the
+  # dependencies when is_component_build=true.
+  depfile_deps = list(native_libs)
+
+  # For targets that depend on static library APKs, dex paths are created by
+  # the static library's dexsplitter target and GN doesn't know about these
+  # paths.
+  if options.dex_file:
+    depfile_deps.append(options.dex_file)
+
+  secondary_native_libs = []
+  if options.secondary_native_libs:
+    secondary_native_libs = sorted(options.secondary_native_libs)
+    depfile_deps += secondary_native_libs
+
+  if options.java_resources:
+    # Included via .build_config, so need to write it to depfile.
+    depfile_deps.extend(options.java_resources)
+
+  assets = _ExpandPaths(options.assets)
+  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)
+
+  # Included via .build_config, so need to write it to depfile.
+  depfile_deps.extend(x[0] for x in assets)
+  depfile_deps.extend(x[0] for x in uncompressed_assets)
+
+  # Bundle modules have a structure similar to APKs, except that resources
+  # are compiled in protobuf format (instead of binary xml), and that some
+  # files are located in different top-level directories, e.g.:
+  #  AndroidManifest.xml -> manifest/AndroidManifest.xml
+  #  classes.dex -> dex/classes.dex
+  #  res/ -> res/  (unchanged)
+  #  assets/ -> assets/  (unchanged)
+  #  <other-file> -> root/<other-file>
+  #
+  # Hence, the following variables are used to control the location of files in
+  # the final archive.
+  if options.format == 'bundle-module':
+    apk_manifest_dir = 'manifest/'
+    apk_root_dir = 'root/'
+    apk_dex_dir = 'dex/'
+  else:
+    apk_manifest_dir = ''
+    apk_root_dir = ''
+    apk_dex_dir = ''
+
+  def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
+    ret = _GetAssetsToAdd(assets,
+                          fast_align,
+                          disable_compression=False,
+                          allow_reads=allow_reads)
+    ret.extend(
+        _GetAssetsToAdd(uncompressed_assets,
+                        fast_align,
+                        disable_compression=True,
+                        allow_reads=allow_reads))
+    return ret
+
+  libs_to_add = _GetNativeLibrariesToAdd(
+      native_libs, options.android_abi, options.uncompress_shared_libraries,
+      fast_align, options.library_always_compress, options.library_renames)
+  if options.secondary_android_abi:
+    libs_to_add.extend(
+        _GetNativeLibrariesToAdd(
+            secondary_native_libs, options.secondary_android_abi,
+            options.uncompress_shared_libraries, fast_align,
+            options.library_always_compress, options.library_renames))
+
+  if options.expected_file:
+    # We compute expectations without reading the files. This allows us to
+    # check expectations for different targets by just generating their
+    # build_configs, without having to first generate all the actual files and
+    # all their dependencies (for example by just passing
+    # --only-verify-expectations).
+    asset_details = _GetAssetDetails(assets,
+                                     uncompressed_assets,
+                                     fast_align,
+                                     allow_reads=False)
+
+    actual_data = _CreateExpectationsData(libs_to_add, asset_details)
+    diff_utils.CheckExpectations(actual_data, options)
+
+    if options.only_verify_expectations:
+      if options.depfile:
+        build_utils.WriteDepfile(options.depfile,
+                                 options.actual_file,
+                                 inputs=depfile_deps)
+      return
+
+  # Past this point we actually create the final apk, so recompute the asset
+  # details, this time allowing reads so that optimizations based on on-disk
+  # file sizes can be applied.
+  assets_to_add = _GetAssetDetails(
+      assets, uncompressed_assets, fast_align, allow_reads=True)
+
+  # Targets generally do not depend on apks, so no need for only_if_changed.
+  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
+    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
+         zipfile.ZipFile(f, 'w') as out_apk:
+
+      def add_to_zip(zip_path, data, compress=True, alignment=4):
+        zipalign.AddToZipHermetic(
+            out_apk,
+            zip_path,
+            data=data,
+            compress=compress,
+            alignment=0 if compress and not fast_align else alignment)
+
+      def copy_resource(zipinfo, out_dir=''):
+        add_to_zip(
+            out_dir + zipinfo.filename,
+            resource_apk.read(zipinfo.filename),
+            compress=zipinfo.compress_type != zipfile.ZIP_STORED)
+
+      # Make assets come before resources in order to maintain the same file
+      # ordering as GYP / aapt. http://crbug.com/561862
+      resource_infos = resource_apk.infolist()
+
+      # 1. AndroidManifest.xml
+      logging.debug('Adding AndroidManifest.xml')
+      copy_resource(
+          resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)
+
+      # 2. Assets
+      logging.debug('Adding assets/')
+      _AddFiles(out_apk, assets_to_add)
+
+      # 3. Dex files
+      logging.debug('Adding classes.dex')
+      max_dex_number = 0  # Guards the jdk-libs block when --dex-file is unset.
+      if options.dex_file:
+        with open(options.dex_file, 'rb') as dex_file_obj:
+          if options.dex_file.endswith('.dex'):
+            max_dex_number = 1
+            # This is the case for incremental_install=true.
+            add_to_zip(
+                apk_dex_dir + 'classes.dex',
+                dex_file_obj.read(),
+                compress=not options.uncompress_dex)
+          else:
+            max_dex_number = 0
+            with zipfile.ZipFile(dex_file_obj) as dex_zip:
+              for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+                max_dex_number += 1
+                add_to_zip(
+                    apk_dex_dir + dex,
+                    dex_zip.read(dex),
+                    compress=not options.uncompress_dex)
+
+      if options.jdk_libs_dex_file:
+        with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
+          add_to_zip(
+              apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
+              dex_file_obj.read(),
+              compress=not options.uncompress_dex)
+
+      # 4. Native libraries.
+      logging.debug('Adding lib/')
+      _AddFiles(out_apk, libs_to_add)
+
+      # Add a placeholder lib if the APK should be multi ABI but is missing libs
+      # for one of the ABIs.
+      native_lib_placeholders = options.native_lib_placeholders
+      secondary_native_lib_placeholders = (
+          options.secondary_native_lib_placeholders)
+      if options.is_multi_abi:
+        if ((secondary_native_libs or secondary_native_lib_placeholders)
+            and not native_libs and not native_lib_placeholders):
+          native_lib_placeholders += ['libplaceholder.so']
+        if ((native_libs or native_lib_placeholders)
+            and not secondary_native_libs
+            and not secondary_native_lib_placeholders):
+          secondary_native_lib_placeholders += ['libplaceholder.so']
+
+      # Add placeholder libs.
+      for name in sorted(native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.android_abi, name)
+        add_to_zip(apk_path, '', alignment=0x1000)
+
+      for name in sorted(secondary_native_lib_placeholders):
+        # Note: Empty libs files are ignored by md5check (can cause issues
+        # with stale builds when the only change is adding/removing
+        # placeholders).
+        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
+        add_to_zip(apk_path, '', alignment=0x1000)
+
+      # 5. Resources
+      logging.debug('Adding res/')
+      for info in sorted(resource_infos, key=lambda i: i.filename):
+        if info.filename != 'AndroidManifest.xml':
+          copy_resource(info)
+
+      # 6. Java resources that should be accessible via
+      # Class.getResourceAsStream(), in particular parts of Emma jar.
+      # Prebuilt jars may contain class files which we shouldn't include.
+      logging.debug('Adding Java resources')
+      for java_resource in options.java_resources:
+        with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
+          for apk_path in sorted(java_resource_jar.namelist()):
+            apk_path_lower = apk_path.lower()
+
+            if apk_path_lower.startswith('meta-inf/'):
+              continue
+            if apk_path_lower.endswith('/'):
+              continue
+            if apk_path_lower.endswith('.class'):
+              continue
+
+            add_to_zip(apk_root_dir + apk_path,
+                       java_resource_jar.read(apk_path))
+
+    if options.format == 'apk':
+      zipalign_path = None if fast_align else options.zipalign_path
+      finalize_apk.FinalizeApk(options.apksigner_jar,
+                               zipalign_path,
+                               f.name,
+                               f.name,
+                               options.key_path,
+                               options.key_passwd,
+                               options.key_name,
+                               int(options.min_sdk_version),
+                               warnings_as_errors=options.warnings_as_errors)
+    logging.debug('Moving file into place')
+
+    if options.depfile:
+      build_utils.WriteDepfile(options.depfile,
+                               options.output_apk,
+                               inputs=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/apkbuilder.pydeps b/src/build/android/gyp/apkbuilder.pydeps
new file mode 100644
index 0000000..e6122ed
--- /dev/null
+++ b/src/build/android/gyp/apkbuilder.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../gn_helpers.py
+apkbuilder.py
+finalize_apk.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/zipalign.py
diff --git a/src/build/android/gyp/assert_static_initializers.py b/src/build/android/gyp/assert_static_initializers.py
new file mode 100755
index 0000000..31f2a77
--- /dev/null
+++ b/src/build/android/gyp/assert_static_initializers.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks the number of static initializers in an APK's library."""
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
+                                              'tools', 'linux',
+                                              'dump-static-initializers.py')
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+  return subprocess.check_output([tool_prefix + 'readelf'] + options +
+                                 [so_path]).decode('utf8')
+
+
+def _ParseLibBuildId(so_path, tool_prefix):
+  """Returns the Build ID of the given native library."""
+  stdout = _RunReadelf(so_path, ['-n'], tool_prefix)
+  match = re.search(r'Build ID: (\w+)', stdout)
+  return match.group(1) if match else None
+
+
+def _VerifyLibBuildIdsMatch(tool_prefix, *so_files):
+  if len(set(_ParseLibBuildId(f, tool_prefix) for f in so_files)) > 1:
+    raise Exception('Found differing build ids in output directory and apk. '
+                    'Your output directory is likely stale.')
+
+
+def _GetStaticInitializers(so_path, tool_prefix):
+  output = subprocess.check_output(
+      [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t',
+       tool_prefix]).decode('utf8')
+  summary = re.search(r'Found \d+ static initializers in (\d+) files.', output)
+  return output.splitlines()[:-1], int(summary.group(1))
+
+
+def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
+  lib_name = os.path.basename(apk_so_name).replace('crazy.', '')
+  so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+  if not os.path.exists(so_with_symbols_path):
+    raise Exception('Unstripped .so not found. Looked here: %s' %
+                    so_with_symbols_path)
+  _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path)
+  sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix)
+  for si in sis:
+    print(si)
+
+
+# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+def _ReadInitArray(so_path, tool_prefix, expect_no_initializers):
+  stdout = _RunReadelf(so_path, ['-SW'], tool_prefix)
+  # Matches: .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8
+  match = re.search(r'\.init_array.*$', stdout, re.MULTILINE)
+  if expect_no_initializers:
+    if match:
+      raise Exception(
+          'Expected no initializers for %s, yet some were found' % so_path)
+    else:
+      return 0
+  elif not match:
+    raise Exception('Did not find section: .init_array in {}:\n{}'.format(
+        so_path, stdout))
+  size_str = re.split(r'\W+', match.group(0))[5]
+  return int(size_str, 16)
+
+
+def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers):
+  # Count the static initializers in the given library.
+  # First determine whether the binary is 32- or 64-bit.
+  stdout = _RunReadelf(so_path, ['-h'], tool_prefix)
+  elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
+  elf_class = re.split(r'\W+', elf_class_line)[1]
+  if elf_class == 'ELF32':
+    word_size = 4
+  else:
+    word_size = 8
+
+  # Then derive the number of global static initializers from the size of
+  # .init_array. NOTE: this is very implementation-specific and makes
+  # assumptions about how compiler and linker implement global static
+  # initializers.
+  init_array_size = _ReadInitArray(so_path, tool_prefix, expect_no_initializers)
+  return init_array_size // word_size
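+
+# Worked example: for an ELF64 library whose .init_array section is 0x20 bytes
+# long, word_size is 8, so this reports 0x20 // 8 = 4 static initializers.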
+
+
+def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir,
+                               ignored_libs, no_initializers_libs):
+  # Static initializer counting mostly copies logic in
+  # infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+  with zipfile.ZipFile(apk_or_aab) as z:
+    so_files = [
+        f for f in z.infolist() if f.filename.endswith('.so')
+        and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
+    ]
+    # Skip checking static initializers for secondary abi libs. They will be
+    # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so
+    # files in the output directory in 64 bit builds.
+    has_64 = any('64' in f.filename for f in so_files)
+    files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
+
+    # Do not check partitioned libs. They have no ".init_array" section since
+    # all SIs are considered "roots" by the linker, and so end up in the base
+    # module.
+    files_to_check = [
+        f for f in files_to_check if not f.filename.endswith('_partition.so')
+    ]
+
+    si_count = 0
+    for f in files_to_check:
+      lib_basename = os.path.basename(f.filename)
+      expect_no_initializers = lib_basename in no_initializers_libs
+      with tempfile.NamedTemporaryFile(prefix=lib_basename) as temp:
+        temp.write(z.read(f))
+        temp.flush()
+        si_count += _CountStaticInitializers(temp.name, tool_prefix,
+                                             expect_no_initializers)
+        if dump_sis:
+          # Print count and list of SIs reported by dump-static-initializers.py.
+          # Doesn't work well on all archs (particularly arm), which is why
+          # the readelf method is used for tracking SI counts.
+          _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix)
+  return si_count
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--touch', help='File to touch upon success')
+  parser.add_argument('--tool-prefix', required=True,
+                      help='Prefix for nm and friends')
+  parser.add_argument('--expected-count', required=True, type=int,
+                      help='Fail if number of static initializers is not '
+                           'equal to this value.')
+  parser.add_argument('apk_or_aab', help='Path to .apk or .aab file.')
+  args = parser.parse_args()
+
+  # TODO(crbug.com/838414): add support for files included via loadable_modules.
+  ignored_libs = {
+      'libarcore_sdk_c.so', 'libcrashpad_handler_trampoline.so',
+      'libsketchology_native.so'
+  }
+  # The chromium linker has no static initializers (and should never gain
+  # any); the lack of an .init_array section makes the regular check throw,
+  # so it is verified via expect_no_initializers instead.
+  no_initializers_libs = ['libchromium_android_linker.so']
+
+  si_count = _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix,
+                                        False, '.', ignored_libs,
+                                        no_initializers_libs)
+  if si_count != args.expected_count:
+    print('Expected {} static initializers, but found {}.'.format(
+        args.expected_count, si_count))
+    if args.expected_count > si_count:
+      print('You have removed one or more static initializers. Thanks!')
+      print('To fix the build, update the expectation in:')
+      print('    //chrome/android/static_initializers.gni')
+    else:
+      print('Dumping static initializers via dump-static-initializers.py:')
+      sys.stdout.flush()
+      _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.',
+                                 ignored_libs, no_initializers_libs)
+      print()
+      print('If the above list is not useful, consider listing them with:')
+      print('    //tools/binary_size/diagnose_bloat.py')
+      print()
+      print('For more information:')
+      print('    https://chromium.googlesource.com/chromium/src/+/master/docs/'
+            'static_initializers.md')
+    sys.exit(1)
+
+  if args.touch:
+    open(args.touch, 'w').close()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/assert_static_initializers.pydeps b/src/build/android/gyp/assert_static_initializers.pydeps
new file mode 100644
index 0000000..b574d81
--- /dev/null
+++ b/src/build/android/gyp/assert_static_initializers.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py
+../../gn_helpers.py
+assert_static_initializers.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/bundletool.py b/src/build/android/gyp/bundletool.py
new file mode 100755
index 0000000..dc9b86a
--- /dev/null
+++ b/src/build/android/gyp/bundletool.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple wrapper around the bundletool tool.
+
+Bundletool is distributed as a versioned jar file. This script abstracts the
+location and version of this jar file, as well as the JVM invocation."""
+
+import logging
+import os
+import sys
+
+from util import build_utils
+
+# Assume this is stored under build/android/gyp/
+BUNDLETOOL_DIR = os.path.abspath(os.path.join(
+    __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
+    'bundletool'))
+
+BUNDLETOOL_VERSION = '1.4.0'
+
+BUNDLETOOL_JAR_PATH = os.path.join(
+    BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+
+
+def RunBundleTool(args, warnings_as_errors=(), print_stdout=False):
+  # Use () instead of None because command-line flags are None by default.
+  verify = warnings_as_errors == () or warnings_as_errors
+  # ASAN builds failed with the default of 1GB (crbug.com/1120202).
+  # Bug for bundletool: https://issuetracker.google.com/issues/165911616
+  cmd = build_utils.JavaCmd(verify, xmx='4G')
+  cmd += ['-jar', BUNDLETOOL_JAR_PATH]
+  cmd += args
+  logging.debug(' '.join(cmd))
+  return build_utils.CheckOutput(
+      cmd,
+      print_stdout=print_stdout,
+      print_stderr=True,
+      fail_on_output=False,
+      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
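+
+# Example usage (the 'version' command is part of bundletool itself):
+#   RunBundleTool(['version'], print_stdout=True)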
+
+
+if __name__ == '__main__':
+  RunBundleTool(sys.argv[1:], print_stdout=True)
diff --git a/src/build/android/gyp/bytecode_processor.py b/src/build/android/gyp/bytecode_processor.py
new file mode 100755
index 0000000..d77f159
--- /dev/null
+++ b/src/build/android/gyp/bytecode_processor.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps bin/helper/bytecode_processor and expands @FileArgs."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import server_utils
+
+
+def _AddSwitch(parser, val):
+  parser.add_argument(
+      val, action='store_const', default='--disabled', const=val)
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--target-name', help='Fully qualified GN target name.')
+  parser.add_argument('--script', required=True,
+                      help='Path to the java binary wrapper script.')
+  parser.add_argument('--gn-target', required=True)
+  parser.add_argument('--input-jar', required=True)
+  parser.add_argument('--direct-classpath-jars')
+  parser.add_argument('--sdk-classpath-jars')
+  parser.add_argument('--full-classpath-jars')
+  parser.add_argument('--full-classpath-gn-targets')
+  parser.add_argument('--stamp')
+  parser.add_argument('-v', '--verbose', action='store_true')
+  parser.add_argument('--missing-classes-allowlist')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  _AddSwitch(parser, '--is-prebuilt')
+  args = parser.parse_args(argv)
+
+  if server_utils.MaybeRunCommand(name=args.target_name,
+                                  argv=sys.argv,
+                                  stamp_file=args.stamp):
+    return
+
+  args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+  args.direct_classpath_jars = build_utils.ParseGnList(
+      args.direct_classpath_jars)
+  args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
+  args.full_classpath_gn_targets = build_utils.ParseGnList(
+      args.full_classpath_gn_targets)
+  args.missing_classes_allowlist = build_utils.ParseGnList(
+      args.missing_classes_allowlist)
+
+  verbose = '--verbose' if args.verbose else '--not-verbose'
+
+  cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt]
+  cmd += [str(len(args.missing_classes_allowlist))]
+  cmd += args.missing_classes_allowlist
+  cmd += [str(len(args.sdk_classpath_jars))]
+  cmd += args.sdk_classpath_jars
+  cmd += [str(len(args.direct_classpath_jars))]
+  cmd += args.direct_classpath_jars
+  cmd += [str(len(args.full_classpath_jars))]
+  cmd += args.full_classpath_jars
+  cmd += [str(len(args.full_classpath_gn_targets))]
+  cmd += args.full_classpath_gn_targets
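+  # Each list above is passed length-prefixed (a count followed by the items);
+  # e.g. two sdk jars (hypothetical) contribute: ..., '2', 'a.jar', 'b.jar'.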
+  build_utils.CheckOutput(cmd,
+                          print_stdout=True,
+                          fail_func=None,
+                          fail_on_output=args.warnings_as_errors)
+
+  if args.stamp:
+    build_utils.Touch(args.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/bytecode_processor.pydeps b/src/build/android/gyp/bytecode_processor.pydeps
new file mode 100644
index 0000000..6105d93
--- /dev/null
+++ b/src/build/android/gyp/bytecode_processor.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../gn_helpers.py
+bytecode_processor.py
+util/__init__.py
+util/build_utils.py
+util/server_utils.py
diff --git a/src/build/android/gyp/bytecode_rewriter.py b/src/build/android/gyp/bytecode_rewriter.py
new file mode 100755
index 0000000..ad232df
--- /dev/null
+++ b/src/build/android/gyp/bytecode_rewriter.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script around ByteCodeRewriter subclass scripts."""
+
+import argparse
+import sys
+
+from util import build_utils
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--script',
+                      required=True,
+                      help='Path to the java binary wrapper script.')
+  parser.add_argument('--classpath', action='append', nargs='+')
+  parser.add_argument('--input-jar', required=True)
+  parser.add_argument('--output-jar', required=True)
+  args = parser.parse_args(argv)
+
+  classpath = build_utils.ParseGnList(args.classpath)
+  build_utils.WriteDepfile(args.depfile, args.output_jar, inputs=classpath)
+
+  classpath.append(args.input_jar)
+  cmd = [
+      args.script, '--classpath', ':'.join(classpath), args.input_jar,
+      args.output_jar
+  ]
+  build_utils.CheckOutput(cmd, print_stdout=True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/gyp/bytecode_rewriter.pydeps b/src/build/android/gyp/bytecode_rewriter.pydeps
new file mode 100644
index 0000000..b8f304a
--- /dev/null
+++ b/src/build/android/gyp/bytecode_rewriter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py
+../../gn_helpers.py
+bytecode_rewriter.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/check_flag_expectations.py b/src/build/android/gyp/check_flag_expectations.py
new file mode 100755
index 0000000..22da211
--- /dev/null
+++ b/src/build/android/gyp/check_flag_expectations.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+
+from util import build_utils
+from util import diff_utils
+
+IGNORE_FLAG_PREFIXES = [
+    # For cflags.
+    '-DANDROID_NDK_VERSION_ROLL',
+    '-DCR_LIBCXX_REVISION',
+    '-I',
+    '-g',
+    '-fcrash-diagnostics-dir=',
+    '-fprofile',
+    '--no-system-header-prefix',
+    '--system-header-prefix',
+    '-isystem',
+    '-iquote',
+    '-fmodule-map',
+    '-frandom-seed',
+    '-c ',
+    '-o ',
+    '-fmodule-name=',
+    '--sysroot=',
+    '-fcolor-diagnostics',
+    '-MF ',
+    '-MD',
+
+    # For ldflags.
+    '-Wl,--thinlto-cache-dir',
+    '-Wl,--thinlto-cache-policy',
+    '-Wl,--thinlto-jobs',
+    '-Wl,--start-lib',
+    '-Wl,--end-lib',
+    '-Wl,-whole-archive',
+    '-Wl,-no-whole-archive',
+    '-l',
+    '-L',
+    '-Wl,-soname',
+    '-Wl,-version-script',
+    '-Wl,--version-script',
+    '-fdiagnostics-color',
+    '-Wl,--color-diagnostics',
+    '-B',
+    '-Wl,--dynamic-linker',
+    '-DCR_CLANG_REVISION=',
+]
+
+FLAGS_WITH_PARAMS = (
+    '-Xclang',
+    '-mllvm',
+    '-Xclang -fdebug-compilation-dir',
+    '-Xclang -add-plugin',
+)
+
+
+def KeepFlag(flag):
+  return not any(flag.startswith(prefix) for prefix in IGNORE_FLAG_PREFIXES)
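+
+# Doctest-style illustration (hypothetical flags):
+#   >>> KeepFlag('-fstack-protector')
+#   True
+#   >>> KeepFlag('-isystem /usr/include')
+#   False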
+
+
+def MergeFlags(flags):
+  flags = _MergeFlagsHelper(flags)
+  # Run a second pass for double params, e.g. -Xclang -fdebug-compilation-dir.
+  flags = _MergeFlagsHelper(flags)
+  return flags
+
+
+def _MergeFlagsHelper(flags):
+  merged_flags = []
+  while flags:
+    current_flag = flags.pop(0)
+    if flags:
+      next_flag = flags[0]
+    else:
+      next_flag = None
+    merge_flags = False
+
+    # Special case some flags that always come with params.
+    if current_flag in FLAGS_WITH_PARAMS:
+      merge_flags = True
+    # Assume flags without '-' are a param.
+    if next_flag and not next_flag.startswith('-'):
+      merge_flags = True
+    # Special case -plugin-arg prefix because it has the plugin name.
+    if current_flag.startswith('-Xclang -plugin-arg'):
+      merge_flags = True
+    if merge_flags:
+      merged_flag = '{} {}'.format(current_flag, next_flag)
+      merged_flags.append(merged_flag)
+      flags.pop(0)
+    else:
+      merged_flags.append(current_flag)
+  return merged_flags
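+
+# Illustration (hypothetical flag list): the two passes in MergeFlags turn
+#   ['-Xclang', '-fdebug-compilation-dir', '.', '-O2']
+# into
+#   ['-Xclang -fdebug-compilation-dir .', '-O2']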
+
+
+def ParseFlags(flag_file_path):
+  flags = []
+  with open(flag_file_path) as f:
+    for flag in f.read().splitlines():
+      if KeepFlag(flag):
+        flags.append(flag)
+  return flags
+
+
+def main():
+  """Compare the flags with the checked in list."""
+  parser = argparse.ArgumentParser()
+  diff_utils.AddCommandLineFlags(parser)
+  parser.add_argument('--current-flags',
+                      help='Path to flags to check against expectations.')
+  options = parser.parse_args()
+
+  flags = ParseFlags(options.current_flags)
+  flags = MergeFlags(flags)
+
+  msg = """
+This expectation file is meant to inform the build team about changes to
+flags used when building native libraries in chrome (most importantly any
+that relate to security). This is to ensure the flags are replicated when
+building native libraries outside of the repo. Please update the .expected
+files and a WATCHLIST entry will alert the build team to your change."""
+  diff_utils.CheckExpectations('\n'.join(sorted(flags)),
+                               options,
+                               custom_msg=msg)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/check_flag_expectations.pydeps b/src/build/android/gyp/check_flag_expectations.pydeps
new file mode 100644
index 0000000..d8c394a
--- /dev/null
+++ b/src/build/android/gyp/check_flag_expectations.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py
+../../gn_helpers.py
+check_flag_expectations.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
diff --git a/src/build/android/gyp/compile_java.py b/src/build/android/gyp/compile_java.py
new file mode 100755
index 0000000..2a92842
--- /dev/null
+++ b/src/build/android/gyp/compile_java.py
@@ -0,0 +1,756 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import multiprocessing
+import optparse
+import os
+import re
+import shutil
+import sys
+import time
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import jar_info_utils
+from util import server_utils
+
+sys.path.insert(
+    0,
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src'))
+import colorama
+
+_JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party',
+                                'android_prebuilts', 'build_tools', 'common',
+                                'framework', 'javac_extractor.jar')
+
+# Add a check here to cause the suggested fix to be applied while compiling.
+# Use this when trying to enable more checks.
+ERRORPRONE_CHECKS_TO_APPLY = []
+
+# Full list of checks: https://errorprone.info/bugpatterns
+ERRORPRONE_WARNINGS_TO_DISABLE = [
+    # The following are super useful, but existing issues need to be fixed first
+    # before they can start failing the build on new errors.
+    'InvalidParam',
+    'InvalidLink',
+    'InvalidInlineTag',
+    'EmptyBlockTag',
+    'PublicConstructorForAbstractClass',
+    'InvalidBlockTag',
+    'StaticAssignmentInConstructor',
+    'MutablePublicArray',
+    'UnescapedEntity',
+    'NonCanonicalType',
+    'AlmostJavadoc',
+    # TODO(crbug.com/834807): Follow steps in bug
+    'DoubleBraceInitialization',
+    # TODO(crbug.com/834790): Follow steps in bug.
+    'CatchAndPrintStackTrace',
+    # TODO(crbug.com/801210): Follow steps in bug.
+    'SynchronizeOnNonFinalField',
+    # TODO(crbug.com/802073): Follow steps in bug.
+    'TypeParameterUnusedInFormals',
+    # TODO(crbug.com/803484): Follow steps in bug.
+    'CatchFail',
+    # TODO(crbug.com/803485): Follow steps in bug.
+    'JUnitAmbiguousTestClass',
+    # Android platform default is always UTF-8.
+    # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
+    'DefaultCharset',
+    # Low priority since the alternatives still work.
+    'JdkObsolete',
+    # We don't use that many lambdas.
+    'FunctionalInterfaceClash',
+    # There are lots of times when we just want to post a task.
+    'FutureReturnValueIgnored',
+    # Nice to be explicit about operators, but not necessary.
+    'OperatorPrecedence',
+    # Just false positives in our code.
+    'ThreadJoinLoop',
+    # Low priority corner cases with String.split.
+    # Linking Guava and using Splitter was rejected
+    # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630.
+    'StringSplitter',
+    # Preferred to use another method since it propagates exceptions better.
+    'ClassNewInstance',
+    # Nice to have static inner classes but not necessary.
+    'ClassCanBeStatic',
+    # Explicit is better than implicit.
+    'FloatCast',
+    # Results in false positives.
+    'ThreadLocalUsage',
+    # Also just false positives.
+    'Finally',
+    # False positives for Chromium.
+    'FragmentNotInstantiable',
+    # Low priority to fix.
+    'HidingField',
+    # Low priority.
+    'IntLongMath',
+    # Low priority.
+    'BadComparable',
+    # Low priority.
+    'EqualsHashCode',
+    # Nice to fix but low priority.
+    'TypeParameterShadowing',
+    # Good to have immutable enums, also low priority.
+    'ImmutableEnumChecker',
+    # False positives for testing.
+    'InputStreamSlowMultibyteRead',
+    # Nice to have better primitives.
+    'BoxedPrimitiveConstructor',
+    # Not necessary for tests.
+    'OverrideThrowableToString',
+    # Nice to have better type safety.
+    'CollectionToArraySafeParameter',
+    # Makes logcat debugging more difficult, and does not provide obvious
+    # benefits in the Chromium codebase.
+    'ObjectToString',
+    # Triggers on private methods that are @CalledByNative.
+    'UnusedMethod',
+    # Triggers on generated R.java files.
+    'UnusedVariable',
+    # Not that useful.
+    'UnsafeReflectiveConstructionCast',
+    # Not that useful.
+    'MixedMutabilityReturnType',
+    # Nice to have.
+    'EqualsGetClass',
+    # A lot of false-positives from CharSequence.equals().
+    'UndefinedEquals',
+    # Nice to have.
+    'ExtendingJUnitAssert',
+    # Nice to have.
+    'SystemExitOutsideMain',
+    # Nice to have.
+    'TypeParameterNaming',
+    # Nice to have.
+    'UnusedException',
+    # Nice to have.
+    'UngroupedOverloads',
+    # Nice to have.
+    'InconsistentOverloads',
+    # Dagger generated code triggers this.
+    'SameNameButDifferent',
+    # Nice to have.
+    'UnnecessaryLambda',
+    # Nice to have.
+    'UnnecessaryAnonymousClass',
+    # Nice to have.
+    'LiteProtoToString',
+    # Nice to have.
+    'MissingSummary',
+    # Nice to have.
+    'ReturnFromVoid',
+    # Nice to have.
+    'EmptyCatch',
+    # Nice to have.
+    'BadImport',
+    # Nice to have.
+    'UseCorrectAssertInTests',
+    # Nice to have.
+    'InlineFormatString',
+    # Nice to have.
+    'DefaultPackage',
+    # Must be off since we are now passing in annotation processor generated
+    # code as a source jar (deduplicating work with turbine).
+    'RefersToDaggerCodegen',
+    # We already have presubmit checks for this. Not necessary to warn on
+    # every build.
+    'RemoveUnusedImports',
+    # We do not care about unnecessary parenthesis enough to check for them.
+    'UnnecessaryParentheses',
+]
+
+# Full list of checks: https://errorprone.info/bugpatterns
+# Only those marked as "experimental" need to be listed here in order to be
+# enabled.
+ERRORPRONE_WARNINGS_TO_ENABLE = [
+    'BinderIdentityRestoredDangerously',
+    'EmptyIf',
+    'EqualsBrokenForNull',
+    'InvalidThrows',
+    'LongLiteralLowerCaseSuffix',
+    'MultiVariableDeclaration',
+    'ParameterNotNullable',
+    'RedundantOverride',
+    'StaticQualifiedUsingExpression',
+    'StringEquality',
+    'TimeUnitMismatch',
+    'UnnecessaryStaticImport',
+    'UseBinds',
+    'WildcardImport',
+]
+
+
+def ProcessJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+\.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(fileline_prefix +
+                          r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(fileline_prefix +
+                        r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  # These warnings cannot be suppressed even for third party code. Deprecation
+  # warnings especially do not help since we must support older android
+  # versions.
+  deprecated_re = re.compile(
+      r'(Note: .* uses? or overrides? a deprecated API.)$')
+  unchecked_re = re.compile(
+      r'(Note: .* uses? unchecked or unsafe operations.)$')
+  recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$')
+
+  activity_re = re.compile(r'^(?P<prefix>\s*location: )class Activity$')
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET +
+            colorama.Style.RESET_ALL + line[end:])
+
+  def ApplyFilters(line):
+    return not (deprecated_re.match(line) or unchecked_re.match(line)
+                or recompile_re.match(line))
+
+  def Elaborate(line):
+    if activity_re.match(line):
+      prefix = ' ' * activity_re.match(line).end('prefix')
+      return '{}\n{}Expecting a FragmentActivity? See {}'.format(
+          line, prefix, 'docs/ui/android/bytecode_rewriting.md')
+    return line
+
+  def ApplyColors(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  lines = (l for l in output.split('\n') if ApplyFilters(l))
+  lines = (ApplyColors(Elaborate(l)) for l in lines)
+  return '\n'.join(lines)
+
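+# A rough sketch of the transformation above (hypothetical diagnostic lines):
+#
+#   out = ProcessJavacOutput(
+#       'Foo.java:12: warning: [deprecation] foo() has been deprecated\n'
+#       'Note: Foo.java uses or overrides a deprecated API.')
+#
+# The "Note: ..." line is filtered out entirely, while the warning line is
+# kept with its "warning: ..." span wrapped in ANSI color codes.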
+
+def _ParsePackageAndClassNames(java_file):
+  package_name = ''
+  class_names = []
+  with open(java_file) as f:
+    for l in f:
+      # Strip unindented comments.
+      # Considers a leading * as a continuation of a multi-line comment (our
+      # linter doesn't enforce a space before it like there should be).
+      l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+
+      m = re.match(r'package\s+(.*?);', l)
+      if m and not package_name:
+        package_name = m.group(1)
+
+      # Not exactly a proper parser, but works for sources that Chrome uses.
+      # In order to not match nested classes, it just checks for lack of indent.
+      m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l)
+      if m:
+        class_names.append(m.group(1))
+  return package_name, class_names
+
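+# A minimal sketch of the parser above (hypothetical file contents): for a
+# source file containing "package org.example;" and a top-level "class Foo {",
+# _ParsePackageAndClassNames() returns ('org.example', ['Foo']). Nested
+# classes are skipped because their declarations are indented.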
+
+def _ProcessJavaFileForInfo(java_file):
+  package_name, class_names = _ParsePackageAndClassNames(java_file)
+  return java_file, package_name, class_names
+
+
+class _InfoFileContext(object):
+  """Manages the creation of the class->source file .info file."""
+
+  def __init__(self, chromium_code, excluded_globs):
+    self._chromium_code = chromium_code
+    self._excluded_globs = excluded_globs
+    # Map of .java path -> .srcjar/nested/path.java.
+    self._srcjar_files = {}
+    # List of generators from pool.imap_unordered().
+    self._results = []
+    # Lazily created multiprocessing.Pool.
+    self._pool = None
+
+  def AddSrcJarSources(self, srcjar_path, extracted_paths, parent_dir):
+    for path in extracted_paths:
+      # We want the path inside the srcjar so the viewer can have a tree
+      # structure.
+      self._srcjar_files[path] = '{}/{}'.format(
+          srcjar_path, os.path.relpath(path, parent_dir))
+
+  def SubmitFiles(self, java_files):
+    if self._pool is None:
+      # Restrict to just one process so that we do not steal cycles from
+      # compilation, which is always the slower step.
+      self._pool = multiprocessing.Pool(1)
+    logging.info('Submitting %d files for info', len(java_files))
+    self._results.append(
+        self._pool.imap_unordered(
+            _ProcessJavaFileForInfo, java_files, chunksize=1000))
+
+  def _CheckPathMatchesClassName(self, java_file, package_name, class_name):
+    parts = package_name.split('.') + [class_name + '.java']
+    expected_path_suffix = os.path.sep.join(parts)
+    if not java_file.endswith(expected_path_suffix):
+      raise Exception(('Java package+class name do not match its path.\n'
+                       'Actual path: %s\nExpected path: %s') %
+                      (java_file, expected_path_suffix))
+
+  def _ProcessInfo(self, java_file, package_name, class_names, source):
+    for class_name in class_names:
+      yield '{}.{}'.format(package_name, class_name)
+      # Skip aidl srcjars since they don't indent code correctly.
+      if '_aidl.srcjar' in source:
+        continue
+      assert not self._chromium_code or len(class_names) == 1, (
+          'Chromium java files must only have one class: {}'.format(source))
+      if self._chromium_code:
+        # This check is not necessary but nice to check this somewhere.
+        self._CheckPathMatchesClassName(java_file, package_name, class_names[0])
+
+  def _ShouldIncludeInJarInfo(self, fully_qualified_name):
+    name_as_class_glob = fully_qualified_name.replace('.', '/') + '.class'
+    return not build_utils.MatchesGlob(name_as_class_glob, self._excluded_globs)
+
+  def _Collect(self):
+    if self._pool is None:
+      return {}
+    ret = {}
+    for result in self._results:
+      for java_file, package_name, class_names in result:
+        source = self._srcjar_files.get(java_file, java_file)
+        for fully_qualified_name in self._ProcessInfo(java_file, package_name,
+                                                      class_names, source):
+          if self._ShouldIncludeInJarInfo(fully_qualified_name):
+            ret[fully_qualified_name] = java_file
+    self._pool.terminate()
+    return ret
+
+  def __del__(self):
+    # Workaround for a Python 2.x bug with multiprocessing and daemon threads:
+    # https://bugs.python.org/issue4106
+    if self._pool is not None:
+      logging.info('Joining multiprocessing.Pool')
+      self._pool.terminate()
+      self._pool.join()
+      logging.info('Done.')
+
+  def Commit(self, output_path):
+    """Writes a .jar.info file.
+
+    Maps fully qualified names for classes to either the java file that they
+    are defined in or the path of the srcjar that they came from.
+    """
+    logging.info('Collecting info file entries')
+    entries = self._Collect()
+
+    logging.info('Writing info file: %s', output_path)
+    with build_utils.AtomicOutput(output_path, mode='wb') as f:
+      jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files)
+    logging.info('Completed info file: %s', output_path)
+
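+# A rough sketch of what Commit() produces (hypothetical names): a .jar.info
+# file mapping each fully qualified class name, e.g. org.example.Foo, to the
+# .java file that defines it, or to the srcjar path it was extracted from.
+# The exact on-disk format is delegated to util/jar_info_utils.py.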
+
+def _CreateJarFile(jar_path, service_provider_configuration_dir,
+                   additional_jar_files, classes_dir):
+  logging.info('Start creating jar file: %s', jar_path)
+  with build_utils.AtomicOutput(jar_path) as f:
+    with zipfile.ZipFile(f.name, 'w') as z:
+      build_utils.ZipDir(z, classes_dir)
+      if service_provider_configuration_dir:
+        config_files = build_utils.FindInDirectory(
+            service_provider_configuration_dir)
+        for config_file in config_files:
+          zip_path = os.path.relpath(config_file,
+                                     service_provider_configuration_dir)
+          build_utils.AddToZipHermetic(z, zip_path, src_path=config_file)
+
+      if additional_jar_files:
+        for src_path, zip_path in additional_jar_files:
+          build_utils.AddToZipHermetic(z, zip_path, src_path=src_path)
+  logging.info('Completed jar file: %s', jar_path)
+
+
+def _OnStaleMd5(options, javac_cmd, javac_args, java_files):
+  logging.info('Starting _OnStaleMd5')
+  if options.enable_kythe_annotations:
+    # Kythe requires these environment variables to be set, so fail early
+    # with a clear error if they are missing.
+    if (not os.environ.get('KYTHE_ROOT_DIRECTORY')
+        or not os.environ.get('KYTHE_OUTPUT_DIRECTORY')):
+      raise Exception('--enable-kythe-annotations requires '
+                      'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY '
+                      'environment variables to be set.')
+    javac_extractor_cmd = build_utils.JavaCmd() + [
+        '-jar',
+        _JAVAC_EXTRACTOR,
+    ]
+    try:
+      _RunCompiler(options, javac_extractor_cmd + javac_args, java_files,
+                   options.classpath, options.jar_path + '.javac_extractor',
+                   save_outputs=False)
+    except build_utils.CalledProcessError as e:
+      # Having no index for a particular target is better than failing the
+      # entire codesearch build. Log an error and move on.
+      logging.error('Could not generate kzip: %s', e)
+
+  # Compilation with Error Prone takes about twice as long as pure javac, so
+  # the GN rules run both in parallel, with Error Prone used only for checks.
+  _RunCompiler(options,
+               javac_cmd + javac_args,
+               java_files,
+               options.classpath,
+               options.jar_path,
+               save_outputs=not options.enable_errorprone)
+  logging.info('Completed all steps in _OnStaleMd5')
+
+
+def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path,
+                 save_outputs=True):
+  logging.info('Starting _RunCompiler')
+
+  # Error Prone is run only for its checks (see above), so never save its
+  # outputs; also honor an explicit save_outputs=False from the caller.
+  save_outputs = save_outputs and not options.enable_errorprone
+
+  # Use jar_path's directory to ensure paths are relative (needed for goma).
+  temp_dir = jar_path + '.staging'
+  shutil.rmtree(temp_dir, True)
+  os.makedirs(temp_dir)
+  try:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    service_provider_configuration = os.path.join(
+        temp_dir, 'service_provider_configuration')
+
+    if save_outputs:
+      input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars')
+      annotation_processor_outputs_dir = os.path.join(
+          options.generated_dir, 'annotation_processor_outputs')
+      # Delete any stale files in the generated directory. The directory
+      # exists only so that codesearch can find the generated sources.
+      shutil.rmtree(options.generated_dir, True)
+      info_file_context = _InfoFileContext(options.chromium_code,
+                                           options.jar_info_exclude_globs)
+    else:
+      input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
+      annotation_processor_outputs_dir = os.path.join(
+          temp_dir, 'annotation_processor_outputs')
+
+    if options.java_srcjars:
+      logging.info('Extracting srcjars to %s', input_srcjars_dir)
+      build_utils.MakeDirectory(input_srcjars_dir)
+      for srcjar in options.java_srcjars:
+        extracted_files = build_utils.ExtractAll(
+            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
+        java_files.extend(extracted_files)
+        if save_outputs:
+          info_file_context.AddSrcJarSources(srcjar, extracted_files,
+                                             input_srcjars_dir)
+      logging.info('Done extracting srcjars')
+
+    if options.header_jar:
+      logging.info('Extracting service provider configs')
+      # Extract META-INF/services/* so that it can be copied into the output
+      # .jar
+      build_utils.ExtractAll(options.header_jar,
+                             no_clobber=True,
+                             path=service_provider_configuration,
+                             pattern='META-INF/services/*')
+      logging.info('Done extracting service provider configs')
+
+    if save_outputs and java_files:
+      info_file_context.SubmitFiles(java_files)
+
+    if java_files:
+      # Don't include the output directory in the initial set of args, since
+      # it is in a temp dir and would make the command line unstable (which
+      # breaks md5 stamping).
+      cmd = list(javac_cmd)
+      os.makedirs(classes_dir)
+      cmd += ['-d', classes_dir]
+
+      if options.processors:
+        os.makedirs(annotation_processor_outputs_dir)
+        cmd += ['-s', annotation_processor_outputs_dir]
+
+      if classpath:
+        cmd += ['-classpath', ':'.join(classpath)]
+
+      # Pass source paths via a response file to avoid extremely long command
+      # lines that are tedious to debug.
+      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+      with open(java_files_rsp_path, 'w') as f:
+        f.write(' '.join(java_files))
+      cmd += ['@' + java_files_rsp_path]
+
+      logging.debug('Build command %s', cmd)
+      start = time.time()
+      build_utils.CheckOutput(cmd,
+                              print_stdout=options.chromium_code,
+                              stdout_filter=ProcessJavacOutput,
+                              stderr_filter=ProcessJavacOutput,
+                              fail_on_output=options.warnings_as_errors)
+      elapsed = time.time() - start
+      logging.info('Java compilation took %ss', elapsed)
+
+    if save_outputs:
+      if options.processors:
+        annotation_processor_java_files = build_utils.FindInDirectory(
+            annotation_processor_outputs_dir)
+        if annotation_processor_java_files:
+          info_file_context.SubmitFiles(annotation_processor_java_files)
+
+      _CreateJarFile(jar_path, service_provider_configuration,
+                     options.additional_jar_files, classes_dir)
+
+      info_file_context.Commit(jar_path + '.info')
+    else:
+      build_utils.Touch(jar_path)
+
+    logging.info('Completed all steps in _RunCompiler')
+  finally:
+    shutil.rmtree(temp_dir)
+
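+# A sketch of the staging layout used above, for a hypothetical jar_path of
+# out/obj/foo.jar:
+#
+#   out/obj/foo.jar.staging/classes/          <- javac -d output
+#   out/obj/foo.jar.staging/files_list.txt    <- @response file of sources
+#   out/obj/foo.jar.staging/service_provider_configuration/
+#
+# The staging dir sits next to jar_path so that paths remain relative, which
+# goma requires.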
+
+def _ParseOptions(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--target-name', help='Fully qualified GN target name.')
+  parser.add_option('--skip-build-server',
+                    action='store_true',
+                    help='Avoid using the build server.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--generated-dir',
+      help='Subdirectory within target_gen_dir to place extracted srcjars and '
+      'annotation processor output for codesearch to find.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_option('--classpath', action='append', help='Classpath to use.')
+  parser.add_option(
+      '--processors',
+      action='append',
+      help='GN list of annotation processor main classes.')
+  parser.add_option(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_option(
+      '--processor-arg',
+      dest='processor_args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_option(
+      '--additional-jar-file',
+      dest='additional_jar_files',
+      action='append',
+      help='Additional files to package into jar. By default, only Java .class '
+      'files are packaged into the jar. Files should be specified in '
+      'format <filename>:<path to be placed in jar>.')
+  parser.add_option(
+      '--jar-info-exclude-globs',
+      help='GN list of exclude globs to filter from generated .info files.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_option(
+      '--gomacc-path', help='When set, prefix javac command with gomacc')
+  parser.add_option(
+      '--errorprone-path', help='Use the Errorprone compiler at this path.')
+  parser.add_option(
+      '--enable-errorprone',
+      action='store_true',
+      help='Enable errorprone checks')
+  parser.add_option(
+      '--warnings-as-errors',
+      action='store_true',
+      help='Treat all warnings as errors.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--javac-arg',
+      action='append',
+      default=[],
+      help='Additional arguments to pass to javac.')
+  parser.add_option(
+      '--enable-kythe-annotations',
+      action='store_true',
+      help='Enable generation of Kythe kzip, used for codesearch. Ensure '
+      'proper environment variables are set before using this flag.')
+  parser.add_option(
+      '--header-jar',
+      help='This is the header jar for the current target that contains '
+      'META-INF/services/* files to be included in the output jar.')
+
+  options, args = parser.parse_args(argv)
+  build_utils.CheckOptions(options, parser, required=('jar_path', ))
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.processorpath = build_utils.ParseGnList(options.processorpath)
+  options.processors = build_utils.ParseGnList(options.processors)
+  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+  options.jar_info_exclude_globs = build_utils.ParseGnList(
+      options.jar_info_exclude_globs)
+
+  additional_jar_files = []
+  for arg in options.additional_jar_files or []:
+    filepath, jar_filepath = arg.split(':')
+    additional_jar_files.append((filepath, jar_filepath))
+  options.additional_jar_files = additional_jar_files
+
+  java_files = []
+  for arg in args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+    else:
+      java_files.append(arg)
+
+  return options, java_files
+
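+# Example invocation (hypothetical paths; real command lines are generated by
+# the GN rules):
+#
+#   compile_java.py --jar-path=out/obj/foo.javac.jar --java-version=1.8 \
+#       --chromium-code=1 --classpath=out/obj/bar.jar @out/obj/foo.sources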
+
+def main(argv):
+  build_utils.InitLogging('JAVAC_DEBUG')
+  argv = build_utils.ExpandFileArgs(argv)
+  options, java_files = _ParseOptions(argv)
+
+  # Only use the build server for errorprone runs.
+  if (options.enable_errorprone and not options.skip_build_server
+      and server_utils.MaybeRunCommand(name=options.target_name,
+                                       argv=sys.argv,
+                                       stamp_file=options.jar_path)):
+    return
+
+  colorama.init()
+  javac_cmd = []
+  if options.gomacc_path:
+    javac_cmd.append(options.gomacc_path)
+  javac_cmd.append(build_utils.JAVAC_PATH)
+
+  javac_args = [
+      '-g',
+      # Chromium only allows UTF-8 source files. Being explicit avoids javac
+      # pulling a default encoding from the user's environment.
+      '-encoding',
+      'UTF-8',
+      # Prevent compiler from compiling .java files not listed as inputs.
+      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+      '-sourcepath',
+      ':',
+  ]
+
+  if options.enable_errorprone:
+    # All errorprone args are passed space-separated in a single arg.
+    errorprone_flags = ['-Xplugin:ErrorProne']
+    # Make everything a warning so that when treat_warnings_as_errors is false,
+    # they do not fail the build.
+    errorprone_flags += ['-XepAllErrorsAsWarnings']
+    # Don't check generated files.
+    errorprone_flags += ['-XepDisableWarningsInGeneratedCode']
+    errorprone_flags.extend('-Xep:{}:OFF'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_DISABLE)
+    errorprone_flags.extend('-Xep:{}:WARN'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_ENABLE)
+
+    if ERRORPRONE_CHECKS_TO_APPLY:
+      errorprone_flags += [
+          '-XepPatchLocation:IN_PLACE',
+          '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
+      ]
+
+    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]
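+
+    # For illustration, the joined plugin argument above is a single
+    # space-separated string along the lines of (heavily elided):
+    #   -Xplugin:ErrorProne -XepAllErrorsAsWarnings
+    #       -XepDisableWarningsInGeneratedCode -Xep:InvalidParam:OFF ...
+    #       -Xep:BinderIdentityRestoredDangerously:WARN ...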
+
+    # This flag quits errorprone after checks and before code generation.
+    # Since we do not need errorprone's outputs, this speeds it up by about 4
+    # seconds for chrome_java.
+    if not ERRORPRONE_CHECKS_TO_APPLY:
+      javac_args += ['-XDshould-stop.ifNoError=FLOW']
+
+  if options.java_version:
+    javac_args.extend([
+        '-source',
+        options.java_version,
+        '-target',
+        options.java_version,
+    ])
+  if options.java_version == '1.8':
+    # Android's boot jar doesn't contain all java 8 classes.
+    options.bootclasspath.append(build_utils.RT_JAR_PATH)
+
+  if options.processors:
+    javac_args.extend(['-processor', ','.join(options.processors)])
+  else:
+    # This effectively disables all annotation processors, including those
+    # declared in service provider configuration files under
+    # META-INF/services/. See the following link for reference:
+    #     https://docs.oracle.com/en/java/javase/11/tools/javac.html
+    javac_args.extend(['-proc:none'])
+
+  if options.bootclasspath:
+    javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)])
+
+  if options.processorpath:
+    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_args.extend(['-A%s' % arg])
+
+  javac_args.extend(options.javac_arg)
+
+  classpath_inputs = (
+      options.bootclasspath + options.classpath + options.processorpath)
+
+  depfile_deps = classpath_inputs
+  # Files that are already inputs in GN should go in input_paths.
+  input_paths = depfile_deps + options.java_srcjars + java_files
+  if options.header_jar:
+    input_paths.append(options.header_jar)
+  input_paths += [x[0] for x in options.additional_jar_files]
+
+  output_paths = [options.jar_path]
+  if not options.enable_errorprone:
+    output_paths += [options.jar_path + '.info']
+
+  input_strings = javac_cmd + javac_args + options.classpath + java_files + [
+      options.warnings_as_errors, options.jar_info_exclude_globs
+  ]
+
+  # Keep md5_check since we plan to use its changes feature to implement a build
+  # speed improvement for non-signature compiles: https://crbug.com/1170778
+  md5_check.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(options, javac_cmd, javac_args, java_files),
+      options,
+      depfile_deps=depfile_deps,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/compile_java.pydeps b/src/build/android/gyp/compile_java.pydeps
new file mode 100644
index 0000000..f14fd0b
--- /dev/null
+++ b/src/build/android/gyp/compile_java.pydeps
@@ -0,0 +1,16 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_java.pydeps build/android/gyp/compile_java.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../gn_helpers.py
+../../print_python_deps.py
+compile_java.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/server_utils.py
diff --git a/src/build/android/gyp/compile_resources.py b/src/build/android/gyp/compile_resources.py
new file mode 100755
index 0000000..8a668e7
--- /dev/null
+++ b/src/build/android/gyp/compile_resources.py
@@ -0,0 +1,1179 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt, and an .srcjar file containing the proper
+final R.java class for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
+"""
+
+import argparse
+import collections
+import contextlib
+import filecmp
+import hashlib
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import diff_utils
+from util import manifest_utils
+from util import parallel
+from util import protoresources
+from util import resource_utils
+
+
+# Pngs that we shouldn't convert to webp. Please add rationale when updating.
+_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
+    # Crashes on Galaxy S5 running L (https://crbug.com/807059).
+    r'.*star_gray\.png',
+    # Android requires pngs for 9-patch images.
+    r'.*\.9\.png',
+    # Daydream requires pngs for icon files.
+    r'.*daydream_icon_.*\.png'
+]))
+
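+# For example, a hypothetical res/drawable-mdpi/ic_star_gray.png or any
+# foo.9.png is excluded from webp conversion by the pattern above, while a
+# plain res/drawable-mdpi/ic_arrow.png would be converted.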
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+  input_opts.add_argument(
+      '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
+  input_opts.add_argument(
+      '--android-manifest', required=True, help='AndroidManifest.xml path.')
+  input_opts.add_argument(
+      '--r-java-root-package-name',
+      default='base',
+      help='Short package name for this target\'s root R java file (ex. '
+      'input of "base" would become gen.base_module). Defaults to "base".')
+  group = input_opts.add_mutually_exclusive_group()
+  group.add_argument(
+      '--shared-resources',
+      action='store_true',
+      help='Make all resources in R.java non-final and allow the resource IDs '
+      'to be reset to a different package index when the apk is loaded by '
+      'another application at runtime.')
+  group.add_argument(
+      '--app-as-shared-lib',
+      action='store_true',
+      help='Same as --shared-resources, but also ensures all resource IDs are '
+      'directly usable from the APK loaded as an application.')
+
+  input_opts.add_argument(
+      '--package-id',
+      type=int,
+      help='Decimal integer representing custom package ID for resources '
+      '(instead of 127==0x7f). Cannot be used with --shared-resources.')
+
+  input_opts.add_argument(
+      '--package-name',
+      help='Package name that will be used to create R class.')
+
+  input_opts.add_argument(
+      '--rename-manifest-package', help='Package name to force AAPT to use.')
+
+  input_opts.add_argument(
+      '--arsc-package-name',
+      help='Package name to set in manifest of resources.arsc file. This is '
+      'only used for apks under test.')
+
+  input_opts.add_argument(
+      '--shared-resources-allowlist',
+      help='An R.txt file acting as an allowlist for resources that should be '
+      'non-final and have their package ID changed at runtime in R.java. '
+      'Implies and overrides --shared-resources.')
+
+  input_opts.add_argument(
+      '--shared-resources-allowlist-locales',
+      default='[]',
+      help='Optional GN-list of locales. If provided, all strings corresponding'
+      ' to this locale list will be kept in the final output for the '
+      'resources identified through --shared-resources-allowlist, even '
+      'if --locale-allowlist is being used.')
+
+  input_opts.add_argument(
+      '--use-resource-ids-path',
+      help='Use resource IDs generated by aapt --emit-ids.')
+
+  input_opts.add_argument(
+      '--extra-main-r-text-files',
+      help='Additional R.txt files that will be added to the root R.java file, '
+      'but not packaged in the generated resources.arsc. If these resources '
+      'entries contain duplicate resources with the generated R.txt file, they '
+      'must be identical.')
+
+  input_opts.add_argument(
+      '--support-zh-hk',
+      action='store_true',
+      help='Use zh-rTW resources for zh-rHK.')
+
+  input_opts.add_argument(
+      '--debuggable',
+      action='store_true',
+      help='Whether to add android:debuggable="true".')
+
+  input_opts.add_argument('--version-code', help='Version code for apk.')
+  input_opts.add_argument('--version-name', help='Version name for apk.')
+  input_opts.add_argument(
+      '--min-sdk-version', required=True, help='android:minSdkVersion for APK.')
+  input_opts.add_argument(
+      '--target-sdk-version',
+      required=True,
+      help="android:targetSdkVersion for APK.")
+  input_opts.add_argument(
+      '--max-sdk-version',
+      help="android:maxSdkVersion expected in AndroidManifest.xml.")
+  input_opts.add_argument(
+      '--manifest-package', help='Package name of the AndroidManifest.xml.')
+
+  input_opts.add_argument(
+      '--locale-allowlist',
+      default='[]',
+      help='GN list of languages to include. All other language configs will '
+      'be stripped out. List may include a combination of Android locales '
+      'or Chrome locales.')
+  input_opts.add_argument(
+      '--resource-exclusion-regex',
+      default='',
+      help='File-based filter for resources (applied before compiling)')
+  input_opts.add_argument(
+      '--resource-exclusion-exceptions',
+      default='[]',
+      help='GN list of globs that say which files to include even '
+      'when --resource-exclusion-regex is set.')
+
+  input_opts.add_argument(
+      '--dependencies-res-zip-overlays',
+      help='GN list with subset of --dependencies-res-zips to use overlay '
+      'semantics for.')
+
+  input_opts.add_argument(
+      '--values-filter-rules',
+      help='GN list of source_glob:regex for filtering resources after they '
+      'are compiled. Use this to filter out entries within values/ files.')
+
+  input_opts.add_argument('--png-to-webp', action='store_true',
+                          help='Convert png files to webp format.')
+
+  input_opts.add_argument('--webp-binary', default='',
+                          help='Path to the cwebp binary.')
+  input_opts.add_argument(
+      '--webp-cache-dir', help='The directory to store webp image cache.')
+
+  input_opts.add_argument(
+      '--no-xml-namespaces',
+      action='store_true',
+      help='Whether to strip xml namespaces from processed xml resources.')
+  input_opts.add_argument(
+      '--short-resource-paths',
+      action='store_true',
+      help='Whether to shorten resource paths inside the apk or module.')
+  input_opts.add_argument(
+      '--strip-resource-names',
+      action='store_true',
+      help='Whether to strip resource names from the resource table of the apk '
+      'or module.')
+
+  output_opts.add_argument('--arsc-path', help='Apk output for arsc format.')
+  output_opts.add_argument('--proto-path', help='Apk output for proto format.')
+  group = input_opts.add_mutually_exclusive_group()
+  group.add_argument(
+      '--optimized-arsc-path',
+      help='Output for `aapt2 optimize` for arsc format (enables the step).')
+  group.add_argument(
+      '--optimized-proto-path',
+      help='Output for `aapt2 optimize` for proto format (enables the step).')
+  input_opts.add_argument(
+      '--resources-config-paths',
+      default='[]',
+      help='GN list of paths to aapt2 resources config files.')
+
+  output_opts.add_argument(
+      '--info-path', help='Path to output info file for the partial apk.')
+
+  output_opts.add_argument(
+      '--srcjar-out',
+      required=True,
+      help='Path to srcjar to contain generated R.java.')
+
+  output_opts.add_argument('--r-text-out',
+                           help='Path to store the generated R.txt file.')
+
+  output_opts.add_argument(
+      '--proguard-file', help='Path to proguard.txt generated file.')
+
+  output_opts.add_argument(
+      '--proguard-file-main-dex',
+      help='Path to proguard.txt generated file for main dex.')
+
+  output_opts.add_argument(
+      '--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.')
+
+  output_opts.add_argument(
+      '--resources-path-map-out-path',
+      help='Path to file produced by aapt2 that maps original resource paths '
+      'to shortened resource paths inside the apk or module.')
+
+  input_opts.add_argument(
+      '--is-bundle-module',
+      action='store_true',
+      help='Whether resources are being generated for a bundle module.')
+
+  input_opts.add_argument(
+      '--uses-split',
+      help='Value to set uses-split to in the AndroidManifest.xml.')
+
+  input_opts.add_argument(
+      '--extra-verification-manifest',
+      help='Path to AndroidManifest.xml which should be merged into base '
+      'manifest when performing verification.')
+
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+
+  resource_utils.HandleCommonOptions(options)
+
+  options.locale_allowlist = build_utils.ParseGnList(options.locale_allowlist)
+  options.shared_resources_allowlist_locales = build_utils.ParseGnList(
+      options.shared_resources_allowlist_locales)
+  options.resource_exclusion_exceptions = build_utils.ParseGnList(
+      options.resource_exclusion_exceptions)
+  options.dependencies_res_zip_overlays = build_utils.ParseGnList(
+      options.dependencies_res_zip_overlays)
+  options.values_filter_rules = build_utils.ParseGnList(
+      options.values_filter_rules)
+  options.extra_main_r_text_files = build_utils.ParseGnList(
+      options.extra_main_r_text_files)
+  options.resources_config_paths = build_utils.ParseGnList(
+      options.resources_config_paths)
+
+  if options.optimized_proto_path and not options.proto_path:
+    # We could write to a temp file, but it's simpler to require it.
+    parser.error('--optimized-proto-path requires --proto-path')
+
+  if not options.arsc_path and not options.proto_path:
+    parser.error('One of --arsc-path or --proto-path is required.')
+
+  if options.resources_path_map_out_path and not options.short_resource_paths:
+    parser.error(
+        '--resources-path-map-out-path requires --short-resource-paths')
+
+  if options.package_id and options.shared_resources:
+    parser.error('--package-id and --shared-resources are mutually exclusive')
+
+  return options
+
+
+def _IterFiles(root_dir):
+  for root, _, files in os.walk(root_dir):
+    for f in files:
+      yield os.path.join(root, f)
+
+
+def _DuplicateZhResources(resource_dirs, path_info):
+  """Duplicate Taiwanese resources into Hong-Kong specific directory."""
+  for resource_dir in resource_dirs:
+    # We use zh-TW resources for zh-HK (if we have zh-TW resources).
+    for path in _IterFiles(resource_dir):
+      if 'zh-rTW' in path:
+        hk_path = path.replace('zh-rTW', 'zh-rHK')
+        build_utils.MakeDirectory(os.path.dirname(hk_path))
+        shutil.copyfile(path, hk_path)
+        path_info.RegisterRename(
+            os.path.relpath(path, resource_dir),
+            os.path.relpath(hk_path, resource_dir))
+
+
+def _RenameLocaleResourceDirs(resource_dirs, path_info):
+  """Rename locale resource directories into standard names when necessary.
+
+  This is necessary to deal with the fact that older Android releases only
+  support ISO 639-1 two-letter codes, and sometimes even obsolete versions
+  of them.
+
+  In practice it means:
+    * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
+      2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
+      to a new corresponding values-tl/ sub-directory.
+
+    * Modern ISO 639-1 codes will be renamed to their obsolete variants
+      for Indonesian, Hebrew and Yiddish (e.g. 'values-id/' -> 'values-in/').
+
+    * Norwegian macrolanguage strings will be renamed to Bokmal (the main
+      written language of Norway). See http://crbug.com/920960. In practice
+      this means 'values-no/' -> 'values-nb/' unless 'values-nb/' already
+      exists.
+
+    * BCP 47 language tags will be renamed to an equivalent ISO 639-1
+      locale qualifier if possible (e.g. 'values-b+en+US/' ->
+      'values-en-rUS/'). Though this is not necessary at the moment, because
+      no third-party package that Chromium links against uses these for the
+      current list of supported locales, this may change when the list is
+      extended in the future.
+
+  Args:
+    resource_dirs: list of top-level resource directories.
+    path_info: a ResourceInfoFile instance used to record the renames.
+  """
+  for resource_dir in resource_dirs:
+    for path in _IterFiles(resource_dir):
+      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
+      if not locale:
+        continue
+      cr_locale = resource_utils.ToChromiumLocaleName(locale)
+      if not cr_locale:
+        continue  # Unsupported Android locale qualifier!?
+      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
+      if locale != locale2:
+        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
+        if path == path2:
+          raise Exception('Could not substitute locale %s for %s in %s' %
+                          (locale, locale2, path))
+        if os.path.exists(path2):
+          # This happens sometimes, e.g. some libraries provide both
+          # values-nb/ and values-no/ with the same content.
+          continue
+        build_utils.MakeDirectory(os.path.dirname(path2))
+        shutil.move(path, path2)
+        path_info.RegisterRename(
+            os.path.relpath(path, resource_dir),
+            os.path.relpath(path2, resource_dir))
+
+
+def _ToAndroidLocales(locale_allowlist, support_zh_hk):
+  """Converts the list of Chrome locales to Android config locale qualifiers.
+
+  Args:
+    locale_allowlist: A list of Chromium locale names.
+    support_zh_hk: True if we need to support zh-HK by duplicating
+      the zh-TW strings.
+  Returns:
+    A set of matching Android config locale qualifier names.
+  """
+  ret = set()
+  for locale in locale_allowlist:
+    locale = resource_utils.ToAndroidLocaleName(locale)
+    if locale is None or ('-' in locale and '-r' not in locale):
+      raise Exception('Unsupported Chromium locale name: %s' % locale)
+    ret.add(locale)
+    # Always keep non-regional fall-backs.
+    language = locale.split('-')[0]
+    ret.add(language)
+
+  # We don't actually support zh-HK in Chrome on Android, but we mimic the
+  # native side behavior where we use zh-TW resources when the locale is set to
+  # zh-HK. See https://crbug.com/780847.
+  if support_zh_hk:
+    assert not any('HK' in l for l in locale_allowlist), (
+        'Remove special logic if zh-HK is now supported (crbug.com/780847).')
+    ret.add('zh-rHK')
+  return set(ret)
+
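+# Sketch with hypothetical input: _ToAndroidLocales(['en-US', 'fil'], False)
+# returns something like {'en-rUS', 'en', 'tl'}, i.e. the mapped Android
+# qualifiers plus their non-regional language fall-backs.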
+
+def _MoveImagesToNonMdpiFolders(res_root, path_info):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not os.path.splitext(src_file_name)[1] in ('.png', '.webp', ''):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+      path_info.RegisterRename(
+          os.path.relpath(src_file, res_root),
+          os.path.relpath(dst_file, res_root))
+
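+# Sketch (hypothetical files): drawable-mdpi/foo.png moves to
+# drawable/foo.png and drawable-sw600dp-mdpi/foo.png moves to
+# drawable-sw600dp/foo.png; only the 'mdpi' component is dropped.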
+
+def _FixManifest(options, temp_dir, extra_manifest=None):
+  """Fix the APK's AndroidManifest.xml.
+
+  This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+  'android:debuggable' depending on the content of |options|.
+
+  Args:
+    options: The command-line arguments tuple.
+    temp_dir: A temporary directory where the fixed manifest will be written to.
+    extra_manifest: Path to an AndroidManifest.xml file which will get merged
+        into the application node of the base manifest.
+  Returns:
+    Tuple of:
+     * Manifest path within |temp_dir|.
+     * Original package_name.
+  """
+  def maybe_extract_version(j):
+    try:
+      return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
+    except build_utils.CalledProcessError:
+      return None
+
+  android_sdk_jars = [j for j in options.include_resources
+                      if os.path.basename(j) in ('android.jar',
+                                                 'android_system.jar')]
+  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
+  successful_extractions = [x for x in extract_all if x]
+  if not successful_extractions:
+    raise Exception('Unable to find android SDK jar among candidates: %s' %
+                    ', '.join(android_sdk_jars))
+  elif len(successful_extractions) > 1:
+    raise Exception('Found multiple android SDK jars among candidates: %s' %
+                    ', '.join(android_sdk_jars))
+  version_code, version_name = successful_extractions.pop()[:2]
+
+  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+  doc, manifest_node, app_node = manifest_utils.ParseManifest(
+      options.android_manifest)
+
+  if extra_manifest:
+    _, extra_manifest_node, extra_app_node = manifest_utils.ParseManifest(
+        extra_manifest)
+    for node in extra_app_node:
+      app_node.append(node)
+    for node in extra_manifest_node:
+      # DFM manifests have a bunch of tags we don't care about inside
+      # <manifest>, so only take <queries>.
+      if node.tag == 'queries':
+        manifest_node.append(node)
+
+  manifest_utils.AssertUsesSdk(manifest_node, options.min_sdk_version,
+                               options.target_sdk_version)
+  # We explicitly check that maxSdkVersion is set in the manifest since,
+  # unlike minSdkVersion and targetSdkVersion, we do not add it ourselves.
+  manifest_utils.AssertUsesSdk(
+      manifest_node,
+      max_sdk_version=options.max_sdk_version,
+      fail_if_not_exist=True)
+  manifest_utils.AssertPackage(manifest_node, options.manifest_package)
+
+  manifest_node.set('platformBuildVersionCode', version_code)
+  manifest_node.set('platformBuildVersionName', version_name)
+
+  orig_package = manifest_node.get('package')
+  if options.arsc_package_name:
+    manifest_node.set('package', options.arsc_package_name)
+
+  if options.debuggable:
+    app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'),
+                 'true')
+
+  if options.uses_split:
+    uses_split = ElementTree.SubElement(manifest_node, 'uses-split')
+    uses_split.set('{%s}name' % manifest_utils.ANDROID_NAMESPACE,
+                   options.uses_split)
+
+  # Make sure the min-sdk condition is not less than the min-sdk of the bundle.
+  for min_sdk_node in manifest_node.iter('{%s}min-sdk' %
+                                         manifest_utils.DIST_NAMESPACE):
+    dist_value = '{%s}value' % manifest_utils.DIST_NAMESPACE
+    if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version):
+      min_sdk_node.set(dist_value, options.min_sdk_version)
+
+  manifest_utils.SaveManifest(doc, debug_manifest_path)
+  return debug_manifest_path, orig_package
+
+
+def _CreateKeepPredicate(resource_exclusion_regex,
+                         resource_exclusion_exceptions):
+  """Return a predicate lambda to determine which resource files to keep.
+
+  Args:
+    resource_exclusion_regex: A regular expression describing all resources
+      to exclude, except if they are mip-maps, or if they are listed
+      in |resource_exclusion_exceptions|.
+    resource_exclusion_exceptions: A list of glob patterns corresponding
+      to exceptions to the |resource_exclusion_regex|.
+  Returns:
+    A lambda that takes a path, and returns true if the corresponding file
+    must be kept.
+  """
+  predicate = lambda path: os.path.basename(path)[0] != '.'
+  if resource_exclusion_regex == '':
+    # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyway.
+    return predicate
+
+  # A simple predicate that only removes (returns False for) paths covered by
+  # the exclusion regex or listed as exceptions.
+  return lambda path: (
+      not re.search(resource_exclusion_regex, path) or
+      build_utils.MatchesGlob(path, resource_exclusion_exceptions))
+
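+# Sketch with hypothetical arguments: given
+# resource_exclusion_regex=r'.*_large\.png' and
+# resource_exclusion_exceptions=['*logo_large.png'], the returned predicate
+# drops res/drawable/bg_large.png but keeps res/drawable/logo_large.png.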
+
+def _ComputeSha1(path):
+  with open(path, 'rb') as f:
+    data = f.read()
+  return hashlib.sha1(data).hexdigest()
+
+
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+  sha1_hash = _ComputeSha1(png_path)
+
+  # The set of arguments that will appear in the cache key.
+  quality_args = ['-m', '6', '-q', '100', '-lossless']
+
+  webp_cache_path = os.path.join(
+      webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+                                        ''.join(quality_args)))
+  # No need to add a .webp extension; Android can load images fine without it.
+  webp_path = os.path.splitext(png_path)[0]
+
+  cache_hit = os.path.exists(webp_cache_path)
+  if cache_hit:
+    os.link(webp_cache_path, webp_path)
+  else:
+    # We write the generated webp image to webp_path rather than into
+    # webp_cache_dir to avoid concurrency issues.
+    args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+    subprocess.check_call(args)
+
+    try:
+      os.link(webp_path, webp_cache_path)
+    except OSError:
+      # Because of concurrent runs, a webp image may already exist at
+      # webp_cache_path.
+      pass
+
+  os.remove(png_path)
+  original_dir = os.path.dirname(os.path.dirname(png_path))
+  rename_tuple = (os.path.relpath(png_path, original_dir),
+                  os.path.relpath(webp_path, original_dir))
+  return rename_tuple, cache_hit
+
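+# Cache-key sketch: for a png whose sha1 is "ab12..." (hypothetical) and
+# cwebp version "1.2.0", the cache entry is named
+# "ab12...-1.2.0--m6-q100-lossless", so changing the quality flags or
+# upgrading cwebp naturally invalidates stale entries.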
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+  cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
+  shard_args = [(f, ) for f in png_paths
+                if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+  build_utils.MakeDirectory(webp_cache_dir)
+  results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+                                     shard_args,
+                                     cwebp_binary=cwebp_binary,
+                                     cwebp_version=cwebp_version,
+                                     webp_cache_dir=webp_cache_dir)
+  total_cache_hits = 0
+  for rename_tuple, cache_hit in results:
+    path_info.RegisterRename(*rename_tuple)
+    total_cache_hits += int(cache_hit)
+
+  logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
+
+
+def _RemoveImageExtensions(directory, path_info):
+  """Remove extensions from image files in the passed directory.
+
+  This reduces binary size but does not affect Android's ability to load the
+  images.
+  """
+  for f in _IterFiles(directory):
+    if (f.endswith('.png') or f.endswith('.webp')) and not f.endswith('.9.png'):
+      path_with_extension = f
+      path_no_extension = os.path.splitext(path_with_extension)[0]
+      if path_no_extension != path_with_extension:
+        shutil.move(path_with_extension, path_no_extension)
+        path_info.RegisterRename(
+            os.path.relpath(path_with_extension, directory),
+            os.path.relpath(path_no_extension, directory))
+
+
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+                      partials_dir):
+  unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
+  partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
+
+  compile_command = [
+      aapt2_path,
+      'compile',
+      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
+      # '--no-crunch',
+      '--dir',
+      dep_subdir,
+      '-o',
+      partial_path
+  ]
+
+  # Some resources target API versions lower than our minSdkVersion. For
+  # various reasons it's easier to let aapt2 ignore these than to remove
+  # them from the build (e.g. when they come from a third-party library).
+  build_utils.CheckOutput(
+      compile_command,
+      stderr_filter=lambda output: build_utils.FilterLines(
+          output, r'ignoring configuration .* for (styleable|attribute)'))
+
+  # Filtering these files is expensive, so only apply filters to the partials
+  # that have been explicitly targeted.
+  if keep_predicate:
+    logging.debug('Applying .arsc filtering to %s', dep_subdir)
+    protoresources.StripUnwantedResources(partial_path, keep_predicate)
+  return partial_path
+
+
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
+  patterns = [
+      x[1] for x in exclusion_rules
+      if build_utils.MatchesGlob(dep_subdir, [x[0]])
+  ]
+  if not patterns:
+    return None
+
+  regexes = [re.compile(p) for p in patterns]
+  return lambda x: not any(r.search(x) for r in regexes)
+
+
+def _CompileDeps(aapt2_path, dep_subdirs, dep_subdir_overlay_set, temp_dir,
+                 exclusion_rules):
+  partials_dir = os.path.join(temp_dir, 'partials')
+  build_utils.MakeDirectory(partials_dir)
+
+  job_params = [(i, dep_subdir,
+                 _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
+                for i, dep_subdir in enumerate(dep_subdirs)]
+
+  # Filtering is slow, so ensure jobs with keep_predicate are started first.
+  job_params.sort(key=lambda x: not x[2])
+  partials = list(
+      parallel.BulkForkAndCall(_CompileSingleDep,
+                               job_params,
+                               aapt2_path=aapt2_path,
+                               partials_dir=partials_dir))
+
+  partials_cmd = []
+  for i, partial in enumerate(partials):
+    dep_subdir = job_params[i][1]
+    if dep_subdir in dep_subdir_overlay_set:
+      partials_cmd += ['-R']
+    partials_cmd += [partial]
+  return partials_cmd
+
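+# The returned list interleaves overlay markers with partial paths, e.g.
+# (hypothetical): ['partials/0_res.zip', '-R', 'partials/1_res.zip']. It is
+# later spliced into the `aapt2 link` command so that overlay partials get
+# overlay (-R) semantics.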
+
+def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
+  for zip_file in dependencies_res_zips:
+    zip_info_file_path = zip_file + '.info'
+    if os.path.exists(zip_info_file_path):
+      path_info.MergeInfoFile(zip_info_file_path)
+  path_info.Write(info_path)
+
+
+def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
+  """Remove localized strings that should not go into the final output.
+
+  Args:
+    dep_subdirs: List of resource dependency directories.
+    options: Command-line options namespace.
+  """
+  # Collect locale and file paths from the existing subdirs.
+  # The following variable maps Android locale names to
+  # sets of corresponding xml file paths.
+  locale_to_files_map = collections.defaultdict(set)
+  for directory in dep_subdirs:
+    for f in _IterFiles(directory):
+      locale = resource_utils.FindLocaleInStringResourceFilePath(f)
+      if locale:
+        locale_to_files_map[locale].add(f)
+
+  all_locales = set(locale_to_files_map)
+
+  # Set A: wanted locales, either all of them or the
+  # list provided by --locale-allowlist.
+  wanted_locales = all_locales
+  if options.locale_allowlist:
+    wanted_locales = _ToAndroidLocales(options.locale_allowlist,
+                                       options.support_zh_hk)
+
+  # Set B: shared resources locales, which is either set A
+  # or the list provided by --shared-resources-allowlist-locales
+  shared_resources_locales = wanted_locales
+  shared_names_allowlist = set()
+  if options.shared_resources_allowlist_locales:
+    shared_names_allowlist = set(
+        resource_utils.GetRTxtStringResourceNames(
+            options.shared_resources_allowlist))
+
+    shared_resources_locales = _ToAndroidLocales(
+        options.shared_resources_allowlist_locales, options.support_zh_hk)
+
+  # Remove any file that belongs to a locale not covered by
+  # either A or B.
+  removable_locales = (all_locales - wanted_locales - shared_resources_locales)
+  for locale in removable_locales:
+    for path in locale_to_files_map[locale]:
+      os.remove(path)
+
+  # For any locale in B but not in A, only keep the shared
+  # resource strings in each file.
+  for locale in shared_resources_locales - wanted_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x in shared_names_allowlist)
+
+  # For any locale in A but not in B, only keep the strings
+  # that are _not_ from shared resources in the file.
+  for locale in wanted_locales - shared_resources_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x not in shared_names_allowlist)
+
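+# Set-logic sketch with hypothetical locales: given all_locales = {fr, de,
+# pl}, wanted A = {fr, de} and shared B = {fr}: values-pl/ files are deleted
+# outright (in neither set), values-de/ files (A only) drop the allowlisted
+# shared strings, and values-fr/ files (in both sets) are left untouched.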
+
+def _FilterResourceFiles(dep_subdirs, keep_predicate):
+  # Create a function that selects which resource files should be packaged
+  # into the final output. Any file that does not pass the predicate will
+  # be removed below.
+  png_paths = []
+  for directory in dep_subdirs:
+    for f in _IterFiles(directory):
+      if not keep_predicate(f):
+        os.remove(f)
+      elif f.endswith('.png'):
+        png_paths.append(f)
+
+  return png_paths
+
+
+def _PackageApk(options, build):
+  """Compile and link resources with aapt2.
+
+  Args:
+    options: The command-line options.
+    build: BuildContext object.
+  Returns:
+    The manifest package name for the APK.
+  """
+  logging.debug('Extracting resource .zips')
+  dep_subdirs = []
+  dep_subdir_overlay_set = set()
+  for dependency_res_zip in options.dependencies_res_zips:
+    extracted_dep_subdirs = resource_utils.ExtractDeps([dependency_res_zip],
+                                                       build.deps_dir)
+    dep_subdirs += extracted_dep_subdirs
+    if dependency_res_zip in options.dependencies_res_zip_overlays:
+      dep_subdir_overlay_set.update(extracted_dep_subdirs)
+
+  logging.debug('Applying locale transformations')
+  path_info = resource_utils.ResourceInfoFile()
+  if options.support_zh_hk:
+    _DuplicateZhResources(dep_subdirs, path_info)
+  _RenameLocaleResourceDirs(dep_subdirs, path_info)
+
+  logging.debug('Applying file-based exclusions')
+  keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
+                                        options.resource_exclusion_exceptions)
+  png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)
+
+  if options.locale_allowlist or options.shared_resources_allowlist_locales:
+    logging.debug('Applying locale-based string exclusions')
+    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
+
+  if png_paths and options.png_to_webp:
+    logging.debug('Converting png->webp')
+    _ConvertToWebP(options.webp_binary, png_paths, path_info,
+                   options.webp_cache_dir)
+  logging.debug('Applying drawable transformations')
+  for directory in dep_subdirs:
+    _MoveImagesToNonMdpiFolders(directory, path_info)
+    _RemoveImageExtensions(directory, path_info)
+
+  logging.debug('Running aapt2 compile')
+  exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
+  partials = _CompileDeps(options.aapt2_path, dep_subdirs,
+                          dep_subdir_overlay_set, build.temp_dir,
+                          exclusion_rules)
+
+  link_command = [
+      options.aapt2_path,
+      'link',
+      '--auto-add-overlay',
+      '--no-version-vectors',
+      # Set SDK versions in case they are not set in the Android manifest.
+      '--min-sdk-version',
+      options.min_sdk_version,
+      '--target-sdk-version',
+      options.target_sdk_version,
+  ]
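+  # Illustrative shape of the base command so far (hypothetical SDK values):
+  #   ['aapt2', 'link', '--auto-add-overlay', '--no-version-vectors',
+  #    '--min-sdk-version', '21', '--target-sdk-version', '30']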
+
+  for j in options.include_resources:
+    link_command += ['-I', j]
+  if options.version_code:
+    link_command += ['--version-code', options.version_code]
+  if options.version_name:
+    link_command += ['--version-name', options.version_name]
+  if options.proguard_file:
+    link_command += ['--proguard', build.proguard_path]
+    link_command += ['--proguard-minimal-keep-rules']
+  if options.proguard_file_main_dex:
+    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
+  if options.emit_ids_out:
+    link_command += ['--emit-ids', build.emit_ids_path]
+  if options.r_text_in:
+    shutil.copyfile(options.r_text_in, build.r_txt_path)
+  else:
+    link_command += ['--output-text-symbols', build.r_txt_path]
+
+  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
+  #       can be used with recent versions of aapt2.
+  if options.shared_resources:
+    link_command.append('--shared-lib')
+
+  if options.no_xml_namespaces:
+    link_command.append('--no-xml-namespaces')
+
+  if options.package_id:
+    link_command += [
+        '--package-id',
+        hex(options.package_id),
+        '--allow-reserved-package-id',
+    ]
+
+  fixed_manifest, desired_manifest_package_name = _FixManifest(
+      options, build.temp_dir)
+  if options.rename_manifest_package:
+    desired_manifest_package_name = options.rename_manifest_package
+
+  link_command += [
+      '--manifest', fixed_manifest, '--rename-manifest-package',
+      desired_manifest_package_name
+  ]
+
+  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
+  # Also creates R.txt
+  if options.use_resource_ids_path:
+    _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
+                         desired_manifest_package_name)
+    link_command += ['--stable-ids', build.stable_ids_path]
+
+  link_command += partials
+
+  # We always create a binary arsc file first, then convert to proto, so flags
+  # such as --shared-lib can be supported.
+  arsc_path = build.arsc_path
+  if arsc_path is None:
+    _, arsc_path = tempfile.mkstemp()
+  link_command += ['-o', arsc_path]
+
+  logging.debug('Starting: aapt2 link')
+  link_proc = subprocess.Popen(link_command)
+
+  # Create .res.info file in parallel.
+  _CreateResourceInfoFile(path_info, build.info_path,
+                          options.dependencies_res_zips)
+  logging.debug('Created .res.info file')
+
+  exit_code = link_proc.wait()
+  logging.debug('Finished: aapt2 link')
+  if exit_code:
+    raise subprocess.CalledProcessError(exit_code, link_command)
+
+  if options.proguard_file and (options.shared_resources
+                                or options.app_as_shared_lib):
+    # Make sure the R class associated with the manifest package does not have
+    # its onResourcesLoaded method obfuscated or removed, so that the framework
+    # can call it in the case where the APK is being loaded as a library.
+    with open(build.proguard_path, 'a') as proguard_file:
+      keep_rule = '''
+                  -keep class {package}.R {{
+                    public static void onResourcesLoaded(int);
+                  }}
+                  '''.format(package=desired_manifest_package_name)
+      proguard_file.write(textwrap.dedent(keep_rule))
+
+  logging.debug('Running aapt2 convert')
+  build_utils.CheckOutput([
+      options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
+      build.proto_path, arsc_path
+  ])
+
+  # Workaround for b/147674078. This is only needed for WebLayer and does not
+  # affect WebView usage, since WebView does not use dynamic attributes.
+  if options.shared_resources:
+    logging.debug('Hardcoding dynamic attributes')
+    protoresources.HardcodeSharedLibraryDynamicAttributes(
+        build.proto_path, options.is_bundle_module,
+        options.shared_resources_allowlist)
+
+    build_utils.CheckOutput([
+        options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
+        arsc_path, build.proto_path
+    ])
+
+  if build.arsc_path is None:
+    os.remove(arsc_path)
+
+  if options.optimized_proto_path:
+    _OptimizeApk(build.optimized_proto_path, options, build.temp_dir,
+                 build.proto_path, build.r_txt_path)
+  elif options.optimized_arsc_path:
+    _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
+                 build.arsc_path, build.r_txt_path)
+
+  return desired_manifest_package_name
+
+
+def _CombineResourceConfigs(resources_config_paths, out_config_path):
+  with open(out_config_path, 'w') as out_config:
+    for config_path in resources_config_paths:
+      with open(config_path) as config:
+        out_config.write(config.read())
+        out_config.write('\n')
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
+  """Optimize intermediate .ap_ file with aapt2.
+
+  Args:
+    output: Path to write to.
+    options: The command-line options.
+    temp_dir: A temporary directory.
+    unoptimized_path: path of the apk to optimize.
+    r_txt_path: path to the R.txt file of the unoptimized apk.
+  """
+  optimize_command = [
+      options.aapt2_path,
+      'optimize',
+      unoptimized_path,
+      '-o',
+      output,
+  ]
+
+  # Optimize the resources.arsc file by obfuscating resource names and only
+  # allowing usage via R.java constants.
+  if options.strip_resource_names:
+    no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path)
+    gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+    if options.resources_config_paths:
+      _CombineResourceConfigs(options.resources_config_paths, gen_config_path)
+    with open(gen_config_path, 'a') as config:
+      for resource in no_collapse_resources:
+        config.write('{}#no_collapse\n'.format(resource))
+
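+    # E.g. (hypothetical resource) the generated config now ends with lines
+    # like 'id/search_box#no_collapse'.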
+    optimize_command += [
+        '--collapse-resource-names',
+        '--resources-config-path',
+        gen_config_path,
+    ]
+
+  if options.short_resource_paths:
+    optimize_command += ['--shorten-resource-paths']
+  if options.resources_path_map_out_path:
+    optimize_command += [
+        '--resource-path-shortening-map', options.resources_path_map_out_path
+    ]
+
+  logging.debug('Running aapt2 optimize')
+  build_utils.CheckOutput(
+      optimize_command, print_stdout=False, print_stderr=False)
+
+
+def _ExtractNonCollapsableResources(rtxt_path):
+  """Extract resources that should not be collapsed from the R.txt file
+
+  Resources of type ID are references to UI elements/views. They are used by
+  UI automation testing frameworks. They are kept in so that they don't break
+  tests, even though they may not actually be used during runtime. See
+  https://crbug.com/900993
+  App icons (aka mipmaps) are sometimes referenced by other apps by name so must
+  be keps as well. See https://b/161564466
+
+  Args:
+    rtxt_path: Path to R.txt file with all the resources
+  Returns:
+    List of resources in the form of <resource_type>/<resource_name>
+  """
+  resources = []
+  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
+  with open(rtxt_path) as rtxt:
+    for line in rtxt:
+      for resource_type in _NO_COLLAPSE_TYPES:
+        if ' {} '.format(resource_type) in line:
+          resource_name = line.split()[2]
+          resources.append('{}/{}'.format(resource_type, resource_name))
+  return resources
+
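+# Illustrative R.txt line (hypothetical resource name): the line
+#   'int id search_box 0x7f0b0001'
+# contains ' id ' and therefore contributes 'id/search_box' to the result.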
+
+def _CreateStableIdsFile(in_path, out_path, package_name):
+  """Transforms a file generated by --emit-ids from another package.
+
+  --stable-ids is generally meant to be used by different versions of the same
+  package. To make it work for other packages, we need to transform the package
+  name references to match the package that resources are being generated for.
+
+  Note: This will fail if the package ID of the resources in
+  |options.use_resource_ids_path| does not match the package ID of the
+  resources being linked.
+  """
+  with open(in_path) as stable_ids_file:
+    with open(out_path, 'w') as output_ids_file:
+      output_stable_ids = re.sub(
+          r'^.*?:',
+          package_name + ':',
+          stable_ids_file.read(),
+          flags=re.MULTILINE)
+      output_ids_file.write(output_stable_ids)
+
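+# Illustrative transform (hypothetical package names): the --emit-ids line
+#   'com.example.donor:string/app_name = 0x7f030000'
+# becomes, for package_name 'com.example.target',
+#   'com.example.target:string/app_name = 0x7f030000'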
+
+def _WriteOutputs(options, build):
+  possible_outputs = [
+      (options.srcjar_out, build.srcjar_path),
+      (options.r_text_out, build.r_txt_path),
+      (options.arsc_path, build.arsc_path),
+      (options.proto_path, build.proto_path),
+      (options.optimized_arsc_path, build.optimized_arsc_path),
+      (options.optimized_proto_path, build.optimized_proto_path),
+      (options.proguard_file, build.proguard_path),
+      (options.proguard_file_main_dex, build.proguard_main_dex_path),
+      (options.emit_ids_out, build.emit_ids_path),
+      (options.info_path, build.info_path),
+  ]
+
+  for final, temp in possible_outputs:
+    # Write file only if it's changed.
+    if final and not (os.path.exists(final) and filecmp.cmp(final, temp)):
+      shutil.move(temp, final)
+
+
+def _CreateNormalizedManifestForVerification(options):
+  with build_utils.TempDir() as tempdir:
+    fixed_manifest, _ = _FixManifest(
+        options, tempdir, extra_manifest=options.extra_verification_manifest)
+    with open(fixed_manifest) as f:
+      return manifest_utils.NormalizeManifest(f.read())
+
+
+def main(args):
+  build_utils.InitLogging('RESOURCE_DEBUG')
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  if options.expected_file:
+    actual_data = _CreateNormalizedManifestForVerification(options)
+    diff_utils.CheckExpectations(actual_data, options)
+    if options.only_verify_expectations:
+      return
+
+  path = options.arsc_path or options.proto_path
+  debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
+  if debug_temp_resources_dir:
+    path = os.path.join(debug_temp_resources_dir, os.path.basename(path))
+  else:
+    # Use a deterministic temp directory since .pb files embed the absolute
+    # path of resources: crbug.com/939984
+    path = path + '.tmpdir'
+  build_utils.DeleteDirectory(path)
+
+  with resource_utils.BuildContext(
+      temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:
+
+    manifest_package_name = _PackageApk(options, build)
+
+    # If --shared-resources-allowlist is used, all the resources listed in the
+    # corresponding R.txt file will be non-final, and an onResourcesLoaded()
+    # will be generated to adjust them at runtime.
+    #
+    # Otherwise, if --shared-resources is used, all resources will be
+    # non-final, and an onResourcesLoaded() method will be generated too.
+    #
+    # Otherwise, all resources will be final, and no method will be generated.
+    #
+    rjava_build_options = resource_utils.RJavaBuildOptions()
+    if options.shared_resources_allowlist:
+      rjava_build_options.ExportSomeResources(
+          options.shared_resources_allowlist)
+      rjava_build_options.GenerateOnResourcesLoaded()
+      if options.shared_resources:
+        # The final resources will only be used in WebLayer, so hardcode the
+        # package ID to be what WebLayer expects.
+        rjava_build_options.SetFinalPackageId(
+            protoresources.SHARED_LIBRARY_HARDCODED_ID)
+    elif options.shared_resources or options.app_as_shared_lib:
+      rjava_build_options.ExportAllResources()
+      rjava_build_options.GenerateOnResourcesLoaded()
+
+    custom_root_package_name = options.r_java_root_package_name
+    grandparent_custom_package_name = None
+
+    # Always generate an R.java file for the package listed in
+    # AndroidManifest.xml because this is where the Android framework looks to
+    # find
+    # onResourcesLoaded() for shared library apks. While not actually necessary
+    # for application apks, it also doesn't hurt.
+    apk_package_name = manifest_package_name
+
+    if options.package_name and not options.arsc_package_name:
+      # Feature modules have their own custom root package name and should
+      # inherit from the appropriate base module package. This behaviour should
+      # not be present for test apks with an apk under test. Thus,
+      # arsc_package_name is used as it is only defined for test apks with an
+      # apk under test.
+      custom_root_package_name = options.package_name
+      grandparent_custom_package_name = options.r_java_root_package_name
+      # Feature modules have the same manifest package as the base module but
+      # they should not create an R.java for said manifest package because it
+      # will be created in the base module.
+      apk_package_name = None
+
+    logging.debug('Creating R.srcjar')
+    resource_utils.CreateRJavaFiles(
+        build.srcjar_dir, apk_package_name, build.r_txt_path,
+        options.extra_res_packages, rjava_build_options, options.srcjar_out,
+        custom_root_package_name, grandparent_custom_package_name,
+        options.extra_main_r_text_files)
+    build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)
+
+    # Sanity check that the created resources have the expected package ID.
+    logging.debug('Performing sanity check')
+    if options.package_id:
+      expected_id = options.package_id
+    elif options.shared_resources:
+      expected_id = 0
+    else:
+      expected_id = 127  # == '0x7f'.
+    _, package_id = resource_utils.ExtractArscPackage(
+        options.aapt2_path,
+        build.arsc_path if options.arsc_path else build.proto_path)
+    if package_id != expected_id:
+      raise Exception(
+          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))
+
+    logging.debug('Copying outputs')
+    _WriteOutputs(options, build)
+
+  if options.depfile:
+    depfile_deps = (options.dependencies_res_zips +
+                    options.dependencies_res_zip_overlays +
+                    options.extra_main_r_text_files + options.include_resources)
+    build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/compile_resources.pydeps b/src/build/android/gyp/compile_resources.pydeps
new file mode 100644
index 0000000..174b526
--- /dev/null
+++ b/src/build/android/gyp/compile_resources.pydeps
@@ -0,0 +1,61 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../../third_party/protobuf/python/google/__init__.py
+../../../third_party/protobuf/python/google/protobuf/__init__.py
+../../../third_party/protobuf/python/google/protobuf/descriptor.py
+../../../third_party/protobuf/python/google/protobuf/descriptor_database.py
+../../../third_party/protobuf/python/google/protobuf/descriptor_pool.py
+../../../third_party/protobuf/python/google/protobuf/internal/__init__.py
+../../../third_party/protobuf/python/google/protobuf/internal/api_implementation.py
+../../../third_party/protobuf/python/google/protobuf/internal/containers.py
+../../../third_party/protobuf/python/google/protobuf/internal/decoder.py
+../../../third_party/protobuf/python/google/protobuf/internal/encoder.py
+../../../third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py
+../../../third_party/protobuf/python/google/protobuf/internal/extension_dict.py
+../../../third_party/protobuf/python/google/protobuf/internal/message_listener.py
+../../../third_party/protobuf/python/google/protobuf/internal/python_message.py
+../../../third_party/protobuf/python/google/protobuf/internal/type_checkers.py
+../../../third_party/protobuf/python/google/protobuf/internal/well_known_types.py
+../../../third_party/protobuf/python/google/protobuf/internal/wire_format.py
+../../../third_party/protobuf/python/google/protobuf/message.py
+../../../third_party/protobuf/python/google/protobuf/message_factory.py
+../../../third_party/protobuf/python/google/protobuf/reflection.py
+../../../third_party/protobuf/python/google/protobuf/symbol_database.py
+../../../third_party/protobuf/python/google/protobuf/text_encoding.py
+../../../third_party/protobuf/python/google/protobuf/text_format.py
+../../../third_party/six/src/six.py
+../../gn_helpers.py
+compile_resources.py
+proto/Configuration_pb2.py
+proto/Resources_pb2.py
+proto/__init__.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/manifest_utils.py
+util/parallel.py
+util/protoresources.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/copy_ex.py b/src/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..41604c4
--- /dev/null
+++ b/src/build/android/gyp/copy_ex.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+from __future__ import print_function
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+  """Returns a list of all the files in |base|. Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result
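+
+# Illustrative result of _get_all_files (hypothetical layout): for
+# base='out/res' containing 'out/res/values/strings.xml', the returned list is
+# ['/res/values/strings.xml']; the leading separator comes from the
+# root[len(dirname):] slice.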
+
+def CopyFile(f, dest, deps):
+  """Copy file or directory and update deps."""
+  if os.path.isdir(f):
+    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
+    deps.extend(_get_all_files(f))
+  else:
+    if os.path.isfile(os.path.join(dest, os.path.basename(f))):
+      dest = os.path.join(dest, os.path.basename(f))
+
+    deps.append(f)
+
+    if os.path.isfile(dest):
+      if filecmp.cmp(dest, f, shallow=False):
+        return
+      # The shutil.copy() below would fail if the file does not have write
+      # permissions. Deleting the file has similar costs to modifying the
+      # permissions.
+      os.unlink(dest)
+
+    shutil.copy(f, dest)
+
+def DoCopy(options, deps):
+  """Copy files or directories given in options.files and update deps."""
+  files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
+                                             for f in options.files))
+
+  for f in files:
+    if os.path.isdir(f) and not options.clear:
+      print('To avoid stale files you must use --clear when copying '
+            'directories')
+      sys.exit(-1)
+    CopyFile(f, options.dest, deps)
+
+def DoRenaming(options, deps):
+  """Copy and rename files given in options.renaming_sources and update deps."""
+  src_files = list(itertools.chain.from_iterable(
+                   build_utils.ParseGnList(f)
+                   for f in options.renaming_sources))
+
+  dest_files = list(itertools.chain.from_iterable(
+                    build_utils.ParseGnList(f)
+                    for f in options.renaming_destinations))
+
+  if len(src_files) != len(dest_files):
+    print('Renaming source and destination file counts do not match.')
+    sys.exit(-1)
+
+  for src, dest in zip(src_files, dest_files):
+    if os.path.isdir(src):
+      print('Renaming a directory is not supported.')
+      sys.exit(-1)
+    else:
+      CopyFile(src, os.path.join(options.dest, dest), deps)
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--renaming-sources',
+                    action='append',
+                    help='List of files to be renamed while being '
+                         'copied to the dest directory.')
+  parser.add_option('--renaming-destinations',
+                    action='append',
+                    help='List of destination file names without paths; the '
+                         'number of elements must match --renaming-sources.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  deps = []
+
+  if options.files:
+    DoCopy(options, deps)
+
+  if options.renaming_sources:
+    DoRenaming(options, deps)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, options.stamp, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/copy_ex.pydeps b/src/build/android/gyp/copy_ex.pydeps
new file mode 100644
index 0000000..3735251
--- /dev/null
+++ b/src/build/android/gyp/copy_ex.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_apk_operations_script.py b/src/build/android/gyp/create_apk_operations_script.py
new file mode 100755
index 0000000..660567f
--- /dev/null
+++ b/src/build/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_apk_operations_script.py
+
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+      script_directory, p))
+  sys.path.append(resolve(${APK_OPERATIONS_DIR}))
+  import apk_operations
+  output_dir = resolve(${OUTPUT_DIR})
+  try:
+    apk_operations.Run(
+        output_dir,
+        resolve(${APK_PATH}),
+        [resolve(p) for p in ${ADDITIONAL_APK_PATHS}],
+        resolve(${INC_JSON_PATH}),
+        ${FLAGS_FILE},
+        ${TARGET_CPU},
+        resolve(${MAPPING_PATH}))
+  except TypeError:
+    rel_output_dir = os.path.relpath(output_dir)
+    rel_script_path = os.path.relpath(sys.argv[0], output_dir)
+    sys.stderr.write('Script out-of-date. Rebuild via:\\n')
+    sys.stderr.write('  ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
+    return 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+""")
+
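+# Note on the template above: values are embedded via repr() in main() below,
+# so a path argument such as 'MyApk.apk' (a hypothetical example) appears in
+# the generated script as the Python string literal 'MyApk.apk', and a missing
+# optional argument appears as the literal None.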
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.')
+  parser.add_argument('--apk-path')
+  parser.add_argument('--incremental-install-json-path')
+  parser.add_argument('--command-line-flags-file')
+  parser.add_argument('--target-cpu')
+  parser.add_argument(
+      '--additional-apk-path',
+      action='append',
+      dest='additional_apk_paths',
+      default=[],
+      help='Paths to APKs to be installed prior to --apk-path.')
+  parser.add_argument('--proguard-mapping-path')
+  args = parser.parse_args(args)
+
+  def relativize(path):
+    """Returns the path relative to the output script directory."""
+    if path is None:
+      return path
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+  apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+  apk_operations_dir = relativize(apk_operations_dir)
+
+  with open(args.script_output_path, 'w') as script:
+    script_dict = {
+        'APK_OPERATIONS_DIR': repr(apk_operations_dir),
+        'OUTPUT_DIR': repr(relativize('.')),
+        'APK_PATH': repr(relativize(args.apk_path)),
+        'ADDITIONAL_APK_PATHS':
+        [relativize(p) for p in args.additional_apk_paths],
+        'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
+        'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
+        'FLAGS_FILE': repr(args.command_line_flags_file),
+        'TARGET_CPU': repr(args.target_cpu),
+    }
+    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+  os.chmod(args.script_output_path, 0o750)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_apk_operations_script.pydeps b/src/build/android/gyp/create_apk_operations_script.pydeps
new file mode 100644
index 0000000..e09bb72
--- /dev/null
+++ b/src/build/android/gyp/create_apk_operations_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py
+../../gn_helpers.py
+create_apk_operations_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_app_bundle.py b/src/build/android/gyp/create_app_bundle.py
new file mode 100755
index 0000000..0b44c16
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle.py
@@ -0,0 +1,532 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import dexdump
+
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+from xml.etree import ElementTree
+
+import bundletool
+
+# Location of language-based assets in bundle modules.
+_LOCALES_SUBDIR = 'assets/locales/'
+
+# The fallback locale should always have its .pak file included in
+# the base apk, i.e. not use language-based asset targeting. This ensures
+# that Chrome won't crash on startup if its bundle is installed on a device
+# with an unsupported system locale (e.g. fur-rIT).
+_FALLBACK_LOCALE = 'en-US'
+
+# List of split dimensions recognized by this tool.
+_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]
+
+# Due to historical reasons, certain languages identified by Chromium with a
+# 3-letter ISO 639-2 code are mapped to a nearly equivalent 2-letter
+# ISO 639-1 code instead (older Android releases only supported the latter
+# when matching resources).
+#
+# This is the same conversion as for Java resources.
+_SHORTEN_LANGUAGE_CODE_MAP = {
+  'fil': 'tl',  # Filipino to Tagalog.
+}
+
+# A list of extensions corresponding to files that should never be compressed
+# in the bundle. This used to be handled by bundletool automatically until
+# release 0.8.0, which required that this be passed to the BundleConfig
+# file instead.
+#
+# This is the original list, which was taken from aapt2, with 'webp' added to
+# it (which curiously was missing from the list).
+_UNCOMPRESSED_FILE_EXTS = [
+    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'gif', 'imy', 'jet',
+    'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
+    'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
+    'xmf'
+]
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--out-bundle', required=True,
+                      help='Output bundle zip archive.')
+  parser.add_argument('--module-zips', required=True,
+                      help='GN-list of module zip archives.')
+  parser.add_argument(
+      '--pathmap-in-paths',
+      action='append',
+      help='List of module pathmap files.')
+  parser.add_argument(
+      '--module-name',
+      action='append',
+      dest='module_names',
+      help='List of module names.')
+  parser.add_argument(
+      '--pathmap-out-path', help='Path to combined pathmap file for bundle.')
+  parser.add_argument(
+      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
+  parser.add_argument(
+      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
+  parser.add_argument('--uncompressed-assets', action='append',
+                      help='GN-list of uncompressed assets.')
+  parser.add_argument(
+      '--compress-shared-libraries',
+      action='store_true',
+      help='Whether to store native libraries compressed.')
+  parser.add_argument('--split-dimensions',
+                      help="GN-list of split dimensions to support.")
+  parser.add_argument(
+      '--base-module-rtxt-path',
+      help='Optional path to the base module\'s R.txt file, only used with '
+      'language split dimension.')
+  parser.add_argument(
+      '--base-allowlist-rtxt-path',
+      help='Optional path to an R.txt file, string resources '
+      'listed there _and_ in --base-module-rtxt-path will '
+      'be kept in the base bundle module, even if language'
+      ' splitting is enabled.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+
+  parser.add_argument(
+      '--validate-services',
+      action='store_true',
+      help='Check if services are in base module if isolatedSplits is enabled.')
+
+  options = parser.parse_args(args)
+  options.module_zips = build_utils.ParseGnList(options.module_zips)
+  options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths)
+  options.pathmap_in_paths = build_utils.ParseGnList(options.pathmap_in_paths)
+
+  if len(options.module_zips) == 0:
+    raise Exception('The module zip list cannot be empty.')
+
+  # Merge all uncompressed assets into a set.
+  uncompressed_list = []
+  if options.uncompressed_assets:
+    for l in options.uncompressed_assets:
+      for entry in build_utils.ParseGnList(l):
+        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+        pos = entry.find(':')
+        if pos >= 0:
+          uncompressed_list.append(entry[pos + 1:])
+        else:
+          uncompressed_list.append(entry)
+
+  options.uncompressed_assets = set(uncompressed_list)
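+  # E.g. (hypothetical entries): 'assets/foo.dat' stays as-is, while
+  # 'src/foo.dat:assets/foo.dat' contributes just 'assets/foo.dat'.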
+
+  # Check that all split dimensions are valid
+  if options.split_dimensions:
+    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
+    for dim in options.split_dimensions:
+      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
+        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
+            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))
+
+  # As a special case, --base-allowlist-rtxt-path can be empty to indicate
+  # that the module doesn't need such an allowlist. That's because it is easier
+  # to check this condition here than through GN rules :-(
+  if options.base_allowlist_rtxt_path == '':
+    options.base_module_rtxt_path = None
+
+  # Check --base-module-rtxt-path and --base-allowlist-rtxt-path usage.
+  if options.base_module_rtxt_path:
+    if not options.base_allowlist_rtxt_path:
+      parser.error(
+          '--base-module-rtxt-path requires --base-allowlist-rtxt-path')
+    if 'language' not in options.split_dimensions:
+      parser.error('--base-module-rtxt-path is only valid with '
+                   'language-based splits.')
+
+  return options
+
+
+def _MakeSplitDimension(value, enabled):
+  """Return dict modelling a BundleConfig splitDimension entry."""
+  return {'value': value, 'negate': not enabled}
+
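+# For example, _MakeSplitDimension('LANGUAGE', True) returns
+# {'value': 'LANGUAGE', 'negate': False}, i.e. the LANGUAGE split is enabled.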
+
+def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
+                              split_dimensions, base_master_resource_ids):
+  """Generate a dictionary that can be written to a JSON BuildConfig.
+
+  Args:
+    uncompressed_assets: A list or set of file paths under assets/ that always
+      be stored uncompressed.
+    compress_shared_libraries: Boolean, whether to compress native libs.
+    split_dimensions: list of split dimensions.
+    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
+      inside the base module, even when split dimensions are enabled.
+  Returns:
+    A dictionary that can be written as a json file.
+  """
+  # Compute splitsConfig list. Each item is a dictionary that can have
+  # the following keys:
+  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
+  #    'negate': Boolean, True to indicate that the bundle should *not* be
+  #              split (unused at the moment by this script).
+
+  split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
+                       for dim in _ALL_SPLIT_DIMENSIONS ]
+
+  # Native libraries loaded by the crazy linker.
+  # Whether other .so files are compressed is controlled by
+  # "uncompressNativeLibraries".
+  uncompressed_globs = ['lib/*/crazy.*']
+  # Locale-specific pak files stored in bundle splits need not be compressed.
+  uncompressed_globs.extend(
+      ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
+  uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
+  # NOTE: Use '**' instead of '*' to work through directories!
+  uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+
+  data = {
+      'optimizations': {
+          'splitsConfig': {
+              'splitDimension': split_dimensions,
+          },
+          'uncompressNativeLibraries': {
+              'enabled': not compress_shared_libraries,
+          },
+          'uncompressDexFiles': {
+              'enabled': True,  # Applies only for P+.
+          }
+      },
+      'compression': {
+          'uncompressedGlob': sorted(uncompressed_globs),
+      },
+  }
+
+  if base_master_resource_ids:
+    data['master_resources'] = {
+        'resource_ids': list(base_master_resource_ids),
+    }
+
+  return json.dumps(data, indent=2)
+
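+# Illustrative output (abridged, hypothetical inputs): the returned JSON looks
+# like:
+#   {
+#     "optimizations": {
+#       "splitsConfig": {"splitDimension": [{"value": "ABI", "negate": true},
+#                                           ...]},
+#       "uncompressNativeLibraries": {"enabled": true},
+#       "uncompressDexFiles": {"enabled": true}
+#     },
+#     "compression": {"uncompressedGlob": ["**.jpg", "**.png", ...]}
+#   }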
+
+def _RewriteLanguageAssetPath(src_path):
+  """Rewrite the destination path of a locale asset for language-based splits.
+
+  Should only be used when generating bundles with language-based splits.
+  This will rewrite paths that look like locales/<locale>.pak into
+  locales#<language>/<locale>.pak, where <language> is the language code
+  from the locale.
+
+  Returns new path.
+  """
+  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
+    return src_path
+
+  locale = src_path[len(_LOCALES_SUBDIR):-4]
+  android_locale = resource_utils.ToAndroidLocaleName(locale)
+
+  # The locale format is <lang>-<region> or <lang> or BCP-47 (e.g b+sr+Latn).
+  # Extract the language.
+  pos = android_locale.find('-')
+  if android_locale.startswith('b+'):
+    # If locale is in BCP-47 the language is the second tag (e.g. b+sr+Latn)
+    android_language = android_locale.split('+')[1]
+  elif pos >= 0:
+    android_language = android_locale[:pos]
+  else:
+    android_language = android_locale
+
+  if locale == _FALLBACK_LOCALE:
+    # Fallback locale .pak files must be placed in a different directory
+    # to ensure they are always stored in the base module.
+    result_path = 'assets/fallback-locales/%s.pak' % locale
+  else:
+    # Other language .pak files go into a language-specific asset directory
+    # that bundletool will store in separate split APKs.
+    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
+
+  return result_path
+
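+# Illustrative rewrites (hypothetical locales):
+#   'assets/locales/fr.pak'    -> 'assets/locales#lang_fr/fr.pak'
+#   'assets/locales/en-US.pak' -> 'assets/fallback-locales/en-US.pak'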
+
+def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
+  """Splits assets in a module if needed.
+
+  Args:
+    src_module_zip: input zip module path.
+    tmp_dir: Path to temporary directory, where the new output module might
+      be written to.
+    split_dimensions: list of split dimensions.
+
+  Returns:
+    src_module_zip unchanged if the module does not need asset targeting.
+    Otherwise, the path of a new module zip archive under tmp_dir with the
+    same file name, whose asset paths target the proper dimensions.
+  """
+  split_language = 'LANGUAGE' in split_dimensions
+  if not split_language:
+    # Nothing to target, so return original module path.
+    return src_module_zip
+
+  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
+    language_files = [
+      f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]
+
+    if not language_files:
+      # No language-based assets to split in this module.
+      return src_module_zip
+
+    tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
+    with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
+      for info in src_zip.infolist():
+        src_path = info.filename
+        is_compressed = info.compress_type != zipfile.ZIP_STORED
+
+        dst_path = src_path
+        if src_path in language_files:
+          dst_path = _RewriteLanguageAssetPath(src_path)
+
+        build_utils.AddToZipHermetic(
+            dst_zip,
+            dst_path,
+            data=src_zip.read(src_path),
+            compress=is_compressed)
+
+    return tmp_zip
+
+
+def _GenerateBaseResourcesAllowList(base_module_rtxt_path,
+                                    base_allowlist_rtxt_path):
+  """Generate a allowlist of base master resource ids.
+
+  Args:
+    base_module_rtxt_path: Path to base module R.txt file.
+    base_allowlist_rtxt_path: Path to base allowlist R.txt file.
+  Returns:
+    list of resource ids.
+  """
+  ids_map = resource_utils.GenerateStringResourcesAllowList(
+      base_module_rtxt_path, base_allowlist_rtxt_path)
+  return ids_map.keys()
+
+
+def _ConcatTextFiles(in_paths, out_path):
+  """Concatenate the contents of multiple text files into one.
+
+  Each file's contents are preceded by a line containing the original filename.
+
+  Args:
+    in_paths: List of input file paths.
+    out_path: Path to output file.
+  """
+  with open(out_path, 'w') as out_file:
+    for in_path in in_paths:
+      if not os.path.exists(in_path):
+        continue
+      with open(in_path, 'r') as in_file:
+        out_file.write('-- Contents of {}\n'.format(os.path.basename(in_path)))
+        out_file.write(in_file.read())
+
+
+def _LoadPathmap(pathmap_path):
+  """Load the pathmap of obfuscated resource paths.
+
+  Returns: A dict mapping from obfuscated paths to original paths or an
+           empty dict if passed a None |pathmap_path|.
+  """
+  if pathmap_path is None:
+    return {}
+
+  pathmap = {}
+  with open(pathmap_path, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if line.startswith('--') or line == '':
+        continue
+      original, renamed = line.split(' -> ')
+      pathmap[renamed] = original
+  return pathmap
+
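+# Illustrative pathmap entry (hypothetical paths): a line such as
+#   'res/drawable/button.xml -> res/a.xml'
+# yields the mapping {'res/a.xml': 'res/drawable/button.xml'}.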
+
+def _WriteBundlePathmap(module_pathmap_paths, module_names,
+                        bundle_pathmap_path):
+  """Combine the contents of module pathmaps into a bundle pathmap.
+
+  This rebases the resource paths inside the module pathmap before adding them
+  to the bundle pathmap. So res/a.xml inside the base module pathmap would be
+  base/res/a.xml in the bundle pathmap.
+  """
+  with open(bundle_pathmap_path, 'w') as bundle_pathmap_file:
+    for module_pathmap_path, module_name in zip(module_pathmap_paths,
+                                                module_names):
+      if not os.path.exists(module_pathmap_path):
+        continue
+      module_pathmap = _LoadPathmap(module_pathmap_path)
+      for short_path, long_path in module_pathmap.items():
+        rebased_long_path = '{}/{}'.format(module_name, long_path)
+        rebased_short_path = '{}/{}'.format(module_name, short_path)
+        line = '{} -> {}\n'.format(rebased_long_path, rebased_short_path)
+        bundle_pathmap_file.write(line)
+
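+# Illustrative output (hypothetical module and paths): for module 'base' and a
+# module pathmap entry 'res/drawable/button.xml -> res/a.xml', the bundle
+# pathmap receives the line:
+#   'base/res/drawable/button.xml -> base/res/a.xml'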
+
+def _GetManifestForModule(bundle_path, module_name):
+  return ElementTree.fromstring(
+      bundletool.RunBundleTool([
+          'dump', 'manifest', '--bundle', bundle_path, '--module', module_name
+      ]))
+
+
+def _GetComponentNames(manifest, tag_name):
+  android_name = '{%s}name' % manifest_utils.ANDROID_NAMESPACE
+  return [s.attrib.get(android_name) for s in manifest.iter(tag_name)]
+
+
+def _MaybeCheckServicesAndProvidersPresentInBase(bundle_path, module_zips):
+  """Checks bundles with isolated splits define all services in the base module.
+
+  Due to b/169196314, service classes are not found if they are not present in
+  the base module. Providers are also checked because they are loaded early in
+  startup, and keeping them in the base module gives more time for the chrome
+  split to load.
+  """
+  base_manifest = _GetManifestForModule(bundle_path, 'base')
+  isolated_splits = base_manifest.get('{%s}isolatedSplits' %
+                                      manifest_utils.ANDROID_NAMESPACE)
+  if isolated_splits != 'true':
+    return
+
+  # Collect service names from all split manifests.
+  base_zip = None
+  service_names = _GetComponentNames(base_manifest, 'service')
+  provider_names = _GetComponentNames(base_manifest, 'provider')
+  for module_zip in module_zips:
+    name = os.path.basename(module_zip)[:-len('.zip')]
+    if name == 'base':
+      base_zip = module_zip
+    else:
+      service_names.extend(
+          _GetComponentNames(_GetManifestForModule(bundle_path, name),
+                             'service'))
+      module_providers = _GetComponentNames(
+          _GetManifestForModule(bundle_path, name), 'provider')
+      if module_providers:
+        raise Exception("Providers should all be declared in the base manifest."
+                        " '%s' module declared: %s" % (name, module_providers))
+
+  # Extract classes from the base module's dex.
+  classes = set()
+  base_package_name = manifest_utils.GetPackage(base_manifest)
+  for package in dexdump.Dump(base_zip):
+    for name, package_dict in package.items():
+      if not name:
+        name = base_package_name
+      classes.update('%s.%s' % (name, c)
+                     for c in package_dict['classes'].keys())
+
+  ignored_service_names = {
+      # Defined in the chime DFM manifest, but unused.
+      # org.chromium.chrome.browser.chime.ScheduledTaskService is used instead.
+      ("com.google.android.libraries.notifications.entrypoints.scheduled."
+       "ScheduledTaskService"),
+
+      # Defined in the chime DFM manifest, only used pre-O (where isolated
+      # splits are not supported).
+      ("com.google.android.libraries.notifications.executor.impl.basic."
+       "ChimeExecutorApiService"),
+  }
+
+  # Ensure all services are present in base module.
+  for service_name in service_names:
+    if service_name not in classes:
+      if service_name in ignored_service_names:
+        continue
+      raise Exception("Service %s should be present in the base module's dex."
+                      " See b/169196314 for more details." % service_name)
+
+  # Ensure all providers are present in base module.
+  for provider_name in provider_names:
+    if provider_name not in classes:
+      raise Exception(
+          "Provider %s should be present in the base module's dex." %
+          provider_name)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  split_dimensions = []
+  if options.split_dimensions:
+    split_dimensions = [x.upper() for x in options.split_dimensions]
+
+
+  with build_utils.TempDir() as tmp_dir:
+    module_zips = [
+        _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
+        for module in options.module_zips]
+
+    base_master_resource_ids = None
+    if options.base_module_rtxt_path:
+      base_master_resource_ids = _GenerateBaseResourcesAllowList(
+          options.base_module_rtxt_path, options.base_allowlist_rtxt_path)
+
+    bundle_config = _GenerateBundleConfigJson(
+        options.uncompressed_assets, options.compress_shared_libraries,
+        split_dimensions, base_master_resource_ids)
+
+    tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')
+
+    # Important: bundletool requires that the bundle config file is
+    # named with a .pb.json extension.
+    tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'
+
+    with open(tmp_bundle_config, 'w') as f:
+      f.write(bundle_config)
+
+    cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [
+        '-jar',
+        bundletool.BUNDLETOOL_JAR_PATH,
+        'build-bundle',
+        '--modules=' + ','.join(module_zips),
+        '--output=' + tmp_bundle,
+        '--config=' + tmp_bundle_config,
+    ]
+
+    build_utils.CheckOutput(
+        cmd_args,
+        print_stdout=True,
+        print_stderr=True,
+        stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
+        fail_on_output=options.warnings_as_errors)
+
+    if options.validate_services:
+      # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with
+      # isolated splits disabled and 2s for bundles with isolated splits
+      # enabled.  Consider making this run in parallel or move into a separate
+      # step before enabling isolated splits by default.
+      _MaybeCheckServicesAndProvidersPresentInBase(tmp_bundle, module_zips)
+
+    shutil.move(tmp_bundle, options.out_bundle)
+
+  if options.rtxt_out_path:
+    _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)
+
+  if options.pathmap_out_path:
+    _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
+                        options.pathmap_out_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/create_app_bundle.pydeps b/src/build/android/gyp/create_app_bundle.pydeps
new file mode 100644
index 0000000..cbb471a
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle.pydeps
@@ -0,0 +1,48 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/dexdump.py
+bundletool.py
+create_app_bundle.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_app_bundle_apks.py b/src/build/android/gyp/create_app_bundle_apks.py
new file mode 100755
index 0000000..5950696
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle_apks.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an .apks from an .aab."""
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import app_bundle_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument(
+      '--bundle', required=True, help='Path to input .aab file.')
+  parser.add_argument(
+      '--output', required=True, help='Path to output .apks file.')
+  parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.')
+  parser.add_argument(
+      '--keystore-path', required=True, help='Path to keystore.')
+  parser.add_argument(
+      '--keystore-password', required=True, help='Keystore password.')
+  parser.add_argument(
+      '--keystore-name', required=True, help='Key name within keystore')
+  parser.add_argument(
+      '--minimal',
+      action='store_true',
+      help='Create APKs archive with minimal language support.')
+
+  args = parser.parse_args()
+
+  app_bundle_utils.GenerateBundleApks(
+      args.bundle,
+      args.output,
+      args.aapt2_path,
+      args.keystore_path,
+      args.keystore_password,
+      args.keystore_name,
+      minimal=args.minimal,
+      check_for_noop=False)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/create_app_bundle_apks.pydeps b/src/build/android/gyp/create_app_bundle_apks.pydeps
new file mode 100644
index 0000000..20d8ffe
--- /dev/null
+++ b/src/build/android/gyp/create_app_bundle_apks.pydeps
@@ -0,0 +1,36 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+../pylib/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+bundletool.py
+create_app_bundle_apks.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_bundle_wrapper_script.py b/src/build/android/gyp/create_bundle_wrapper_script.py
new file mode 100755
index 0000000..282e206
--- /dev/null
+++ b/src/build/android/gyp/create_bundle_wrapper_script.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a wrapper script to manage an Android App Bundle."""
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
+
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+      script_directory, p))
+  sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
+  import apk_operations
+
+  additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APK_PATHS}]
+  apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
+                              bundle_path=resolve(${BUNDLE_PATH}),
+                              bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
+                              additional_apk_paths=additional_apk_paths,
+                              aapt2_path=resolve(${AAPT2_PATH}),
+                              keystore_path=resolve(${KEYSTORE_PATH}),
+                              keystore_password=${KEYSTORE_PASSWORD},
+                              keystore_alias=${KEY_NAME},
+                              package_name=${PACKAGE_NAME},
+                              command_line_flags_file=${FLAGS_FILE},
+                              proguard_mapping_path=resolve(${MAPPING_PATH}),
+                              target_cpu=${TARGET_CPU},
+                              system_image_locales=${SYSTEM_IMAGE_LOCALES},
+                              default_modules=${DEFAULT_MODULES})
+
+if __name__ == '__main__':
+  sys.exit(main())
+""")
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path', required=True,
+                      help='Output path for executable script.')
+  parser.add_argument('--bundle-path', required=True)
+  parser.add_argument('--bundle-apks-path', required=True)
+  parser.add_argument(
+      '--additional-apk-path',
+      action='append',
+      dest='additional_apk_paths',
+      default=[],
+      help='Paths to APKs to be installed prior to --apk-path.')
+  parser.add_argument('--package-name', required=True)
+  parser.add_argument('--aapt2-path', required=True)
+  parser.add_argument('--keystore-path', required=True)
+  parser.add_argument('--keystore-password', required=True)
+  parser.add_argument('--key-name', required=True)
+  parser.add_argument('--command-line-flags-file')
+  parser.add_argument('--proguard-mapping-path')
+  parser.add_argument('--target-cpu')
+  parser.add_argument('--system-image-locales')
+  parser.add_argument('--default-modules', nargs='*', default=[])
+  args = parser.parse_args(args)
+
+  def relativize(path):
+    """Returns the path relative to the output script directory."""
+    if path is None:
+      return path
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
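+  # For example (hypothetical paths): with --script-output-path out/bin/run
+  # and --bundle-path out/app.aab, relativize returns '../app.aab', which the
+  # generated script's resolve() joins back onto its own directory at run
+  # time.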
+
+  wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+  wrapped_script_dir = relativize(wrapped_script_dir)
+  with open(args.script_output_path, 'w') as script:
+    script_dict = {
+        'WRAPPED_SCRIPT_DIR':
+        repr(wrapped_script_dir),
+        'OUTPUT_DIR':
+        repr(relativize('.')),
+        'BUNDLE_PATH':
+        repr(relativize(args.bundle_path)),
+        'BUNDLE_APKS_PATH':
+        repr(relativize(args.bundle_apks_path)),
+        'ADDITIONAL_APK_PATHS':
+        [relativize(p) for p in args.additional_apk_paths],
+        'PACKAGE_NAME':
+        repr(args.package_name),
+        'AAPT2_PATH':
+        repr(relativize(args.aapt2_path)),
+        'KEYSTORE_PATH':
+        repr(relativize(args.keystore_path)),
+        'KEYSTORE_PASSWORD':
+        repr(args.keystore_password),
+        'KEY_NAME':
+        repr(args.key_name),
+        'MAPPING_PATH':
+        repr(relativize(args.proguard_mapping_path)),
+        'FLAGS_FILE':
+        repr(args.command_line_flags_file),
+        'TARGET_CPU':
+        repr(args.target_cpu),
+        'SYSTEM_IMAGE_LOCALES':
+        repr(build_utils.ParseGnList(args.system_image_locales)),
+        'DEFAULT_MODULES':
+        repr(args.default_modules),
+    }
+    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+  os.chmod(args.script_output_path, 0o750)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_bundle_wrapper_script.pydeps b/src/build/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 0000000..7758ed6
--- /dev/null
+++ b/src/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../gn_helpers.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_java_binary_script.py b/src/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000..5bc9d08
--- /dev/null
+++ b/src/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+extra_program_args = {extra_program_args}
+java_path = {java_path}
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  fix_path = lambda p: os.path.normpath(os.path.join(offset, p))
+  classpath = [fix_path(p) for p in classpath]
+  java_path = fix_path(java_path)
+java_cmd = [java_path]
+# This is a simple argparser for jvm, jar, and classpath arguments.
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+parser.add_argument('--classpath')
+# test_runner parses the classpath for sharding junit tests.
+parser.add_argument('--print-classpath', action='store_true',
+                    help='Prints the classpath. Used by test_runner.')
+known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+
+if known_args.print_classpath:
+  sys.stdout.write(':'.join(classpath))
+  sys.exit(0)
+
+if known_args.jvm_args:
+  jvm_arguments = known_args.jvm_args.strip('"').split()
+  java_cmd.extend(jvm_arguments)
+if known_args.jar_args:
+  jar_arguments = known_args.jar_args.strip('"').split()
+  if unknown_args:
+    raise Exception('There are unknown arguments: %s' % unknown_args)
+else:
+  jar_arguments = unknown_args
+
+if known_args.classpath:
+  classpath += [known_args.classpath]
+
+{extra_flags}
+java_cmd.extend(
+    ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(jar_arguments)
+os.execvp(java_cmd[0], java_cmd)
+"""
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = optparse.OptionParser()
+  parser.add_option('--output', help='Output path for executable script.')
+  parser.add_option('--main-class',
+      help='Name of the java class with the "main" entry point.')
+  parser.add_option('--classpath', action='append', default=[],
+      help='Classpath for running the jar.')
+  parser.add_option('--noverify', action='store_true',
+      help='JVM flag: noverify.')
+  parser.add_option('--tiered-stop-at-level-one',
+                    action='store_true',
+                    help='JVM flag: -XX:TieredStopAtLevel=1.')
+
+  options, extra_program_args = parser.parse_args(argv)
+
+  extra_flags = []
+  if options.noverify:
+    extra_flags.append('java_cmd.append("-noverify")')
+  if options.tiered_stop_at_level_one:
+    extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")')
+
+  classpath = []
+  for cp_arg in options.classpath:
+    classpath += build_utils.ParseGnList(cp_arg)
+
+  run_dir = os.path.dirname(options.output)
+  classpath = [os.path.relpath(p, run_dir) for p in classpath]
+  java_path = os.path.relpath(
+      os.path.join(build_utils.JAVA_HOME, 'bin', 'java'), run_dir)
+
+  with build_utils.AtomicOutput(options.output, mode='w') as script:
+    script.write(
+        script_template.format(classpath=('"%s"' % '", "'.join(classpath)),
+                               java_path=repr(java_path),
+                               main_class=options.main_class,
+                               extra_program_args=repr(extra_program_args),
+                               extra_flags='\n'.join(extra_flags)))
+
+  os.chmod(options.output, 0o750)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_java_binary_script.pydeps b/src/build/android/gyp/create_java_binary_script.pydeps
new file mode 100644
index 0000000..6bc21fa
--- /dev/null
+++ b/src/build/android/gyp/create_java_binary_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../gn_helpers.py
+create_java_binary_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/create_r_java.py b/src/build/android/gyp/create_r_java.py
new file mode 100755
index 0000000..97e512d
--- /dev/null
+++ b/src/build/android/gyp/create_r_java.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.java file from a list of R.txt files."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+
+def _ConcatRTxts(rtxt_in_paths, combined_out_path):
+  all_lines = set()
+  for rtxt_in_path in rtxt_in_paths:
+    with open(rtxt_in_path) as rtxt_in:
+      all_lines.update(rtxt_in.read().splitlines())
+  with open(combined_out_path, 'w') as combined_out:
+    combined_out.write('\n'.join(sorted(all_lines)))
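+  # For example, two (hypothetical) R.txt inputs containing
+  #   int string app_name 0x7f0e0000
+  #   int drawable icon 0x7f080000
+  # merge into a single sorted, de-duplicated R.txt containing both lines.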
+
+
+def _CreateRJava(rtxts, package_name, srcjar_out):
+  with resource_utils.BuildContext() as build:
+    _ConcatRTxts(rtxts, build.r_txt_path)
+    rjava_build_options = resource_utils.RJavaBuildOptions()
+    rjava_build_options.ExportAllResources()
+    rjava_build_options.ExportAllStyleables()
+    rjava_build_options.GenerateOnResourcesLoaded(fake=True)
+    resource_utils.CreateRJavaFiles(build.srcjar_dir,
+                                    package_name,
+                                    build.r_txt_path,
+                                    extra_res_packages=[],
+                                    rjava_build_options=rjava_build_options,
+                                    srcjar_out=srcjar_out,
+                                    ignore_mismatched_values=True)
+    build_utils.ZipDir(srcjar_out, build.srcjar_dir)
+
+
+def main(args):
+  parser = argparse.ArgumentParser(description='Create an R.java srcjar.')
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--srcjar-out',
+                      required=True,
+                      help='Path to output srcjar.')
+  parser.add_argument('--deps-rtxts',
+                      required=True,
+                      help='List of rtxts of resource dependencies.')
+  parser.add_argument('--r-package',
+                      required=True,
+                      help='R.java package to use.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  options.deps_rtxts = build_utils.ParseGnList(options.deps_rtxts)
+
+  _CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out)
+  build_utils.WriteDepfile(options.depfile,
+                           options.srcjar_out,
+                           inputs=options.deps_rtxts)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_r_java.pydeps b/src/build/android/gyp/create_r_java.pydeps
new file mode 100644
index 0000000..45121e3
--- /dev/null
+++ b/src/build/android/gyp/create_r_java.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_java.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/create_r_txt.py b/src/build/android/gyp/create_r_txt.py
new file mode 100755
index 0000000..2adde5d
--- /dev/null
+++ b/src/build/android/gyp/create_r_txt.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.txt file from a resource zip."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+from util import resources_parser
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description='Create an R.txt from resources.')
+  parser.add_argument('--resources-zip-path',
+                      required=True,
+                      help='Path to input resources zip.')
+  parser.add_argument('--rtxt-path',
+                      required=True,
+                      help='Path to output R.txt file.')
+  options = parser.parse_args(build_utils.ExpandFileArgs(args))
+  with build_utils.TempDir() as temp:
+    dep_subdirs = resource_utils.ExtractDeps([options.resources_zip_path], temp)
+    resources_parser.RTxtGenerator(dep_subdirs).WriteRTxtFile(options.rtxt_path)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/create_r_txt.pydeps b/src/build/android/gyp/create_r_txt.pydeps
new file mode 100644
index 0000000..c7698ee
--- /dev/null
+++ b/src/build/android/gyp/create_r_txt.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_txt.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/src/build/android/gyp/create_size_info_files.py b/src/build/android/gyp/create_size_info_files.py
new file mode 100755
index 0000000..c60b02d
--- /dev/null
+++ b/src/build/android/gyp/create_size_info_files.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python3
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates size-info/*.info files used by SuperSize."""
+
+import argparse
+import collections
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+
+
+_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)')
+
+
+def _RemoveDuplicatesFromList(source_list):
+  return collections.OrderedDict.fromkeys(source_list).keys()
+
+
+def _TransformAarPaths(path):
+  # .aar files within //third_party/android_deps have a version suffix.
+  # The suffix changes each time .aar files are updated, which makes size diffs
+  # hard to compare (since the before/after have different source paths).
+  # Rather than changing how android_deps works, we employ this work-around
+  # to normalize the paths.
+  # From: .../androidx_appcompat_appcompat/appcompat-1.1.0.aar/res/...
+  #   To: .../androidx_appcompat_appcompat.aar/res/...
+  # https://crbug.com/1056455
+  if 'android_deps' not in path:
+    return path
+  return _AAR_VERSION_PATTERN.sub(r'\1', path)
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+  # Concatenate them all.
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(res_info_path, only_if_changed=False,
+                                mode='w+') as dst:
+    for p in info_paths:
+      with open(p) as src:
+        dst.writelines(_TransformAarPaths(l) for l in src)
+
+
+def _PakInfoPathsForAssets(assets):
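+  # Assets are "srcPath" or "srcPath:zipPath" entries; e.g. the (hypothetical)
+  # entry "out/foo.pak:assets/foo.pak" yields "out/foo.pak.info".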
+  return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+  info_lines = set()
+  for pak_info_path in pak_infos:
+    with open(pak_info_path, 'r') as src_info_file:
+      info_lines.update(_TransformAarPaths(x) for x in src_info_file)
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(merged_path, only_if_changed=False,
+                                mode='w+') as f:
+    f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+  # Input:  base/android/java/src/org/chromium/Foo.class
+  # Output: base.android.java.src.org.chromium.Foo
+  if not path.endswith('.class'):
+    return ''
+  path = os.path.splitext(path)[0]
+  parts = []
+  while path:
+    # Use split to be platform independent.
+    head, tail = os.path.split(path)
+    path = head
+    parts.append(tail)
+  parts.reverse()  # Package comes first
+  return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+  """Merge several .jar.info files to generate an .apk.jar.info.
+
+  Args:
+    output: output file path.
+    inputs: List of .jar.info or .jar files.
+  """
+  info_data = dict()
+  for path in inputs:
+    # For non-prebuilts: .jar.info files are written by compile_java.py and map
+    # .class files to .java source paths.
+    #
+    # For prebuilts: No .jar.info file exists, we scan the .jar files here and
+    # map .class files to the .jar.
+    #
+    # For .aar files: We look for a "source.info" file in the containing
+    # directory in order to map classes back to the .aar (rather than mapping
+    # them to the extracted .jar file).
+    if path.endswith('.info'):
+      info_data.update(jar_info_utils.ParseJarInfoFile(path))
+    else:
+      attributed_path = path
+      if not path.startswith('..'):
+        parent_path = os.path.dirname(path)
+        # See if it's a sub-jar within the .aar.
+        if os.path.basename(parent_path) == 'libs':
+          parent_path = os.path.dirname(parent_path)
+        aar_source_info_path = os.path.join(parent_path, 'source.info')
+        # source.info files exist only for jars from android_aar_prebuilt().
+        # E.g. a java_prebuilt() could point to a generated .jar.
+        if os.path.exists(aar_source_info_path):
+          attributed_path = jar_info_utils.ReadAarSourceInfo(
+              aar_source_info_path)
+
+      with zipfile.ZipFile(path) as zip_info:
+        for name in zip_info.namelist():
+          fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+          if fully_qualified_name:
+            info_data[fully_qualified_name] = _TransformAarPaths('{}/{}'.format(
+                attributed_path, name))
+
+  # only_if_changed=False since no build rules depend on this as an input.
+  with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+    jar_info_utils.WriteJarInfoFile(f, info_data)
+
+
+def _FindJarInputs(jar_paths):
+  ret = []
+  for jar_path in jar_paths:
+    jar_info_path = jar_path + '.info'
+    if os.path.exists(jar_info_path):
+      ret.append(jar_info_path)
+    else:
+      ret.append(jar_path)
+  return ret
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--jar-info-path', required=True, help='Output .jar.info file')
+  parser.add_argument(
+      '--pak-info-path', required=True, help='Output .pak.info file')
+  parser.add_argument(
+      '--res-info-path', required=True, help='Output .res.info file')
+  parser.add_argument(
+      '--jar-files',
+      required=True,
+      action='append',
+      help='GN-list of .jar file paths')
+  parser.add_argument(
+      '--assets',
+      required=True,
+      action='append',
+      help='GN-list of files to add as assets in the form '
+      '"srcPath:zipPath", where ":zipPath" is optional.')
+  parser.add_argument(
+      '--uncompressed-assets',
+      required=True,
+      action='append',
+      help='Same as --assets, except disables compression.')
+  parser.add_argument(
+      '--in-res-info-path',
+      required=True,
+      action='append',
+      help='Paths to .ap_.info files')
+
+  options = parser.parse_args(args)
+
+  options.jar_files = build_utils.ParseGnList(options.jar_files)
+  options.assets = build_utils.ParseGnList(options.assets)
+  options.uncompressed_assets = build_utils.ParseGnList(
+      options.uncompressed_assets)
+
+  jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files))
+  pak_inputs = _PakInfoPathsForAssets(options.assets +
+                                      options.uncompressed_assets)
+  res_inputs = options.in_res_info_path
+
+  # Just create the info files every time. See https://crbug.com/1045024
+  _MergeJarInfoFiles(options.jar_info_path, jar_inputs)
+  _MergePakInfoFiles(options.pak_info_path, pak_inputs)
+  _MergeResInfoFiles(options.res_info_path, res_inputs)
+
+  all_inputs = jar_inputs + pak_inputs + res_inputs
+  build_utils.WriteDepfile(options.depfile,
+                           options.jar_info_path,
+                           inputs=all_inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/create_size_info_files.pydeps b/src/build/android/gyp/create_size_info_files.pydeps
new file mode 100644
index 0000000..1a69c55
--- /dev/null
+++ b/src/build/android/gyp/create_size_info_files.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../gn_helpers.py
+create_size_info_files.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
diff --git a/src/build/android/gyp/create_ui_locale_resources.py b/src/build/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 0000000..772dab7
--- /dev/null
+++ b/src/build/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings!
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_locale', whose value will be the matching Chromium locale name.
+E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+    0,
+    os.path.join(
+        os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with strings defined by
+# other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="current_detected_ui_locale_name">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+  return _TEMPLATE.format(resource_text=locale)
+
+
+def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
+  locale_data = _GenerateLocaleStringsXml(locale)
+  if android_locale:
+    zip_path = 'values-%s/strings.xml' % android_locale
+  else:
+    zip_path = 'values/strings.xml'
+  build_utils.AddToZipHermetic(
+      out_zip, zip_path, data=locale_data, compress=False)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument(
+      '--locale-list',
+      required=True,
+      help='GN-list of Chrome-specific locale names.')
+  parser.add_argument(
+      '--output-zip', required=True, help='Output zip archive path.')
+
+  args = parser.parse_args()
+
+  locale_list = build_utils.ParseGnList(args.locale_list)
+  if not locale_list:
+    raise Exception('Locale list cannot be empty!')
+
+  with build_utils.AtomicOutput(args.output_zip) as tmp_file:
+    with zipfile.ZipFile(tmp_file, 'w') as out_zip:
+      # First, write the default value, since aapt requires one.
+      _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
+
+      for locale in locale_list:
+        android_locale = resource_utils.ToAndroidLocaleName(locale)
+        _AddLocaleResourceFileToZip(out_zip, android_locale, locale)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/create_ui_locale_resources.pydeps b/src/build/android/gyp/create_ui_locale_resources.pydeps
new file mode 100644
index 0000000..6bb98dd
--- /dev/null
+++ b/src/build/android/gyp/create_ui_locale_resources.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_ui_locale_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/desugar.py b/src/build/android/gyp/desugar.py
new file mode 100755
index 0000000..87eb159
--- /dev/null
+++ b/src/build/android/gyp/desugar.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--desugar-jar', required=True,
+                      help='Path to Desugar.jar.')
+  parser.add_argument('--input-jar', required=True,
+                      help='Jar input path to include .class files from.')
+  parser.add_argument('--output-jar', required=True,
+                      help='Jar output path.')
+  parser.add_argument('--classpath',
+                      action='append',
+                      required=True,
+                      help='Classpath.')
+  parser.add_argument('--bootclasspath', required=True,
+                      help='Path to javac bootclasspath interface jar.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options = parser.parse_args(args)
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+
+  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+      '-jar',
+      options.desugar_jar,
+      '--input',
+      options.input_jar,
+      '--output',
+      options.output_jar,
+      '--generate_base_classes_for_default_methods',
+      # Don't include try-with-resources files in every .jar. Instead, they
+      # are included via //third_party/bazel/desugar:desugar_runtime_java.
+      '--desugar_try_with_resources_omit_runtime_classes',
+  ]
+  for path in options.bootclasspath:
+    cmd += ['--bootclasspath_entry', path]
+  for path in options.classpath:
+    cmd += ['--classpath_entry', path]
+  build_utils.CheckOutput(
+      cmd,
+      print_stdout=False,
+      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
+      fail_on_output=options.warnings_as_errors)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile,
+                             options.output_jar,
+                             inputs=options.bootclasspath + options.classpath)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/desugar.pydeps b/src/build/android/gyp/desugar.pydeps
new file mode 100644
index 0000000..3e5c9ea
--- /dev/null
+++ b/src/build/android/gyp/desugar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
+../../gn_helpers.py
+desugar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dex.py b/src/build/android/gyp/dex.py
new file mode 100755
index 0000000..9664922
--- /dev/null
+++ b/src/build/android/gyp/dex.py
@@ -0,0 +1,644 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import zipalign
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
+
+import convert_dex_profile
+
+
+_IGNORE_WARNINGS = (
+    # Caused by Play Services:
+    r'Type `libcore.io.Memory` was not found',
+    # Caused by a missing final class in flogger:
+    r'Type `dalvik.system.VMStack` was not found',
+    # Caused by jacoco code coverage:
+    r'Type `java.lang.management.ManagementFactory` was not found',
+    # TODO(wnwen): Remove this after R8 version 3.0.26-dev:
+    r'Missing class sun.misc.Unsafe',
+    # Caused when the test apk and the apk under test do not have native libs.
+    r'Missing class org.chromium.build.NativeLibraries',
+    # Caused by internal annotation: https://crbug.com/1180222
+    r'Missing class com.google.errorprone.annotations.RestrictedInheritance',
+    # Caused by internal protobuf package: https://crbug.com/1183971
+    r'referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension',  # pylint: disable=line-too-long
+    # Caused by using Bazel desugar instead of D8 for desugar, since Bazel
+    # desugar doesn't preserve interfaces in the same way. This should be
+    # removed when D8 is used for desugaring.
+    r'Warning: Cannot emulate interface ',
+)
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--output', required=True, help='Dex output path.')
+  parser.add_argument(
+      '--class-inputs',
+      action='append',
+      help='GN-list of .jars with .class files.')
+  parser.add_argument(
+      '--class-inputs-filearg',
+      action='append',
+      help='GN-list of .jars with .class files (added to depfile).')
+  parser.add_argument(
+      '--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
+  parser.add_argument(
+      '--dex-inputs-filearg',
+      action='append',
+      help='GN-list of .jars with .dex files (added to depfile).')
+  parser.add_argument(
+      '--incremental-dir',
+      help='Path of directory to put intermediate dex files.')
+  parser.add_argument('--main-dex-rules-path',
+                      action='append',
+                      help='Path to main dex rules for multidex.')
+  parser.add_argument(
+      '--multi-dex',
+      action='store_true',
+      help='Allow multiple dex files within output.')
+  parser.add_argument('--library',
+                      action='store_true',
+                      help='Allow numerous dex files within output.')
+  parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
+  parser.add_argument('--skip-custom-d8',
+                      action='store_true',
+                      help='When rebuilding the CustomD8 jar, this may be '
+                      'necessary to avoid incompatibility with the new r8 '
+                      'jar.')
+  parser.add_argument('--custom-d8-jar-path',
+                      required=True,
+                      help='Path to our customized d8 jar.')
+  parser.add_argument('--desugar-dependencies',
+                      help='Path to store desugar dependencies.')
+  parser.add_argument('--desugar', action='store_true')
+  parser.add_argument(
+      '--bootclasspath',
+      action='append',
+      help='GN-list of bootclasspath. Needed for --desugar')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument('--show-desugar-default-interface-warnings',
+                      action='store_true',
+                      help='Enable desugaring warnings.')
+  parser.add_argument(
+      '--classpath',
+      action='append',
+      help='GN-list of full classpath. Needed for --desugar')
+  parser.add_argument(
+      '--release',
+      action='store_true',
+      help='Run D8 in release mode. Release mode maximizes main dex and '
+      'deletes non-essential line number information (vs. debug, which '
+      'minimizes main dex and keeps all line number information, and then '
+      'some).')
+  parser.add_argument(
+      '--min-api', help='Minimum Android API level compatibility.')
+  parser.add_argument('--force-enable-assertions',
+                      action='store_true',
+                      help='Forcefully enable javac generated assertion code.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--dump-inputs',
+                      action='store_true',
+                      help='Use when filing D8 bugs to capture inputs.'
+                      ' Stores inputs to d8inputs.zip')
+
+  group = parser.add_argument_group('Dexlayout')
+  group.add_argument(
+      '--dexlayout-profile',
+      help=('Text profile for dexlayout. If present, a dexlayout '
+            'pass will happen'))
+  group.add_argument(
+      '--profman-path',
+      help=('Path to ART profman binary. There should be a lib/ directory at '
+            'the same path with shared libraries (shared with dexlayout).'))
+  group.add_argument(
+      '--dexlayout-path',
+      help=('Path to ART dexlayout binary. There should be a lib/ directory at '
+            'the same path with shared libraries (shared with dexlayout).'))
+  group.add_argument('--dexdump-path', help='Path to dexdump binary.')
+  group.add_argument(
+      '--proguard-mapping-path',
+      help=('Path to proguard map from obfuscated symbols in the jar to '
+            'unobfuscated symbols present in the code. If not present, the jar '
+            'is assumed not to be obfuscated.'))
+
+  options = parser.parse_args(args)
+
+  if options.dexlayout_profile:
+    build_utils.CheckOptions(
+        options,
+        parser,
+        required=('profman_path', 'dexlayout_path', 'dexdump_path'))
+  elif options.proguard_mapping_path is not None:
+    parser.error('Unexpected proguard mapping without dexlayout')
+
+  if options.main_dex_rules_path and not options.multi_dex:
+    parser.error('--main-dex-rules-path is unused if multidex is not enabled')
+
+  options.class_inputs = build_utils.ParseGnList(options.class_inputs)
+  options.class_inputs_filearg = build_utils.ParseGnList(
+      options.class_inputs_filearg)
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
+  options.dex_inputs_filearg = build_utils.ParseGnList(
+      options.dex_inputs_filearg)
+
+  return options
+
+
+def CreateStderrFilter(show_desugar_default_interface_warnings):
+  def filter_stderr(output):
+    patterns = list(_IGNORE_WARNINGS)
+
+    # When using Bazel's Desugar tool to desugar lambdas and interface methods,
+    # we do not provide D8 with a classpath, which causes a lot of warnings from
+    # D8's default interface desugaring pass. Not having a classpath makes
+    # incremental dexing much more effective. D8 still does backported method
+    # desugaring.
+    # These warnings are also turned off when bytecode checks are turned off.
+    if not show_desugar_default_interface_warnings:
+      patterns += ['default or static interface methods']
+
+    combined_pattern = '|'.join(re.escape(p) for p in patterns)
+    output = build_utils.FilterLines(output, combined_pattern)
+
+    # Each warning has a prefix line of the file it's from. If we've filtered
+    # out the warning, then also filter out the file header.
+    # E.g.:
+    # Warning in path/to/Foo.class:
+    #   Error message #1 indented here.
+    #   Error message #2 indented here.
+    output = re.sub(r'^Warning in .*?:\n(?!  )', '', output, flags=re.MULTILINE)
+    return output
+
+  return filter_stderr
+
+
+def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors,
+           show_desugar_default_interface_warnings):
+  dex_cmd = dex_cmd + ['--output', output_path] + input_paths
+
+  stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings)
+
+  with tempfile.NamedTemporaryFile(mode='w') as flag_file:
+    # Chosen arbitrarily. Needed to avoid command-line length limits.
+    MAX_ARGS = 50
+    if len(dex_cmd) > MAX_ARGS:
+      flag_file.write('\n'.join(dex_cmd[MAX_ARGS:]))
+      flag_file.flush()
+      dex_cmd = dex_cmd[:MAX_ARGS]
+      dex_cmd.append('@' + flag_file.name)
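+      # Overflow arguments are passed via an '@' response file, one per line.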
+
+    # stdout sometimes spams with things like:
+    # Stripped invalid locals information from 1 method.
+    build_utils.CheckOutput(dex_cmd,
+                            stderr_filter=stderr_filter,
+                            fail_on_output=warnings_as_errors)
+
+
+def _EnvWithArtLibPath(binary_path):
+  """Return an environment dictionary for ART host shared libraries.
+
+  Args:
+    binary_path: the path to an ART host binary.
+
+  Returns:
+    An environment dictionary where LD_LIBRARY_PATH has been augmented with the
+    shared library path for the binary. This assumes that there is a lib/
+    directory in the same location as the binary.
+  """
+  lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
+  env = os.environ.copy()
+  libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
+  libraries.append(lib_path)
+  env['LD_LIBRARY_PATH'] = ':'.join(libraries)
+  return env
+
+
+def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
+  """Create a binary profile for dexlayout.
+
+  Args:
+    text_profile: The ART text profile that will be converted to a binary
+        profile.
+    input_dex: The input dex file to layout.
+    profman_path: Path to the profman binary.
+    temp_dir: Directory to work in.
+
+  Returns:
+    The name of the binary profile, which will live in temp_dir.
+  """
+  binary_profile = os.path.join(
+      temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
+  open(binary_profile, 'w').close()  # Touch binary_profile.
+  profman_cmd = [profman_path,
+                 '--apk=' + input_dex,
+                 '--dex-location=' + input_dex,
+                 '--create-profile-from=' + text_profile,
+                 '--reference-profile-file=' + binary_profile]
+  build_utils.CheckOutput(
+    profman_cmd,
+    env=_EnvWithArtLibPath(profman_path),
+    stderr_filter=lambda output:
+        build_utils.FilterLines(output, '|'.join(
+            [r'Could not find (method_id|proto_id|name):',
+             r'Could not create type list'])))
+  return binary_profile
+
+
+def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
+  """Layout a dexfile using a profile.
+
+  Args:
+    binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
+    input_dex: The dex file used to create the binary profile.
+    dexlayout_path: Path to the dexlayout binary.
+    temp_dir: Directory to work in.
+
+  Returns:
+    List of output files produced by dexlayout. This will be one if the input
+    was a single dexfile, or multiple files if the input was a multidex
+    zip. These output files are located in temp_dir.
+  """
+  dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
+  os.mkdir(dexlayout_output_dir)
+  dexlayout_cmd = [
+      dexlayout_path,
+      '-u',  # Update checksum.
+      '-p', binary_profile,
+      '-w', dexlayout_output_dir,
+      input_dex,
+  ]
+  build_utils.CheckOutput(
+      dexlayout_cmd,
+      env=_EnvWithArtLibPath(dexlayout_path),
+      stderr_filter=lambda output:
+          build_utils.FilterLines(output,
+                                  r'Can.t mmap dex file.*please zipalign'))
+  output_files = os.listdir(dexlayout_output_dir)
+  if not output_files:
+    raise Exception('dexlayout unexpectedly produced no output')
+  return sorted([os.path.join(dexlayout_output_dir, f) for f in output_files])
+
+
+def _ZipMultidex(file_dir, dex_files):
+  """Zip dex files into a multidex.
+
+  Args:
+    file_dir: The directory into which to write the output.
+    dex_files: The dexfiles forming the multizip. Their names must end with
+      classes.dex, classes2.dex, ...
+
+  Returns:
+    The name of the multidex file, which will live in file_dir.
+  """
+  ordered_files = []  # List of (archive name, file name)
+  for f in dex_files:
+    if f.endswith('dex.jar'):
+      ordered_files.append(('classes.dex', f))
+      break
+  if not ordered_files:
+    raise Exception('Could not find classes.dex multidex file in %s' %
+                    dex_files)
+  for dex_idx in range(2, len(dex_files) + 1):
+    archive_name = 'classes%d.dex' % dex_idx
+    for f in dex_files:
+      if f.endswith(archive_name):
+        ordered_files.append((archive_name, f))
+        break
+    else:
+      raise Exception('Could not find classes%d.dex multidex file in %s' %
+                      (dex_idx, dex_files))
+  if len(set(f[1] for f in ordered_files)) != len(ordered_files):
+    raise Exception('Unexpected clashing filenames for multidex in %s' %
+                    dex_files)
+
+  zip_name = os.path.join(file_dir, 'multidex_classes.zip')
+  build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
+                     for archive_name, file_name in ordered_files),
+                    zip_name)
+  return zip_name
+
+
+def _ZipAligned(dex_files, output_path):
+  """Creates a .dex.jar with 4-byte aligned files.
+
+  Args:
+    dex_files: List of dex files.
+    output_path: The output file in which to write the zip.
+  """
+  with zipfile.ZipFile(output_path, 'w') as z:
+    for i, dex_file in enumerate(dex_files):
+      name = 'classes{}.dex'.format(i + 1 if i > 0 else '')
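+      # Yields classes.dex for i == 0, then classes2.dex, classes3.dex, ...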
+      zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4)
+
+
+def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
+  if options.proguard_mapping_path is not None:
+    matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
+    convert_dex_profile.ObfuscateProfile(
+        options.dexlayout_profile, tmp_dex_output,
+        options.proguard_mapping_path, options.dexdump_path, matching_profile)
+  else:
+    logging.warning('No obfuscation for %s', options.dexlayout_profile)
+    matching_profile = options.dexlayout_profile
+  binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
+                                        options.profman_path, tmp_dir)
+  output_files = _LayoutDex(binary_profile, tmp_dex_output,
+                            options.dexlayout_path, tmp_dir)
+  if len(output_files) > 1:
+    return _ZipMultidex(tmp_dir, output_files)
+
+  if zipfile.is_zipfile(output_files[0]):
+    return output_files[0]
+
+  final_output = os.path.join(tmp_dir, 'dex_classes.zip')
+  _ZipAligned(output_files, final_output)
+  return final_output
+
+
+def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
+  tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
+  needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
+  needs_dexmerge = output.endswith('.dex') or not (options and options.library)
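+  # Dexing is needed when any input still holds .class files; merging is
+  # skipped only for --library outputs, which may keep many per-class .dex
+  # files zipped individually.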
+  if needs_dexing or needs_dexmerge:
+    if options and options.main_dex_rules_path:
+      for main_dex_rule in options.main_dex_rules_path:
+        dex_cmd = dex_cmd + ['--main-dex-rules', main_dex_rule]
+
+    tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
+    os.mkdir(tmp_dex_dir)
+
+    _RunD8(dex_cmd, d8_inputs, tmp_dex_dir,
+           (not options or options.warnings_as_errors),
+           (options and options.show_desugar_default_interface_warnings))
+    logging.debug('Performed dex merging')
+
+    dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]
+
+    if output.endswith('.dex'):
+      if len(dex_files) > 1:
+        raise Exception('%d files created, expected 1' % len(dex_files))
+      tmp_dex_output = dex_files[0]
+    else:
+      _ZipAligned(sorted(dex_files), tmp_dex_output)
+  else:
+    # Skip dexmerger. Just put all incrementals into the .jar individually.
+    _ZipAligned(sorted(d8_inputs), tmp_dex_output)
+    logging.debug('Quick-zipped %d files', len(d8_inputs))
+
+  if options and options.dexlayout_profile:
+    tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
+
+  # The dex file is complete and can be moved out of tmp_dir.
+  shutil.move(tmp_dex_output, output)
+
+
+def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
+  """Returns a list of all intermediate dex file paths."""
+  dex_files = []
+  for jar in class_inputs:
+    with zipfile.ZipFile(jar, 'r') as z:
+      for subpath in z.namelist():
+        if subpath.endswith('.class'):
+          subpath = subpath[:-5] + 'dex'
+          dex_files.append(os.path.join(incremental_dir, subpath))
+  return dex_files
+
+
+def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
+  """Deletes intermediate .dex files that are no longer needed."""
+  all_files = build_utils.FindInDirectory(dex_dir)
+  desired_files = set(dex_files)
+  for path in all_files:
+    if path not in desired_files:
+      os.unlink(path)
+
+
+def _ParseDesugarDeps(desugar_dependencies_file):
+  dependents_from_dependency = collections.defaultdict(set)
+  if desugar_dependencies_file and os.path.exists(desugar_dependencies_file):
+    with open(desugar_dependencies_file, 'r') as f:
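+      # Each line maps "<dependent> -> <dependency>"; e.g. a (hypothetical)
+      # line "org/Foo.class -> lib.jar:org/Iface.class" marks Foo for
+      # re-desugaring whenever Iface changes.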
+      for line in f:
+        dependent, dependency = line.rstrip().split(' -> ')
+        dependents_from_dependency[dependency].add(dependent)
+  return dependents_from_dependency
+
+
+def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file,
+                                   class_inputs, classpath):
+  dependents_from_dependency = _ParseDesugarDeps(desugar_dependencies_file)
+  required_classes = set()
+  # Gather classes that need to be re-desugared from changes in the classpath.
+  for jar in classpath:
+    for subpath in changes.IterChangedSubpaths(jar):
+      dependency = '{}:{}'.format(jar, subpath)
+      required_classes.update(dependents_from_dependency[dependency])
+
+  for jar in class_inputs:
+    for subpath in changes.IterChangedSubpaths(jar):
+      required_classes.update(dependents_from_dependency[subpath])
+
+  return required_classes
+
+
+def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set):
+  classes_list = []
+  for jar in class_inputs:
+    if changes:
+      changed_class_list = (set(changes.IterChangedSubpaths(jar))
+                            | required_classes_set)
+      predicate = lambda x: x in changed_class_list and x.endswith('.class')
+    else:
+      predicate = lambda x: x.endswith('.class')
+
+    classes_list.extend(
+        build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
+  return classes_list
+
+
+def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
+  # Create temporary directory for classes to be extracted to.
+  tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
+  os.mkdir(tmp_extract_dir)
+
+  # Do a full rebuild when changes occur in non-input files.
+  allowed_changed = set(options.class_inputs)
+  allowed_changed.update(options.dex_inputs)
+  allowed_changed.update(options.classpath)
+  strings_changed = changes.HasStringChanges()
+  non_direct_input_changed = next(
+      (p for p in changes.IterChangedPaths() if p not in allowed_changed), None)
+
+  if strings_changed or non_direct_input_changed:
+    logging.debug('Full dex required: strings_changed=%s path_changed=%s',
+                  strings_changed, non_direct_input_changed)
+    changes = None
+
+  if changes:
+    required_desugar_classes_set = _ComputeRequiredDesugarClasses(
+        changes, options.desugar_dependencies, options.class_inputs,
+        options.classpath)
+    logging.debug('Class files needing re-desugar: %d',
+                  len(required_desugar_classes_set))
+  else:
+    required_desugar_classes_set = set()
+  class_files = _ExtractClassFiles(changes, tmp_extract_dir,
+                                   options.class_inputs,
+                                   required_desugar_classes_set)
+  logging.debug('Extracted class files: %d', len(class_files))
+
+  # If the only change is deleting a file, class_files will be empty.
+  if class_files:
+    # Dex necessary classes into intermediate dex files.
+    dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
+    if options.desugar_dependencies and not options.skip_custom_d8:
+      dex_cmd += ['--file-tmp-prefix', tmp_extract_dir]
+    _RunD8(dex_cmd, class_files, options.incremental_dir,
+           options.warnings_as_errors,
+           options.show_desugar_default_interface_warnings)
+    logging.debug('Dexed class files.')
+
+
+def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
+  logging.debug('_OnStaleMd5')
+  with build_utils.TempDir() as tmp_dir:
+    if options.incremental_dir:
+      # Create directory for all intermediate dex files.
+      if not os.path.exists(options.incremental_dir):
+        os.makedirs(options.incremental_dir)
+
+      _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
+      logging.debug('Stale files deleted')
+      _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)
+
+    _CreateFinalDex(
+        final_dex_inputs, options.output, tmp_dir, dex_cmd, options=options)
+
+
+def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar,
+                                  min_api):
+  dex_cmd = build_utils.JavaCmd(verify=False) + [
+      '-cp',
+      r8_jar_path,
+      'com.android.tools.r8.D8',
+      '--min-api',
+      min_api,
+  ]
+  with build_utils.TempDir() as tmp_dir:
+    _CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd)
+
+
+def main(args):
+  build_utils.InitLogging('DEX_DEBUG')
+  options = _ParseArgs(args)
+
+  options.class_inputs += options.class_inputs_filearg
+  options.dex_inputs += options.dex_inputs_filearg
+
+  input_paths = options.class_inputs + options.dex_inputs
+  input_paths.append(options.r8_jar_path)
+  input_paths.append(options.custom_d8_jar_path)
+  if options.main_dex_rules_path:
+    input_paths.extend(options.main_dex_rules_path)
+
+  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg
+
+  output_paths = [options.output]
+
+  track_subpaths_allowlist = []
+  if options.incremental_dir:
+    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
+        options.class_inputs, options.incremental_dir)
+    output_paths += final_dex_inputs
+    track_subpaths_allowlist += options.class_inputs
+  else:
+    final_dex_inputs = list(options.class_inputs)
+  final_dex_inputs += options.dex_inputs
+
+  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors)
+
+  if options.dump_inputs:
+    dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip']
+
+  if not options.skip_custom_d8:
+    dex_cmd += [
+        '-cp',
+        '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path),
+        'org.chromium.build.CustomD8',
+    ]
+  else:
+    dex_cmd += [
+        '-cp',
+        options.r8_jar_path,
+        'com.android.tools.r8.D8',
+    ]
+
+  if options.release:
+    dex_cmd += ['--release']
+  if options.min_api:
+    dex_cmd += ['--min-api', options.min_api]
+
+  if not options.desugar:
+    dex_cmd += ['--no-desugaring']
+  elif options.classpath:
+    # The classpath is used by D8 for interface desugaring.
+    if options.desugar_dependencies and not options.skip_custom_d8:
+      dex_cmd += ['--desugar-dependencies', options.desugar_dependencies]
+      if track_subpaths_allowlist:
+        track_subpaths_allowlist += options.classpath
+    depfile_deps += options.classpath
+    input_paths += options.classpath
+    # Still pass the entire classpath in case a new dependency is needed by
+    # desugar, so that desugar_dependencies will be updated for the next build.
+    for path in options.classpath:
+      dex_cmd += ['--classpath', path]
+
+  if options.classpath or options.main_dex_rules_path:
+    # --main-dex-rules requires bootclasspath.
+    dex_cmd += ['--lib', build_utils.JAVA_HOME]
+    for path in options.bootclasspath:
+      dex_cmd += ['--lib', path]
+    depfile_deps += options.bootclasspath
+    input_paths += options.bootclasspath
+
+  if options.desugar_jdk_libs_json:
+    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
+  if options.force_enable_assertions:
+    dex_cmd += ['--force-enable-assertions']
+
+  # The changes feature from md5_check allows us to only re-dex the class files
+  # that have changed and the class files that need to be re-desugared by D8.
+  md5_check.CallAndWriteDepfileIfStale(
+      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
+      options,
+      input_paths=input_paths,
+      input_strings=dex_cmd + [bool(options.incremental_dir)],
+      output_paths=output_paths,
+      pass_changes=True,
+      track_subpaths_allowlist=track_subpaths_allowlist,
+      depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/dex.pydeps b/src/build/android/gyp/dex.pydeps
new file mode 100644
index 0000000..23856f3
--- /dev/null
+++ b/src/build/android/gyp/dex.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+dex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/src/build/android/gyp/dex_jdk_libs.py b/src/build/android/gyp/dex_jdk_libs.py
new file mode 100755
index 0000000..6304779
--- /dev/null
+++ b/src/build/android/gyp/dex_jdk_libs.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--output', required=True, help='Dex output path.')
+  parser.add_argument('--r8-path', required=True, help='Path to R8 jar.')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument(
+      '--desugar-jdk-libs-jar', help='Path to desugar_jdk_libs.jar.')
+  parser.add_argument('--desugar-jdk-libs-configuration-jar',
+                      help='Path to desugar_jdk_libs_configuration.jar.')
+  parser.add_argument('--min-api', help='minSdkVersion', required=True)
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options = parser.parse_args(args)
+  return options
+
+
+def DexJdkLibJar(r8_path,
+                 min_api,
+                 desugar_jdk_libs_json,
+                 desugar_jdk_libs_jar,
+                 desugar_jdk_libs_configuration_jar,
+                 output,
+                 warnings_as_errors,
+                 config_paths=None):
+  # TODO(agrieve): Spews a lot of stderr about missing classes.
+  with build_utils.TempDir() as tmp_dir:
+    cmd = build_utils.JavaCmd(warnings_as_errors) + [
+        '-cp',
+        r8_path,
+        'com.android.tools.r8.L8',
+        '--min-api',
+        min_api,
+        '--lib',
+        build_utils.JAVA_HOME,
+        '--desugared-lib',
+        desugar_jdk_libs_json,
+    ]
+
+    # If no desugaring is required, no keep rules are generated, and the keep
+    # file will not be created.
+    if config_paths is not None:
+      for path in config_paths:
+        cmd += ['--pg-conf', path]
+
+    cmd += [
+        '--output', tmp_dir, desugar_jdk_libs_jar,
+        desugar_jdk_libs_configuration_jar
+    ]
+
+    build_utils.CheckOutput(cmd,
+                            print_stdout=True,
+                            fail_on_output=warnings_as_errors)
+    if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
+      raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')
+
+    # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used
+    # at all.
+    if os.path.exists(os.path.join(tmp_dir, 'classes.dex')):
+      shutil.move(os.path.join(tmp_dir, 'classes.dex'), output)
+      return True
+    return False
+
+
+def main(args):
+  options = _ParseArgs(args)
+  DexJdkLibJar(options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+               options.desugar_jdk_libs_jar,
+               options.desugar_jdk_libs_configuration_jar, options.output,
+               options.warnings_as_errors)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/dex_jdk_libs.pydeps b/src/build/android/gyp/dex_jdk_libs.pydeps
new file mode 100644
index 0000000..28d181f
--- /dev/null
+++ b/src/build/android/gyp/dex_jdk_libs.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex_jdk_libs.pydeps build/android/gyp/dex_jdk_libs.py
+../../gn_helpers.py
+dex_jdk_libs.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dexsplitter.py b/src/build/android/gyp/dexsplitter.py
new file mode 100755
index 0000000..149e994
--- /dev/null
+++ b/src/build/android/gyp/dexsplitter.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseOptions(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--depfile', help='Path to the depfile to write to.')
+  parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
+  parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
+  parser.add_argument(
+      '--input-dex-zip', help='Path to dex files in zip being split.')
+  parser.add_argument(
+      '--proguard-mapping-file', help='Path to proguard mapping file.')
+  parser.add_argument(
+      '--feature-name',
+      action='append',
+      dest='feature_names',
+      help='The name of the feature module.')
+  parser.add_argument(
+      '--feature-jars',
+      action='append',
+      help='GN list of paths to jars which comprise the corresponding feature.')
+  parser.add_argument(
+      '--dex-dest',
+      action='append',
+      dest='dex_dests',
+      help='Destination for dex file of the corresponding feature.')
+  options = parser.parse_args(args)
+
+  assert len(options.feature_names) == len(options.feature_jars) and len(
+      options.feature_names) == len(options.dex_dests)
+  options.features = {}
+  for i, name in enumerate(options.feature_names):
+    options.features[name] = build_utils.ParseGnList(options.feature_jars[i])
+
+  return options
+
+
+def _RunDexsplitter(options, output_dir):
+  cmd = build_utils.JavaCmd() + [
+      '-cp',
+      options.r8_path,
+      'com.android.tools.r8.dexsplitter.DexSplitter',
+      '--output',
+      output_dir,
+      '--proguard-map',
+      options.proguard_mapping_file,
+  ]
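+  # An illustrative sketch of the final command (paths hypothetical; the
+  # --base-jar, --feature-jar and --input flags are appended below):
+  #   java -cp r8.jar com.android.tools.r8.dexsplitter.DexSplitter \
+  #       --output <tmp> --proguard-map chrome.mapping \
+  #       --base-jar base.jar --feature-jar vr.jar:vr --input classes.dex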
+
+  for base_jar in options.features['base']:
+    cmd += ['--base-jar', base_jar]
+
+  base_jars_lookup = set(options.features['base'])
+  for feature in options.features:
+    if feature == 'base':
+      continue
+    for feature_jar in options.features[feature]:
+      if feature_jar not in base_jars_lookup:
+        cmd += ['--feature-jar', feature_jar + ':' + feature]
+
+  with build_utils.TempDir() as temp_dir:
+    unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
+    for file_name in unzipped_files:
+      cmd += ['--input', file_name]
+    build_utils.CheckOutput(cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseOptions(args)
+
+  input_paths = [options.input_dex_zip]
+  for feature_jars in options.features.values():
+    for feature_jar in feature_jars:
+      input_paths.append(feature_jar)
+
+  with build_utils.TempDir() as dexsplitter_output_dir:
+    curr_location_to_dest = []
+    if len(options.features) == 1:
+      # Don't run dexsplitter since it needs at least 1 feature module.
+      curr_location_to_dest.append((options.input_dex_zip,
+                                    options.dex_dests[0]))
+    else:
+      _RunDexsplitter(options, dexsplitter_output_dir)
+
+      for i, dest in enumerate(options.dex_dests):
+        module_dex_file = os.path.join(dexsplitter_output_dir,
+                                       options.feature_names[i], 'classes.dex')
+        if os.path.exists(module_dex_file):
+          curr_location_to_dest.append((module_dex_file, dest))
+        else:
+          module_dex_file += '.jar'
+          assert os.path.exists(
+              module_dex_file), 'Dexsplitter tool output not found.'
+          curr_location_to_dest.append((module_dex_file, dest))
+
+    for curr_location, dest in curr_location_to_dest:
+      with build_utils.AtomicOutput(dest) as f:
+        if curr_location.endswith('.jar'):
+          if dest.endswith('.jar'):
+            shutil.copy(curr_location, f.name)
+          else:
+            with zipfile.ZipFile(curr_location, 'r') as z:
+              namelist = z.namelist()
+              assert len(namelist) == 1, (
+                  'Unzipping to single dex file, but not single dex file in ' +
+                  options.input_dex_zip)
+              z.extract(namelist[0], f.name)
+        else:
+          if dest.endswith('.jar'):
+            build_utils.ZipDir(
+                f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
+          else:
+            shutil.move(curr_location, f.name)
+
+  build_utils.Touch(options.stamp)
+  build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/dexsplitter.pydeps b/src/build/android/gyp/dexsplitter.pydeps
new file mode 100644
index 0000000..cefc572
--- /dev/null
+++ b/src/build/android/gyp/dexsplitter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+../../gn_helpers.py
+dexsplitter.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/dist_aar.py b/src/build/android/gyp/dist_aar.py
new file mode 100755
index 0000000..7f0de1d
--- /dev/null
+++ b/src/build/android/gyp/dist_aar.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an Android .aar file."""
+
+import argparse
+import os
+import posixpath
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import filter_zip
+from util import build_utils
+
+
+_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+def _MergeRTxt(r_paths, include_globs):
+  """Merging the given R.txt files and returns them as a string."""
+  all_lines = set()
+  for r_path in r_paths:
+    if include_globs and not build_utils.MatchesGlob(r_path, include_globs):
+      continue
+    with open(r_path) as f:
+      all_lines.update(f.readlines())
+  return ''.join(sorted(all_lines))
+
+
+def _MergeProguardConfigs(proguard_configs):
+  """Merging the given proguard config files and returns them as a string."""
+  ret = []
+  for config in proguard_configs:
+    ret.append('# FROM: {}'.format(config))
+    with open(config) as f:
+      ret.append(f.read())
+  return '\n'.join(ret)
+
+
+def _AddResources(aar_zip, resource_zips, include_globs):
+  """Adds all resource zips to the given aar_zip.
+
+  Ensures all res/values/* files have unique names by adding a per-zip suffix.
+  """
+  for i, path in enumerate(resource_zips):
+    if include_globs and not build_utils.MatchesGlob(path, include_globs):
+      continue
+    with zipfile.ZipFile(path) as res_zip:
+      for info in res_zip.infolist():
+        data = res_zip.read(info)
+        dirname, basename = posixpath.split(info.filename)
+        if 'values' in dirname:
+          root, ext = os.path.splitext(basename)
+          basename = '{}_{}{}'.format(root, i, ext)
+          info.filename = posixpath.join(dirname, basename)
+        info.filename = posixpath.join('res', info.filename)
+        aar_zip.writestr(info, data)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--output', required=True, help='Path to output aar.')
+  parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
+  parser.add_argument('--dependencies-res-zips', required=True,
+                      help='GN list of resource zips')
+  parser.add_argument('--r-text-files', required=True,
+                      help='GN list of R.txt files to merge')
+  parser.add_argument('--proguard-configs', required=True,
+                      help='GN list of ProGuard flag files to merge.')
+  parser.add_argument(
+      '--android-manifest',
+      help='Path to AndroidManifest.xml to include.',
+      default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml'))
+  parser.add_argument('--native-libraries', default='',
+                      help='GN list of native libraries. If non-empty then '
+                      'ABI must be specified.')
+  parser.add_argument('--abi',
+                      help='ABI (e.g. armeabi-v7a) for native libraries.')
+  parser.add_argument(
+      '--jar-excluded-globs',
+      help='GN-list of globs for paths to exclude in jar.')
+  parser.add_argument(
+      '--jar-included-globs',
+      help='GN-list of globs for paths to include in jar.')
+  parser.add_argument(
+      '--resource-included-globs',
+      help='GN-list of globs for paths to include in R.txt and resources zips.')
+
+  options = parser.parse_args(args)
+
+  if options.native_libraries and not options.abi:
+    parser.error('You must provide --abi if you have native libs')
+
+  options.jars = build_utils.ParseGnList(options.jars)
+  options.dependencies_res_zips = build_utils.ParseGnList(
+      options.dependencies_res_zips)
+  options.r_text_files = build_utils.ParseGnList(options.r_text_files)
+  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  options.native_libraries = build_utils.ParseGnList(options.native_libraries)
+  options.jar_excluded_globs = build_utils.ParseGnList(
+      options.jar_excluded_globs)
+  options.jar_included_globs = build_utils.ParseGnList(
+      options.jar_included_globs)
+  options.resource_included_globs = build_utils.ParseGnList(
+      options.resource_included_globs)
+
+  with tempfile.NamedTemporaryFile(delete=False) as staging_file:
+    try:
+      with zipfile.ZipFile(staging_file.name, 'w') as z:
+        build_utils.AddToZipHermetic(
+            z, 'AndroidManifest.xml', src_path=options.android_manifest)
+
+        path_transform = filter_zip.CreatePathTransform(
+            options.jar_excluded_globs, options.jar_included_globs, [])
+        with tempfile.NamedTemporaryFile() as jar_file:
+          build_utils.MergeZips(
+              jar_file.name, options.jars, path_transform=path_transform)
+          build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
+
+        build_utils.AddToZipHermetic(
+            z,
+            'R.txt',
+            data=_MergeRTxt(options.r_text_files,
+                            options.resource_included_globs))
+        build_utils.AddToZipHermetic(z, 'public.txt', data='')
+
+        if options.proguard_configs:
+          build_utils.AddToZipHermetic(
+              z, 'proguard.txt',
+              data=_MergeProguardConfigs(options.proguard_configs))
+
+        _AddResources(z, options.dependencies_res_zips,
+                      options.resource_included_globs)
+
+        for native_library in options.native_libraries:
+          libname = os.path.basename(native_library)
+          build_utils.AddToZipHermetic(
+              z, os.path.join('jni', options.abi, libname),
+              src_path=native_library)
+    except:
+      os.unlink(staging_file.name)
+      raise
+    shutil.move(staging_file.name, options.output)
+
+  if options.depfile:
+    all_inputs = (options.jars + options.dependencies_res_zips +
+                  options.r_text_files + options.proguard_configs)
+    build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/dist_aar.pydeps b/src/build/android/gyp/dist_aar.pydeps
new file mode 100644
index 0000000..3182580
--- /dev/null
+++ b/src/build/android/gyp/dist_aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../gn_helpers.py
+dist_aar.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/extract_unwind_tables.py b/src/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 0000000..25c3130
--- /dev/null
+++ b/src/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables in from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered by function
+address. The output file only contains rows that match the most common rule
+type in the CFI table, to reduce the output size and keep the data compact.
+See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <val> +".
+2. The RA rules should be of postfix form "CFA -<val> + ^".
+Note: breakpad represents dereferencing address with '^' operator.
+
+The output file has 2 tables, UNW_INDEX and UNW_DATA, inspired by the ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes giving the number of entries in UNW_INDEX,
+followed by the UNW_INDEX table and then the UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+  1. First column 4 byte rows of all the function start address as offset from
+     start of the binary, in sorted order.
+  2. For each function addr, the second column contains 2 byte indices in order.
+     The indices are offsets (in count of 2 bytes) of the CFI data from start of
+     UNW_DATA.
+The last entry in the table always contains CANT_UNWIND index to specify the
+end address of the last function.
+
+UNW_DATA contains the data of all the functions. Each function's data contains
+N rows. The data found at the offset pointed to from UNW_INDEX is:
+  2 bytes: N - number of rows that belong to current function.
+  N * 4 bytes: N rows of data. 16 bits : Address offset from function start.
+                               14 bits : CFA offset / 4.
+                                2 bits : RA offset / 4.
+
+A function is not added to the unwind table under the following conditions:
+C1. If length of the function code (number of instructions) is greater than
+    0xFFFF (2 byte address span). This is because we use 16 bits to refer to
+    offset of instruction from start of the address.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+    use 14 bits to denote CFA offset (last 2 bits are 0).
+C3. If the Return Address is stored at an offset >= 16 from the CFA. Some
+    functions which have variable arguments can have an offset of up to 16.
+    TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since
+    we never have 0.
+C4: Some functions do not have unwind information defined in dwarf info. These
+    functions have index value CANT_UNWIND(0xFFFF) in UNW_INDEX table.
+
+
+Usage:
+  extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+      --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+  """Writes a 32 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<L', val))
+
+
+def _Write2Bytes(output_file, val):
+  """Writes a 16 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<H', val))
+
+
+def _FindRuleForRegister(cfi_row, reg):
+  """Returns the postfix expression as string for a given register.
+
+  Breakpad CFI row format specifies rules for unwinding each register in postfix
+  expression form separated by space. Each rule starts with register name and a
+  colon. Eg: "CFI R1: <rule> R2: <rule>".
+  """
+  out = []
+  found_register = False
+  for part in cfi_row:
+    if found_register:
+      if part[-1] == ':':
+        break
+      out.append(part)
+    elif part == reg + ':':
+      found_register = True
+  return ' '.join(out)
+
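+# For example (illustrative): given the row
+#   "STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^"
+# _FindRuleForRegister(row.split(), '.cfa') returns 'sp 12 +' and
+# _FindRuleForRegister(row.split(), '.ra') returns '.cfa -8 + ^'.
+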
+
+def _GetCfaAndRaOffset(cfi_row):
+  """Returns a tuple with 2 numbers (cfa_offset, ra_offset).
+
+  Returns right values if rule matches the predefined criteria. Returns (0, 0)
+  otherwise. The criteria for CFA rule is postfix form "SP <val> +" and RA rule
+  is postfix form "CFA -<val> + ^".
+  """
+  cfa_offset = 0
+  ra_offset = 0
+  cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
+  ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)
+  if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
+    cfa_offset = int(cfa_rule.split()[1], 10)
+  if ra_rule:
+    if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
+      return (0, 0)
+    ra_offset = -1 * int(ra_rule.split()[1], 10)
+  return (cfa_offset, ra_offset)
+
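+# Continuing the example above (illustrative): cfa_rule 'sp 12 +' and ra_rule
+# '.cfa -8 + ^' yield _GetCfaAndRaOffset(row.split()) == (12, 8).
+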
+
+def _GetAllCfiRows(symbol_file):
+  """Returns parsed CFI data from given symbol_file.
+
+  Each entry in the cfi data dictionary returned is a map from function start
+  address to array of function rows, starting with FUNCTION type, followed by
+  one or more CFI rows.
+  """
+  cfi_data = {}
+  current_func = []
+  for line in symbol_file:
+    line = line.decode('utf8')
+    if 'STACK CFI' not in line:
+      continue
+
+    parts = line.split()
+    data = {}
+    if parts[2] == 'INIT':
+      # Add the previous function to the output
+      if len(current_func) > 1:
+        cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+      current_func = []
+
+      # The function line is of format "STACK CFI INIT <addr> <length> ..."
+      data[_ADDR_ENTRY] = int(parts[3], 16)
+      data[_LENGTH_ENTRY] = int(parts[4], 16)
+
+      # Condition C1: Skip if length is large.
+      if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
+        continue  # Skip the current function.
+    else:
+      # The current function is skipped.
+      if len(current_func) == 0:
+        continue
+
+      # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
+      data[_ADDR_ENTRY] = int(parts[2], 16)
+      (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)
+
+      # Condition C2 and C3: Skip based on limits on offsets.
+      if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
+        current_func = []
+        continue
+      assert data[_CFA_REG] % 4 == 0
+      # Since we skipped functions with code size larger than 0xffff, we should
+      # have no function offset larger than the same value.
+      assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff
+
+    if data[_ADDR_ENTRY] == 0:
+      # Skip current function, delete all previous entries.
+      current_func = []
+      continue
+    assert data[_ADDR_ENTRY] % 2 == 0
+    current_func.append(data)
+
+  # Condition C4: Skip function without CFI rows.
+  if len(current_func) > 1:
+    cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+  return cfi_data
+
+
+def _WriteCfiData(cfi_data, out_file):
+  """Writes the CFI data in defined format to out_file."""
+  # Stores the final data that will be written to UNW_DATA table, in order
+  # with 2 byte items.
+  unw_data = []
+
+  # Represent all the CFI data of functions as set of numbers and map them to an
+  # index in the |unw_data|. This index is later written to the UNW_INDEX table
+  # for each function. This map is used to find index of the data for functions.
+  data_to_index = {}
+  # Store mapping between the functions to the index.
+  func_addr_to_index = {}
+  previous_func_end = 0
+  for addr, function in sorted(cfi_data.items()):
+    # Add an empty function entry when CFI rows are missing between 2
+    # functions.
+    if previous_func_end != 0 and addr - previous_func_end > 4:
+      func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+    previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]
+
+    assert len(function) > 1
+    func_data_arr = []
+    func_data = 0
+    # The first row contains the function address and length. The rest of the
+    # rows have CFI data. Create function data array as given in the format.
+    for row in function[1:]:
+      addr_offset = row[_ADDR_ENTRY] - addr
+      cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] // 4)
+
+      func_data_arr.append(addr_offset)
+      func_data_arr.append(cfa_offset)
+
+    # Consider all the rows in the data as one large integer and add it as a key
+    # to the |data_to_index|.
+    for data in func_data_arr:
+      func_data = (func_data << 16) | data
+
+    row_count = len(func_data_arr) // 2
+    if func_data not in data_to_index:
+      # When data is not found, create a new index = len(unw_data), and write
+      # the data to |unw_data|.
+      index = len(unw_data)
+      data_to_index[func_data] = index
+      unw_data.append(row_count)
+      for row in func_data_arr:
+        unw_data.append(row)
+    else:
+      # If the data was found, then use the same index for the function.
+      index = data_to_index[func_data]
+      assert row_count == unw_data[index]
+    func_addr_to_index[addr] = data_to_index[func_data]
+
+  # Mark the end of the last function entry.
+  func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+
+  # Write the number of entries in the UNW_INDEX table.
+  _Write4Bytes(out_file, len(func_addr_to_index))
+
+  # Write the UNW_INDEX table. First list of addresses and then indices.
+  sorted_unw_index = sorted(func_addr_to_index.items())
+  for addr, index in sorted_unw_index:
+    _Write4Bytes(out_file, addr)
+  for addr, index in sorted_unw_index:
+    _Write2Bytes(out_file, index)
+
+  # Write the UNW_DATA table.
+  for data in unw_data:
+    _Write2Bytes(out_file, data)
+
+
+def _ParseCfiData(sym_stream, output_path):
+  cfi_data = _GetAllCfiRows(sym_stream)
+  with open(output_path, 'wb') as out_file:
+    _WriteCfiData(cfi_data, out_file)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--input_path', required=True,
+      help='The input path of the unstripped binary')
+  parser.add_argument(
+      '--output_path', required=True,
+      help='The path of the output file')
+  parser.add_argument(
+      '--dump_syms_path', required=True,
+      help='The path of the dump_syms binary')
+
+  args = parser.parse_args()
+  cmd = ['./' + args.dump_syms_path, args.input_path]
+  proc = subprocess.Popen(cmd, bufsize=-1, stdout=subprocess.PIPE)
+  _ParseCfiData(proc.stdout, args.output_path)
+  assert proc.wait() == 0
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/extract_unwind_tables_tests.py b/src/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 0000000..59436ff
--- /dev/null
+++ b/src/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
+class TestExtractUnwindTables(unittest.TestCase):
+  def testExtractCfi(self):
+    with tempfile.NamedTemporaryFile() as output_file:
+      test_data_lines = """
+MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
+INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
+FILE 0 ../../base/allocator/allocator_check.cc
+FILE 1 ../../base/allocator/allocator_extension.cc
+FILE 2 ../../base/allocator/allocator_shim.cc
+FUNC 1adcb60 54 0 i2d_name_canon
+1adcb60 1a 509 17054
+3b94c70 2 69 40
+PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
+PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
+STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI 2 .cfa: sp 4 +
+STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI 6 .cfa: sp 16 +
+STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
+STACK CFI e1a970 .cfa: sp 4 +
+STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI e1a974 .cfa: sp 16 +
+STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
+STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
+STACK CFI e1a1e8 .cfa: sp 80 +
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
+STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
+STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
+STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
+STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
+STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
+STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
+STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
+""".splitlines()
+      extract_unwind_tables._ParseCfiData(
+          [l.encode('utf8') for l in test_data_lines], output_file.name)
+
+      expected_cfi_data = {
+        0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
+        0xe1a296 : [],
+        0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
+        0xe1a990 : [],
+        0x3b92e24: [0x28, 0x13],
+        0x3b92e62: [],
+      }
+      expected_function_count = len(expected_cfi_data)
+
+      actual_output = []
+      with open(output_file.name, 'rb') as f:
+        while True:
+          read = f.read(2)
+          if not read:
+            break
+          actual_output.append(struct.unpack('H', read)[0])
+
+      # First value is the number of entries in the unw_index table.
+      unw_index_size = actual_output[1] << 16 | actual_output[0]
+      # |unw_index_size| should match entry count.
+      self.assertEqual(expected_function_count, unw_index_size)
+      # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing
+      # size.
+      unw_index_start = 2
+      unw_index_addr_end = unw_index_start + expected_function_count * 2
+      unw_index_end = unw_index_addr_end + expected_function_count
+      unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
+      unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]
+
+      unw_data_start = unw_index_end
+      unw_data = actual_output[unw_data_start:]
+
+      for func_iter in range(0, expected_function_count):
+        func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
+                     unw_index_addr_col[func_iter * 2])
+        index = unw_index_index_col[func_iter]
+        # If index is CANT_UNWIND then the function cannot be unwound.
+        if index == 0xFFFF:
+          self.assertEqual(expected_cfi_data[func_addr], [])
+          continue
+
+        func_start = index + 1
+        func_end = func_start + unw_data[index] * 2
+        self.assertEqual(len(expected_cfi_data[func_addr]),
+                         func_end - func_start)
+        func_cfi = unw_data[func_start : func_end]
+        self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/filter_zip.py b/src/build/android/gyp/filter_zip.py
new file mode 100755
index 0000000..068ff03
--- /dev/null
+++ b/src/build/android/gyp/filter_zip.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import shutil
+import sys
+
+from util import build_utils
+
+
+_RESOURCE_CLASSES = [
+    "R.class",
+    "R##*.class",
+    "Manifest.class",
+    "Manifest##*.class",
+]
+
+
+def CreatePathTransform(exclude_globs, include_globs,
+                        strip_resource_classes_for):
+  """Returns a function to strip paths for the given patterns.
+
+  Args:
+    exclude_globs: List of globs that if matched should be excluded.
+    include_globs: List of globs that if not matched should be excluded.
+    strip_resource_classes_for: List of Java packages from which to strip
+       R.java classes.
+
+  Returns:
+    * None if no filters are needed.
+    * A function "(path) -> path" that returns None when |path| should be
+          stripped, or |path| otherwise.
+  """
+  if not (exclude_globs or include_globs or strip_resource_classes_for):
+    return None
+  exclude_globs = list(exclude_globs or [])
+  if strip_resource_classes_for:
+    exclude_globs.extend(p.replace('.', '/') + '/' + f
+                         for p in strip_resource_classes_for
+                         for f in _RESOURCE_CLASSES)
+  def path_transform(path):
+    # Exclude filters take precedence over include filters.
+    if build_utils.MatchesGlob(path, exclude_globs):
+      return None
+    if include_globs and not build_utils.MatchesGlob(path, include_globs):
+      return None
+    return path
+
+  return path_transform
+
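+# Example usage (illustrative; MatchesGlob is fnmatch-based):
+#   transform = CreatePathTransform(['*/R.class'], [], [])
+#   transform('com/foo/R.class')    # -> None (stripped)
+#   transform('com/foo/Foo.class')  # -> 'com/foo/Foo.class' (kept)
+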
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--input', required=True,
+      help='Input zip file.')
+  parser.add_argument('--output', required=True,
+      help='Output zip file')
+  parser.add_argument('--exclude-globs',
+      help='GN list of exclude globs')
+  parser.add_argument('--include-globs',
+      help='GN list of include globs')
+  parser.add_argument('--strip-resource-classes-for',
+      help='GN list of Java package names to exclude R.class files from.')
+
+  argv = build_utils.ExpandFileArgs(sys.argv[1:])
+  args = parser.parse_args(argv)
+
+  args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
+  args.include_globs = build_utils.ParseGnList(args.include_globs)
+  args.strip_resource_classes_for = build_utils.ParseGnList(
+      args.strip_resource_classes_for)
+
+  path_transform = CreatePathTransform(args.exclude_globs, args.include_globs,
+                                       args.strip_resource_classes_for)
+  with build_utils.AtomicOutput(args.output) as f:
+    if path_transform:
+      build_utils.MergeZips(f.name, [args.input], path_transform=path_transform)
+    else:
+      shutil.copy(args.input, f.name)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/filter_zip.pydeps b/src/build/android/gyp/filter_zip.pydeps
new file mode 100644
index 0000000..f561e05
--- /dev/null
+++ b/src/build/android/gyp/filter_zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../gn_helpers.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/finalize_apk.py b/src/build/android/gyp/finalize_apk.py
new file mode 100644
index 0000000..b465f71
--- /dev/null
+++ b/src/build/android/gyp/finalize_apk.py
@@ -0,0 +1,78 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and aligns an APK."""
+
+import argparse
+import logging
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from util import build_utils
+
+
+def FinalizeApk(apksigner_path,
+                zipalign_path,
+                unsigned_apk_path,
+                final_apk_path,
+                key_path,
+                key_passwd,
+                key_name,
+                min_sdk_version,
+                warnings_as_errors=False):
+  # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime
+  # and a corrupted state.
+  with tempfile.NamedTemporaryFile() as staging_file:
+    if zipalign_path:
+      # v2 signing requires that zipalign happen first.
+      logging.debug('Running zipalign')
+      zipalign_cmd = [
+          zipalign_path, '-p', '-f', '4', unsigned_apk_path, staging_file.name
+      ]
+      build_utils.CheckOutput(zipalign_cmd,
+                              print_stdout=True,
+                              fail_on_output=warnings_as_errors)
+      signer_input_path = staging_file.name
+    else:
+      signer_input_path = unsigned_apk_path
+
+    sign_cmd = build_utils.JavaCmd(warnings_as_errors) + [
+        '-jar',
+        apksigner_path,
+        'sign',
+        '--in',
+        signer_input_path,
+        '--out',
+        staging_file.name,
+        '--ks',
+        key_path,
+        '--ks-key-alias',
+        key_name,
+        '--ks-pass',
+        'pass:' + key_passwd,
+    ]
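+    # Illustratively (key names and paths hypothetical), this builds up to:
+    #   java -jar apksigner.jar sign --in app-unsigned.apk --out <staging> \
+    #       --ks chromium.keystore --ks-key-alias key0 --ks-pass pass:<pw>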
+    # V3 signing adds security niceties, which are irrelevant for local builds.
+    sign_cmd += ['--v3-signing-enabled', 'false']
+
+    if min_sdk_version >= 24:
+      # Disable v1 signatures when v2 signing can be used (it's much faster).
+      # By default, both v1 and v2 signing happen.
+      sign_cmd += ['--v1-signing-enabled', 'false']
+    else:
+      # Force SHA-1 (makes signing faster; insecure is fine for local builds).
+      # Leave v2 signing enabled since it verifies faster on device when
+      # supported.
+      sign_cmd += ['--min-sdk-version', '1']
+
+    logging.debug('Signing apk')
+    build_utils.CheckOutput(sign_cmd,
+                            print_stdout=True,
+                            fail_on_output=warnings_as_errors)
+    shutil.move(staging_file.name, final_apk_path)
+    # TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
+    if sys.version_info.major == 2:
+      staging_file.delete = False
+    else:
+      staging_file._closer.delete = False
diff --git a/src/build/android/gyp/find.py b/src/build/android/gyp/find.py
new file mode 100755
index 0000000..b05874b
--- /dev/null
+++ b/src/build/android/gyp/find.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+from __future__ import print_function
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--pattern', default='*', help='File pattern to match.')
+  options, directories = parser.parse_args(argv)
+
+  for d in directories:
+    if not os.path.exists(d):
+      print('%s does not exist' % d, file=sys.stderr)
+      return 1
+    for root, _, filenames in os.walk(d):
+      for f in fnmatch.filter(filenames, options.pattern):
+        print(os.path.join(root, f))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/gcc_preprocess.py b/src/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..70ae10f
--- /dev/null
+++ b/src/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import posixpath
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParsePackageName(data):
+  m = re.match(r'^\s*package\s+(.*?)\s*;', data, re.MULTILINE)
+  return m.group(1) if m else ''
+
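+# For example (illustrative):
+#   _ParsePackageName('package org.chromium.base;\n...') -> 'org.chromium.base'
+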
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--include-dirs', help='GN list of include directories.')
+  parser.add_argument('--output', help='Path for .srcjar.')
+  parser.add_argument('--define',
+                      action='append',
+                      dest='defines',
+                      help='List of -D args')
+  parser.add_argument('templates', nargs='+', help='Template files.')
+  options = parser.parse_args(args)
+
+  options.defines = build_utils.ParseGnList(options.defines)
+  options.include_dirs = build_utils.ParseGnList(options.include_dirs)
+
+  gcc_cmd = [
+      'gcc',
+      '-E',  # stop after preprocessing.
+      '-DANDROID',  # Specify ANDROID define for pre-processor.
+      '-x',
+      'c-header',  # treat sources as C header files
+      '-P',  # disable line markers, i.e. '#line 309'
+  ]
+  gcc_cmd.extend('-D' + x for x in options.defines)
+  gcc_cmd.extend('-I' + x for x in options.include_dirs)
+
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f, 'w') as z:
+      for template in options.templates:
+        data = build_utils.CheckOutput(gcc_cmd + [template])
+        package_name = _ParsePackageName(data)
+        if not package_name:
+          raise Exception('Could not find java package of ' + template)
+        zip_path = posixpath.join(
+            package_name.replace('.', '/'),
+            os.path.splitext(os.path.basename(template))[0]) + '.java'
+        build_utils.AddToZipHermetic(z, zip_path, data=data)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/gcc_preprocess.pydeps b/src/build/android/gyp/gcc_preprocess.pydeps
new file mode 100644
index 0000000..39e56f7
--- /dev/null
+++ b/src/build/android/gyp/gcc_preprocess.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../gn_helpers.py
+gcc_preprocess.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/generate_android_wrapper.py b/src/build/android/gyp/generate_android_wrapper.py
new file mode 100755
index 0000000..c8b762c
--- /dev/null
+++ b/src/build/android/gyp/generate_android_wrapper.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+from util import build_utils
+
+sys.path.append(
+    os.path.abspath(
+        os.path.join(os.path.dirname(__file__), '..', '..', 'util')))
+
+import generate_wrapper
+
+_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
+
+
+def ExpandWrappedPathLists(args):
+  expanded_args = []
+  for arg in args:
+    m = _WRAPPED_PATH_LIST_RE.match(arg)
+    if m:
+      for p in build_utils.ParseGnList(m.group(2)):
+        expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p])
+    else:
+      expanded_args.append(arg)
+  return expanded_args
+
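+# For example (illustrative): the argument
+#   '@WrappedPathList(--path, ["a", "b"])'
+# expands to ['--path', '@WrappedPath(a)', '--path', '@WrappedPath(b)'].
+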
+
+def main(raw_args):
+  parser = generate_wrapper.CreateArgumentParser()
+  expanded_raw_args = build_utils.ExpandFileArgs(raw_args)
+  expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args)
+  args = parser.parse_args(expanded_raw_args)
+  return generate_wrapper.Wrap(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/generate_linker_version_script.py b/src/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 0000000..995fcd7
--- /dev/null
+++ b/src/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE.  DO NOT MODIFY.
+#
+# See: %s
+
+{
+  global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+  local:
+    *;
+};
+"""
+
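+# With no extra flags the emitted script looks like this (illustrative):
+#   {
+#     global:
+#       CrashpadHandlerMain;
+#       JNI_OnLoad;
+#     local:
+#       *;
+#   };
+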
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--output',
+      required=True,
+      help='Path to output linker version script file.')
+  parser.add_argument(
+      '--export-java-symbols',
+      action='store_true',
+      help='Export Java_* JNI methods')
+  parser.add_argument(
+      '--export-symbol-allowlist-file',
+      action='append',
+      default=[],
+      dest='allowlists',
+      help='Path to an input file containing an allowlist of extra symbols to '
+      'export, one symbol per line. Multiple files may be specified.')
+  parser.add_argument(
+      '--export-feature-registrations',
+      action='store_true',
+      help='Export JNI_OnLoad_* methods')
+  options = parser.parse_args()
+
+  # JNI_OnLoad is always exported.
+  # CrashpadHandlerMain() is the entry point to the Crashpad handler, required
+  # for libcrashpad_handler_trampoline.so.
+  symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad']
+
+  if options.export_java_symbols:
+    symbol_list.append('Java_*')
+
+  if options.export_feature_registrations:
+    symbol_list.append('JNI_OnLoad_*')
+
+  for allowlist in options.allowlists:
+    with open(allowlist, 'rt') as f:
+      for line in f:
+        line = line.strip()
+        if not line or line[0] == '#':
+          continue
+        symbol_list.append(line)
+
+  script_content = [_SCRIPT_HEADER]
+  for symbol in symbol_list:
+    script_content.append('    %s;\n' % symbol)
+  script_content.append(_SCRIPT_FOOTER)
+
+  script = ''.join(script_content)
+
+  with build_utils.AtomicOutput(options.output, mode='w') as f:
+    f.write(script)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/generate_linker_version_script.pydeps b/src/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 0000000..de9fa56
--- /dev/null
+++ b/src/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/ijar.py b/src/build/android/gyp/ijar.py
new file mode 100755
index 0000000..45413f6
--- /dev/null
+++ b/src/build/android/gyp/ijar.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+# python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')"
+# du -b test.jar
+_EMPTY_JAR_SIZE = 22
+
+
+def main():
+  # The point of this wrapper is to use AtomicOutput so that output timestamps
+  # are not updated when outputs are unchanged.
+  ijar_bin, in_jar, out_jar = sys.argv[1:]
+  with build_utils.AtomicOutput(out_jar) as f:
+    # ijar fails on empty jars: https://github.com/bazelbuild/bazel/issues/10162
+    if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE:
+      with open(in_jar, 'rb') as in_f:
+        f.write(in_f.read())
+    else:
+      build_utils.CheckOutput([ijar_bin, in_jar, f.name])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/ijar.pydeps b/src/build/android/gyp/ijar.pydeps
new file mode 100644
index 0000000..e9ecb66
--- /dev/null
+++ b/src/build/android/gyp/ijar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../gn_helpers.py
+ijar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/jacoco_instr.py b/src/build/android/gyp/jacoco_instr.py
new file mode 100755
index 0000000..8e5f29c
--- /dev/null
+++ b/src/build/android/gyp/jacoco_instr.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'jacoco_instr' action in the Java build process.
+Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will
+call the instrument command which accepts a jar and instruments it using
+jacococli.jar.
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+  """Adds arguments related to instrumentation to parser.
+
+  Args:
+    parser: ArgumentParser object.
+  """
+  parser.add_argument(
+      '--input-path',
+      required=True,
+      help='Path to input file(s). Either the classes '
+      'directory, or the path to a jar.')
+  parser.add_argument(
+      '--output-path',
+      required=True,
+      help='Path to output final file(s) to. Either the '
+      'final classes directory, or the directory in '
+      'which to place the instrumented/copied jar.')
+  parser.add_argument(
+      '--sources-json-file',
+      required=True,
+      help='File to create with the list of source directories '
+      'and input path.')
+  parser.add_argument(
+      '--java-sources-file',
+      required=True,
+      help='File containing newline-separated .java paths')
+  parser.add_argument(
+      '--jacococli-jar', required=True, help='Path to jacococli.jar.')
+  parser.add_argument(
+      '--files-to-instrument',
+      help='Path to a file containing which source files are affected.')
+
+
+def _GetSourceDirsFromSourceFiles(source_files):
+  """Returns list of directories for the files in |source_files|.
+
+  Args:
+    source_files: List of source files.
+
+  Returns:
+    List of source directories.
+  """
+  return list(set(os.path.dirname(source_file) for source_file in source_files))
+
+
+def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
+                           src_root):
+  """Adds all normalized source directories and input path to
+  |sources_json_file|.
+
+  Args:
+    source_dirs: List of source directories.
+    input_path: The input path to non-instrumented class files.
+    sources_json_file: File into which to write the list of source directories
+    and input path.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  relative_sources = []
+  for s in source_dirs:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print('Error: found source directory not under repository root: %s %s' %
+            (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  data = {}
+  data['source_dirs'] = relative_sources
+  data['input_path'] = []
+  if input_path:
+    data['input_path'].append(os.path.abspath(input_path))
+  with open(sources_json_file, 'w') as f:
+    json.dump(data, f)
+
+
+def _GetAffectedClasses(jar_file, source_files):
+  """Gets affected classes by affected source files to a jar.
+
+  Args:
+    jar_file: The jar file to get all members.
+    source_files: The list of affected source files.
+
+  Returns:
+    A tuple of affected classes and unaffected members.
+  """
+  with zipfile.ZipFile(jar_file) as f:
+    members = f.namelist()
+
+  affected_classes = []
+  unaffected_members = []
+
+  for member in members:
+    if not member.endswith('.class'):
+      unaffected_members.append(member)
+      continue
+
+    is_affected = False
+    index = member.find('$')
+    if index == -1:
+      index = member.find('.class')
+    for source_file in source_files:
+      if source_file.endswith(member[:index] + '.java'):
+        affected_classes.append(member)
+        is_affected = True
+        break
+    if not is_affected:
+      unaffected_members.append(member)
+
+  return affected_classes, unaffected_members
+
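+# For example (illustrative): a jar containing Foo.class, Foo$Bar.class and
+# META-INF/MANIFEST.MF, with affected sources ['src/Foo.java'], yields
+# (['Foo.class', 'Foo$Bar.class'], ['META-INF/MANIFEST.MF']).
+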
+
+def _InstrumentClassFiles(instrument_cmd,
+                          input_path,
+                          output_path,
+                          temp_dir,
+                          affected_source_files=None):
+  """Instruments class files from input jar.
+
+  Args:
+    instrument_cmd: JaCoCo instrument command.
+    input_path: The input path to non-instrumented jar.
+    output_path: The output path to instrumented jar.
+    temp_dir: The temporary directory.
+    affected_source_files: The affected source file paths to input jar.
+      Default is None, which means instrumenting all class files in jar.
+  """
+  affected_classes = None
+  unaffected_members = None
+  if affected_source_files:
+    affected_classes, unaffected_members = _GetAffectedClasses(
+        input_path, affected_source_files)
+
+  # Extract affected class files.
+  with zipfile.ZipFile(input_path) as f:
+    f.extractall(temp_dir, affected_classes)
+
+  instrumented_dir = os.path.join(temp_dir, 'instrumented')
+
+  # Instrument extracted class files.
+  instrument_cmd.extend([temp_dir, '--dest', instrumented_dir])
+  build_utils.CheckOutput(instrument_cmd)
+
+  if affected_source_files and unaffected_members:
+    # Extract unaffected members to instrumented_dir.
+    with zipfile.ZipFile(input_path) as f:
+      f.extractall(instrumented_dir, unaffected_members)
+
+  # Zip all files to output_path
+  build_utils.ZipDir(output_path, instrumented_dir)
+
+
+def _RunInstrumentCommand(parser):
+  """Instruments class or Jar files using JaCoCo.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    An exit code.
+  """
+  args = parser.parse_args()
+
+  source_files = []
+  if args.java_sources_file:
+    source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))
+
+  with build_utils.TempDir() as temp_dir:
+    instrument_cmd = build_utils.JavaCmd() + [
+        '-jar', args.jacococli_jar, 'instrument'
+    ]
+
+    if not args.files_to_instrument:
+      _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+                            temp_dir)
+    else:
+      affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
+      source_set = set(source_files)
+      affected_source_files = [f for f in affected_files if f in source_set]
+
+      # Copy input_path to output_path and return if no source file is
+      # affected.
+      if not affected_source_files:
+        shutil.copyfile(args.input_path, args.output_path)
+        # Create a dummy sources_json_file.
+        _CreateSourcesJsonFile([], None, args.sources_json_file,
+                               build_utils.DIR_SOURCE_ROOT)
+        return 0
+      else:
+        _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
+                              temp_dir, affected_source_files)
+
+  source_dirs = _GetSourceDirsFromSourceFiles(source_files)
+  # TODO(GYP): In GN, we are passed the list of sources, detecting source
+  # directories, then walking them to re-establish the list of sources.
+  # This can obviously be simplified!
+  _CreateSourcesJsonFile(source_dirs, args.input_path, args.sources_json_file,
+                         build_utils.DIR_SOURCE_ROOT)
+
+  return 0
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  _AddArguments(parser)
+  _RunInstrumentCommand(parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/jacoco_instr.pydeps b/src/build/android/gyp/jacoco_instr.pydeps
new file mode 100644
index 0000000..d7fec19
--- /dev/null
+++ b/src/build/android/gyp/jacoco_instr.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py
+../../gn_helpers.py
+jacoco_instr.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/java_cpp_enum.py b/src/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..08a381a
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,437 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import optparse
+import os
+import re
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_ALLOWLIST = [
+    'char', 'unsigned char', 'short', 'unsigned short', 'int', 'int8_t',
+    'int16_t', 'int32_t', 'uint8_t', 'uint16_t'
+]
+
+
+class EnumDefinition(object):
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, comments=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.comments = collections.OrderedDict(comments or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  def AppendEntryComment(self, key, value):
+    if key in self.comments:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.comments[key] = value
+
+  @property
+  def class_name(self):
+    return self.class_name_override or self.original_enum_name
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+    self._NormalizeNames()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.enum_package
+    assert self.entries
+    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_ALLOWLIST:
+      raise Exception('Fixed type %s for enum %s not in allowlist.' %
+                      (self.fixed_type, self.class_name))
+
+  def _AssignEntryIndices(self):
+    # Enums, if given no value, are given the value of the previous enum + 1.
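+    # e.g. entries {A: None, B: '2', C: None} resolve to A=0, B=2, C=3; an
+    # entry whose value names an earlier key (D='B') resolves to that value.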
+    if not all(self.entries.values()):
+      prev_enum_value = -1
+      for key, value in self.entries.items():
+        if not value:
+          self.entries[key] = prev_enum_value + 1
+        elif value in self.entries:
+          self.entries[key] = self.entries[value]
+        else:
+          try:
+            self.entries[key] = int(value)
+          except ValueError:
+            raise Exception('Could not interpret integer from enum value "%s" '
+                            'for key %s.' % (value, key))
+        prev_enum_value = self.entries[key]
+
+  def _StripPrefix(self):
+    prefix_to_strip = self.prefix_to_strip
+    if not prefix_to_strip:
+      shout_case = self.original_enum_name
+      shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper()
+      shout_case += '_'
+
+      prefixes = [shout_case, self.original_enum_name,
+                  'k' + self.original_enum_name]
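+      # e.g. for original_enum_name 'ClassName' the candidate prefixes are
+      # 'CLASS_NAME_', 'ClassName' and 'kClassName'.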
+
+      for prefix in prefixes:
+        if all([w.startswith(prefix) for w in self.entries.keys()]):
+          prefix_to_strip = prefix
+          break
+      else:
+        prefix_to_strip = ''
+
+    def StripEntries(entries):
+      ret = collections.OrderedDict()
+      for k, v in entries.items():
+        stripped_key = k.replace(prefix_to_strip, '', 1)
+        if isinstance(v, str):
+          stripped_value = v.replace(prefix_to_strip, '')
+        else:
+          stripped_value = v
+        ret[stripped_key] = stripped_value
+
+      return ret
+
+    self.entries = StripEntries(self.entries)
+    self.comments = StripEntries(self.comments)
+
+  def _NormalizeNames(self):
+    self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty)
+    self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty)
+
+
+def _TransformKeys(d, func):
+  """Normalize keys in |d| and update references to old keys in |d| values."""
+  keys_map = {k: func(k) for k in d}
+  ret = collections.OrderedDict()
+  for k, v in d.items():
+    # Need to transform values as well when the entry value was explicitly set
+    # (since it could contain references to other enum entry values).
+    if isinstance(v, str):
+      # First check if a full replacement is available. This avoids issues when
+      # one key is a substring of another.
+      if v in d:
+        v = keys_map[v]
+      else:
+        for old_key, new_key in keys_map.items():
+          v = v.replace(old_key, new_key)
+    ret[keys_map[k]] = v
+  return ret
+
+
+class DirectiveSet(object):
+  class_name_override_key = 'CLASS_NAME_OVERRIDE'
+  enum_package_key = 'ENUM_PACKAGE'
+  prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+  def __init__(self):
+    self._directives = {}
+
+  def Update(self, key, value):
+    if key not in DirectiveSet.known_keys:
+      raise Exception("Unknown directive: " + key)
+    self._directives[key] = value
+
+  @property
+  def empty(self):
+    return len(self._directives) == 0
+
+  def UpdateDefinition(self, definition):
+    definition.class_name_override = self._directives.get(
+        DirectiveSet.class_name_override_key, '')
+    definition.enum_package = self._directives.get(
+        DirectiveSet.enum_package_key)
+    definition.prefix_to_strip = self._directives.get(
+        DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+  single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+  enum_end_re = re.compile(r'^\s*}\s*;\s*$')
+  generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+  multi_line_generator_directive_start_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+  multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$')
+  multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$')
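+  # e.g. '// GENERATED_JAVA_ENUM_PACKAGE: test.namespace' matches
+  # generator_directive_re with groups ('ENUM_PACKAGE', 'test.namespace').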
+
+  optional_class_or_struct_re = r'(class|struct)?'
+  enum_name_re = r'(\w+)'
+  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+      optional_class_or_struct_re + r'\s*' + enum_name_re + r'\s*' +
+      optional_fixed_type_re + r'\s*{\s*')
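+  # e.g. 'enum class Foo : int {' matches enum_start_re, yielding
+  # original_enum_name 'Foo' and fixed_type 'int'.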
+  enum_single_line_re = re.compile(
+      r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$')
+
+  def __init__(self, lines, path=''):
+    self._lines = lines
+    self._path = path
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._current_comments = []
+    self._generator_directives = DirectiveSet()
+    self._multi_line_generator_directive = None
+    self._current_enum_entry = ''
+
+  def _ApplyGeneratorDirectives(self):
+    self._generator_directives.UpdateDefinition(self._current_definition)
+    self._generator_directives = DirectiveSet()
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    if self._multi_line_generator_directive:
+      self._ParseMultiLineDirectiveLine(line)
+    elif not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported in ' +
+                      self._path)
+
+    enum_comment = HeaderParser.single_line_comment_re.match(line)
+    if enum_comment:
+      comment = enum_comment.groups()[0]
+      if comment:
+        self._current_comments.append(comment)
+    elif HeaderParser.enum_end_re.match(line):
+      self._FinalizeCurrentEnumDefinition()
+    else:
+      self._AddToCurrentEnumEntry(line)
+      if ',' in line:
+        self._ParseCurrentEnumEntry()
+
+  def _ParseSingleLineEnum(self, line):
+    for entry in line.split(','):
+      self._AddToCurrentEnumEntry(entry)
+      self._ParseCurrentEnumEntry()
+
+    self._FinalizeCurrentEnumDefinition()
+
+  def _ParseCurrentEnumEntry(self):
+    if not self._current_enum_entry:
+      return
+
+    enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry)
+    if not enum_entry:
+      raise Exception('Unexpected error while attempting to parse %s as enum '
+                      'entry.' % self._current_enum_entry)
+
+    enum_key = enum_entry.groups()[0]
+    enum_value = enum_entry.groups()[2]
+    self._current_definition.AppendEntry(enum_key, enum_value)
+    if self._current_comments:
+      self._current_definition.AppendEntryComment(
+          enum_key, ' '.join(self._current_comments))
+      self._current_comments = []
+    self._current_enum_entry = ''
+
+  def _AddToCurrentEnumEntry(self, line):
+    self._current_enum_entry += ' ' + line.strip()
+
+  def _FinalizeCurrentEnumDefinition(self):
+    if self._current_enum_entry:
+      self._ParseCurrentEnumEntry()
+    self._ApplyGeneratorDirectives()
+    self._current_definition.Finalize()
+    self._enum_definitions.append(self._current_definition)
+    self._current_definition = None
+    self._in_enum = False
+
+  def _ParseMultiLineDirectiveLine(self, line):
+    multi_line_directive_continuation = (
+        HeaderParser.multi_line_directive_continuation_re.match(line))
+    multi_line_directive_end = (
+        HeaderParser.multi_line_directive_end_re.match(line))
+
+    if multi_line_directive_continuation:
+      value_cont = multi_line_directive_continuation.groups()[0]
+      self._multi_line_generator_directive[1].append(value_cont)
+    elif multi_line_directive_end:
+      directive_name = self._multi_line_generator_directive[0]
+      directive_value = "".join(self._multi_line_generator_directive[1])
+      directive_value += multi_line_directive_end.groups()[0]
+      self._multi_line_generator_directive = None
+      self._generator_directives.Update(directive_name, directive_value)
+    else:
+      raise Exception('Malformed multi-line directive declaration in ' +
+                      self._path)
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive_error = HeaderParser.generator_error_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    multi_line_generator_directive_start = (
+        HeaderParser.multi_line_generator_directive_start_re.match(line))
+    single_line_enum = HeaderParser.enum_single_line_re.match(line)
+
+    if generator_directive_error:
+      raise Exception('Malformed directive declaration in ' + self._path +
+                      '. Use () for multi-line directives. E.g.\n' +
+                      '// GENERATED_JAVA_ENUM_PACKAGE: (\n' +
+                      '//   foo.package)')
+    elif generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives.Update(directive_name, directive_value)
+    elif multi_line_generator_directive_start:
+      directive_name = multi_line_generator_directive_start.groups()[0]
+      directive_value = multi_line_generator_directive_start.groups()[1]
+      self._multi_line_generator_directive = (directive_name, [directive_value])
+    elif enum_start or single_line_enum:
+      if self._generator_directives.empty:
+        return
+      self._current_definition = EnumDefinition(
+          original_enum_name=enum_start.groups()[1],
+          fixed_type=enum_start.groups()[3])
+      self._in_enum = True
+      if single_line_enum:
+        self._ParseSingleLineEnum(single_line_enum.group('enum_entries'))
+
+
+def DoGenerate(source_paths):
+  for source_path in source_paths:
+    enum_definitions = DoParseHeaderFile(source_path)
+    if not enum_definitions:
+      raise Exception('No enums found in %s\n'
+                      'Did you forget to prefix enums with '
+                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+                      source_path)
+    for enum_definition in enum_definitions:
+      output_path = java_cpp_utils.GetJavaFilePath(enum_definition.enum_package,
+                                                   enum_definition.class_name)
+      output = GenerateOutput(source_path, enum_definition)
+      yield output_path, output
+
+
+def DoParseHeaderFile(path):
+  with open(path) as f:
+    return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+  template = Template("""
+// Copyright ${YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+${INT_DEF}
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+  enum_template = Template('  int ${NAME} = ${VALUE};')
+  enum_entries_string = []
+  enum_names = []
+  for enum_name, enum_value in enum_definition.entries.items():
+    values = {
+        'NAME': enum_name,
+        'VALUE': enum_value,
+    }
+    enum_comments = enum_definition.comments.get(enum_name)
+    if enum_comments:
+      enum_comments_indent = '   * '
+      comments_line_wrapper = textwrap.TextWrapper(
+          initial_indent=enum_comments_indent,
+          subsequent_indent=enum_comments_indent,
+          width=100)
+      enum_entries_string.append('  /**')
+      enum_entries_string.append('\n'.join(
+          comments_line_wrapper.wrap(enum_comments)))
+      enum_entries_string.append('   */')
+    enum_entries_string.append(enum_template.substitute(values))
+    if enum_name != "NUM_ENTRIES":
+      enum_names.append(enum_definition.class_name + '.' + enum_name)
+  enum_entries_string = '\n'.join(enum_entries_string)
+
+  enum_names_indent = ' ' * 4
+  wrapper = textwrap.TextWrapper(initial_indent=enum_names_indent,
+                                 subsequent_indent=enum_names_indent,
+                                 width=100)
+  enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))
+
+  values = {
+      'CLASS_NAME': enum_definition.class_name,
+      'ENUM_ENTRIES': enum_entries_string,
+      'PACKAGE': enum_definition.enum_package,
+      'INT_DEF': enum_names_string,
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATH': source_path,
+      'YEAR': str(date.today().year)
+  }
+  return template.substitute(values)
+
+
+def DoMain(argv):
+  usage = 'usage: %prog [options] [output_dir] input_file(s)...'
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--srcjar',
+                    help='When specified, a .srcjar at the given path is '
+                    'created instead of individual .java files.')
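+  # Example invocation (paths hypothetical):
+  #   java_cpp_enum.py --srcjar out/enums.srcjar base/foo_enums.h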
+
+  options, args = parser.parse_args(argv)
+
+  if not args:
+    parser.error('Need to specify at least one input file')
+  input_paths = args
+
+  with build_utils.AtomicOutput(options.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      for output_path, data in DoGenerate(input_paths):
+        build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
diff --git a/src/build/android/gyp/java_cpp_enum.pydeps b/src/build/android/gyp/java_cpp_enum.pydeps
new file mode 100644
index 0000000..e6aaeb7
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../gn_helpers.py
+java_cpp_enum.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_enum_tests.py b/src/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..6d5f150
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,783 @@
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput
+from java_cpp_enum import HeaderParser
+from util import java_cpp_utils
+
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')],
+                                comments=[('E2', 'This is a comment.'),
+                                          ('E1', 'This is a multiple line '
+                                                 'comment that is really long. '
+                                                 'This is a multiple line '
+                                                 'comment that is really '
+                                                 'really long.')])
+    output = GenerateOutput('path/to/file', definition)
+    expected = """
+// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     path/to/file
+
+package some.package;
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+    ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
+  /**
+   * %s
+   * really really long.
+   */
+  int E1 = 1;
+  /**
+   * This is a comment.
+   */
+  int E2 = 2 << 2;
+}
+"""
+    long_comment = ('This is a multiple line comment that is really long. '
+                    'This is a multiple line comment that is')
+    self.assertEqual(
+        expected % (date.today().year, java_cpp_utils.GetScriptName(),
+                    long_comment), output)
+
+  def testParseSimpleEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO,
+        VALUE_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+                                              ('VALUE_ONE', 1)]),
+                     definition.entries)
+
+  def testParseBitShifts(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE = 1 << 1,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        ENUM_NAME_ZERO = 1 << 0,
+        ENUM_NAME_ONE = 1 << 1,
+        ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+                                              ('VALUE_ONE', '1 << 1')]),
+                     definition.entries)
+
+    definition = definitions[1]
+    expected_entries = collections.OrderedDict([
+        ('ZERO', '1 << 0'),
+        ('ONE', '1 << 1'),
+        ('TWO', 'ZERO | ONE')])
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseMultilineEnumEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE =
+            SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey,
+        VALUE_TWO = 1 << 18,
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1 << 0'),
+        ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+         'ControlKey'),
+        ('VALUE_TWO', '1 << 18')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseEnumEntryWithTrailingMultilineEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1,
+        VALUE_ONE =
+            SymbolKey | FnKey | AltGrKey | MetaKey |
+            AltKey | ControlKey | ShiftKey,
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1'),
+        ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+         'ControlKey | ShiftKey')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+
+  def testParseNoCommaAfterLastEntry(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+      enum Foo {
+        VALUE_ZERO = 1,
+
+        // This is a multiline
+        //
+        // comment with an empty line.
+        VALUE_ONE = 2
+      };
+    """.split('\n')
+    expected_entries = collections.OrderedDict([
+        ('VALUE_ZERO', '1'),
+        ('VALUE_ONE', '2')])
+    expected_comments = collections.OrderedDict([
+        ('VALUE_ONE', 'This is a multiline comment with an empty line.')])
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('bar.namespace', definition.enum_package)
+    self.assertEqual(expected_entries, definition.entries)
+    self.assertEqual(expected_comments, definition.comments)
+
+  def testParseClassNameOverride(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        FOO
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+      enum PrefixTest {
+        PREFIX_TEST_A,
+        PREFIX_TEST_B,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('OverrideName', definition.class_name)
+
+    definition = definitions[1]
+    self.assertEqual('OtherOverride', definition.class_name)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParsePreservesCommentsWhenPrefixStripping(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A,
+        // This comment spans
+        // two lines.
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumOne', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+                     definition.comments)
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict(
+        [('B', 'This comment spans two lines.')]), definition.comments)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseTwoEnums(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum AnEnum {
+        ENUM_ONE_A = 1,
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      enum EnumTwo {
+        P_A,
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('AnEnum', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'),
+                                              ('ENUM_ONE_B', 'A')]),
+                     definition.entries)
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('P_A', 0),
+                                              ('P_B', 1)]),
+                     definition.entries)
+
+  def testParseSingleLineEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo { P_A, P_B };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseWithStrippingAndRelativeReferences(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A = 1,
+        // P_A is old-don't use P_A.
+        P_B = P_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]),
+                     definition.comments)
+
+  def testParseSingleLineAndRegularEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      enum EnumTwo { P_A, P_B };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        ENUM_NAME_FOO
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries)
+    self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+                     definition.comments)
+
+    self.assertEqual(3, len(definitions))
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries)
+
+    definition = definitions[2]
+    self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries)
+
+  def testParseWithCamelCaseNames(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumTest {
+        EnumTestA = 1,
+        // comment for EnumTestB.
+        EnumTestB = 2,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_PREFIX_TO_STRIP: Test
+      enum AnEnum {
+        TestHTTPOption,
+        TestHTTPSOption,
+      };
+
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('A', '1'), ('B', '2')]),
+        definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([('B', 'comment for B.')]),
+        definition.comments)
+
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]),
+        definition.entries)
+
+  def testParseWithKCamelCaseNames(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        kEnumOne = 1,
+        // comment for kEnumTwo.
+        kEnumTwo = 2,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        kEnumNameFoo,
+        kEnumNameBar
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        kEnumNameFoo,
+        kEnumBar,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Keys {
+        kSymbolKey = 1 << 0,
+        kAltKey = 1 << 1,
+        kUpKey = 1 << 2,
+        kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers,
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Mixed {
+        kTestVal,
+        kCodecMPEG2
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    definition = definitions[0]
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]),
+        definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]),
+        definition.comments)
+
+    definition = definitions[1]
+    self.assertEqual(
+        collections.OrderedDict([('FOO', 0), ('BAR', 1)]),
+        definition.entries)
+
+    definition = definitions[2]
+    self.assertEqual(
+        collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]),
+        definition.entries)
+
+    definition = definitions[3]
+    expected_entries = collections.OrderedDict([
+        ('SYMBOL_KEY', '1 << 0'),
+        ('ALT_KEY', '1 << 1'),
+        ('UP_KEY', '1 << 2'),
+        ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')])
+    self.assertEqual(expected_entries, definition.entries)
+
+    definition = definitions[4]
+    self.assertEqual(
+        collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]),
+        definition.entries)
+
+  def testParseThrowsOnUnknownDirective(self):
+    test_data = """
+      // GENERATED_JAVA_UNKNOWN: Value
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseReturnsEmptyListWithoutDirectives(self):
+    test_data = """
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+  def testParseEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseEnumClassOneValueSubstringOfAnother(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class SafeBrowsingStatus {
+        kChecking = 0,
+        kEnabled = 1,
+        kDisabled = 2,
+        kDisabledByAdmin = 3,
+        kDisabledByExtension = 4,
+        kEnabledStandard = 5,
+        kEnabledEnhanced = 6,
+        // New enum values must go above here.
+        kMaxValue = kEnabledEnhanced,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('SafeBrowsingStatus', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(
+        collections.OrderedDict([
+            ('CHECKING', '0'),
+            ('ENABLED', '1'),
+            ('DISABLED', '2'),
+            ('DISABLED_BY_ADMIN', '3'),
+            ('DISABLED_BY_EXTENSION', '4'),
+            ('ENABLED_STANDARD', '5'),
+            ('ENABLED_ENHANCED', '6'),
+            ('MAX_VALUE', 'ENABLED_ENHANCED'),
+        ]), definition.entries)
+    self.assertEqual(
+        collections.OrderedDict([
+            ('MAX_VALUE', 'New enum values must go above here.')
+        ]), definition.comments)
+
+  def testParseEnumStruct(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum struct Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Foo : int {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('int', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: unsigned short {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('unsigned short', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseUnknownFixedTypeRaises(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: foo_type {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseSimpleMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (te
+      //   st.name
+      //   space)
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+
+  def testParseMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+      //   Ba
+      //   r
+      //   )
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveShort(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveMissingBrackets(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE:
+      // test.namespace
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testEnumValueAssignmentNoneDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentAllDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', '1')
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', '3')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', '2'),
+                                              ('C', '3')]),
+                     definition.entries)
+
+  def testEnumValueAssignmentReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', None)
+    definition.AppendEntry('D', 'C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 1),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSet(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 2),
+                                              ('C', 3)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSetReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', 'B')
+    definition.AppendEntry('D', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 0),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentRaises(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'foo')
+    definition.AppendEntry('C', None)
+    with self.assertRaises(Exception):
+      definition.Finalize()
+
+  def testExplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('P_A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('P_C', None)
+    definition.AppendEntry('P_LAST', 'P_C')
+    definition.prefix_to_strip = 'P_'
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='p')
+    definition.AppendEntry('CLASS_NAME_A', None)
+    definition.AppendEntry('CLASS_NAME_B', None)
+    definition.AppendEntry('CLASS_NAME_C', None)
+    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+    definition = EnumDefinition(original_enum_name='Name',
+                                enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('NAME_LAST', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'NAME_LAST'], list(definition.entries.keys()))
+
+  def testGenerateThrowsOnEmptyInput(self):
+    with self.assertRaises(Exception):
+      original_do_parse = java_cpp_enum.DoParseHeaderFile
+      try:
+        java_cpp_enum.DoParseHeaderFile = lambda _: []
+        for _ in java_cpp_enum.DoGenerate(['file']):
+          pass
+      finally:
+        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_cpp_features.py b/src/build/android/gyp/java_cpp_features.py
new file mode 100755
index 0000000..8e7c244
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
+  # Ex. 'const base::Feature kConstantName{"StringNameOfTheFeature", ...};'
+  # would parse as:
+  #   ExtractConstantName() -> 'ConstantName'
+  #   ExtractValue() -> '"StringNameOfTheFeature"'
+  FEATURE_RE = re.compile(r'\s*const (?:base::)?Feature\s+k(\w+)\s*(?:=\s*)?{')
+  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*,')
+
+  def ExtractConstantName(self, line):
+    match = FeatureParserDelegate.FEATURE_RE.match(line)
+    return match.group(1) if match else None
+
+  def ExtractValue(self, line):
+    match = FeatureParserDelegate.VALUE_RE.search(line)
+    return match.group(1) if match else None
+
+  def CreateJavaConstant(self, name, value, comments):
+    return java_cpp_utils.JavaString(name, value, comments)
+
+
+def _GenerateOutput(template, source_paths, template_path, features):
+  description_template = """
+    // The following string constants were inserted by
+    //     {SCRIPT_NAME}
+    // From
+    //     {SOURCE_PATHS}
+    // Into
+    //     {TEMPLATE_PATH}
+
+"""
+  values = {
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
+      'TEMPLATE_PATH': template_path,
+  }
+  description = description_template.format(**values)
+  native_features = '\n\n'.join(x.Format() for x in features)
+
+  values = {
+      'NATIVE_FEATURES': description + native_features,
+  }
+  return template.format(**values)
+
+
+def _ParseFeatureFile(path):
+  with open(path) as f:
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        FeatureParserDelegate(), f.readlines())
+  return feature_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+  with open(template_path) as f:
+    lines = f.readlines()
+
+  template = ''.join(lines)
+  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+
+  features = []
+  for source_path in source_paths:
+    features.extend(_ParseFeatureFile(source_path))
+
+  output = _GenerateOutput(template, source_paths, template_path, features)
+  return output, output_path
+
+
+def _Main(argv):
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--srcjar',
+                      required=True,
+                      help='The path at which to generate the .srcjar file')
+
+  parser.add_argument('--template',
+                      required=True,
+                      help='The template file with which to generate the Java '
+                      'class. Must have "{NATIVE_FEATURES}" somewhere in '
+                      'the template.')
+
+  parser.add_argument('inputs',
+                      nargs='+',
+                      help='Input file(s)',
+                      metavar='INPUTFILE')
+  args = parser.parse_args(argv)
+
+  with build_utils.AtomicOutput(args.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      data, path = _Generate(args.inputs, args.template)
+      build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+  _Main(sys.argv[1:])
diff --git a/src/build/android/gyp/java_cpp_features.pydeps b/src/build/android/gyp/java_cpp_features.pydeps
new file mode 100644
index 0000000..acffae2
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py
+../../gn_helpers.py
+java_cpp_features.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_features_tests.py b/src/build/android/gyp/java_cpp_features_tests.py
new file mode 100755
index 0000000..5dcdcd8
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_features_tests.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_cpp_features.py.
+
+This test suite contains various tests for the C++ -> Java base::Feature
+generator.
+"""
+
+import unittest
+
+import java_cpp_features
+from util import java_cpp_utils
+
+
+class _TestFeaturesParser(unittest.TestCase):
+  def testParseComments(self):
+    test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const base::Feature kSomeFeature{"SomeFeature",
+                                 base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Real comment that spans
+// multiple lines.
+const base::Feature kSomeOtherFeature{"SomeOtherFeature",
+                                      base::FEATURE_ENABLED_BY_DEFAULT};
+
+// Comment followed by nothing.
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(2, len(features))
+    self.assertEqual('SOME_FEATURE', features[0].name)
+    self.assertEqual('"SomeFeature"', features[0].value)
+    self.assertEqual(1, len(features[0].comments.split('\n')))
+    self.assertEqual('SOME_OTHER_FEATURE', features[1].name)
+    self.assertEqual('"SomeOtherFeature"', features[1].value)
+    self.assertEqual(2, len(features[1].comments.split('\n')))
+
+  def testWhitespace(self):
+    test_data = """
+// 1 line
+const base::Feature kShort{"Short", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 2 lines
+const base::Feature kTwoLineFeatureA{"TwoLineFeatureA",
+                                     base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kTwoLineFeatureB{
+    "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 3 lines
+const base::Feature kFeatureWithAVeryLongNameThatWillHaveToWrap{
+    "FeatureWithAVeryLongNameThatWillHaveToWrap",
+    base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(4, len(features))
+    self.assertEqual('SHORT', features[0].name)
+    self.assertEqual('"Short"', features[0].value)
+    self.assertEqual('TWO_LINE_FEATURE_A', features[1].name)
+    self.assertEqual('"TwoLineFeatureA"', features[1].value)
+    self.assertEqual('TWO_LINE_FEATURE_B', features[2].name)
+    self.assertEqual('"TwoLineFeatureB"', features[2].value)
+    self.assertEqual('FEATURE_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+                     features[3].name)
+    self.assertEqual('"FeatureWithAVeryLongNameThatWillHaveToWrap"',
+                     features[3].value)
+
+  def testCppSyntax(self):
+    test_data = """
+// Mismatched name
+const base::Feature kMismatchedFeature{"MismatchedName",
+    base::FEATURE_DISABLED_BY_DEFAULT};
+
+namespace myfeature {
+// In a namespace
+const base::Feature kSomeFeature{"SomeFeature",
+                                 base::FEATURE_DISABLED_BY_DEFAULT};
+}
+
+// Defined with equals sign
+const base::Feature kFoo = {"Foo", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Build config-specific base::Feature
+#if defined(OS_ANDROID)
+const base::Feature kAndroidOnlyFeature{"AndroidOnlyFeature",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+#endif
+
+// Value depends on build config
+const base::Feature kMaybeEnabled{"MaybeEnabled",
+#if defined(OS_ANDROID)
+    base::FEATURE_DISABLED_BY_DEFAULT
+#else
+    base::FEATURE_ENABLED_BY_DEFAULT
+#endif
+};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(5, len(features))
+    self.assertEqual('MISMATCHED_FEATURE', features[0].name)
+    self.assertEqual('"MismatchedName"', features[0].value)
+    self.assertEqual('SOME_FEATURE', features[1].name)
+    self.assertEqual('"SomeFeature"', features[1].value)
+    self.assertEqual('FOO', features[2].name)
+    self.assertEqual('"Foo"', features[2].value)
+    self.assertEqual('ANDROID_ONLY_FEATURE', features[3].name)
+    self.assertEqual('"AndroidOnlyFeature"', features[3].value)
+    self.assertEqual('MAYBE_ENABLED', features[4].name)
+    self.assertEqual('"MaybeEnabled"', features[4].value)
+
+  def testNotYetSupported(self):
+    # Negative test for cases we don't yet support, to ensure we don't misparse
+    # these until we intentionally add proper support.
+    test_data = """
+// Not currently supported: name depends on C++ directive
+const base::Feature kNameDependsOnOs{
+#if defined(OS_ANDROID)
+    "MaybeName1",
+#else
+    "MaybeName2",
+#endif
+    base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Not currently supported: feature named with a constant instead of literal
+const base::Feature kNamedAfterConstant{kNamedStringConstant,
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual(0, len(features))
+
+  def testTreatWebViewLikeOneWord(self):
+    test_data = """
+const base::Feature kSomeWebViewFeature{"SomeWebViewFeature",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kWebViewOtherFeature{"WebViewOtherFeature",
+                                         base::FEATURE_ENABLED_BY_DEFAULT};
+const base::Feature kFeatureWithPluralWebViews{
+    "FeatureWithPluralWebViews",
+    base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('SOME_WEBVIEW_FEATURE', features[0].name)
+    self.assertEqual('"SomeWebViewFeature"', features[0].value)
+    self.assertEqual('WEBVIEW_OTHER_FEATURE', features[1].name)
+    self.assertEqual('"WebViewOtherFeature"', features[1].value)
+    self.assertEqual('FEATURE_WITH_PLURAL_WEBVIEWS', features[2].name)
+    self.assertEqual('"FeatureWithPluralWebViews"', features[2].value)
+
+  def testSpecialCharacters(self):
+    test_data = r"""
+const base::Feature kFeatureWithEscapes{"Weird\tfeature\"name\n",
+                                        base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kFeatureWithEscapes2{
+    "Weird\tfeature\"name\n",
+    base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('FEATURE_WITH_ESCAPES', features[0].name)
+    self.assertEqual(r'"Weird\tfeature\"name\n"', features[0].value)
+    self.assertEqual('FEATURE_WITH_ESCAPES2', features[1].name)
+    self.assertEqual(r'"Weird\tfeature\"name\n"', features[1].value)
+
+  def testNoBaseNamespacePrefix(self):
+    test_data = """
+const Feature kSomeFeature{"SomeFeature", FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+    feature_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_features.FeatureParserDelegate(), test_data)
+    features = feature_file_parser.Parse()
+    self.assertEqual('SOME_FEATURE', features[0].name)
+    self.assertEqual('"SomeFeature"', features[0].value)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_cpp_strings.py b/src/build/android/gyp/java_cpp_strings.py
new file mode 100755
index 0000000..d713599
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
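+  # Ex. 'const char kConstantName[] = "StringValue";' would parse as:
+  #   ExtractConstantName() -> 'ConstantName'
+  #   ExtractValue() -> '"StringValue"'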
+  STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=')
+  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*;')
+
+  def ExtractConstantName(self, line):
+    match = StringParserDelegate.STRING_RE.match(line)
+    return match.group(1) if match else None
+
+  def ExtractValue(self, line):
+    match = StringParserDelegate.VALUE_RE.search(line)
+    return match.group(1) if match else None
+
+  def CreateJavaConstant(self, name, value, comments):
+    return java_cpp_utils.JavaString(name, value, comments)
+
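+# Editor's sketch (not part of the upstream file): given a declaration like
+#   const char kFooBar[] = "foo-bar";
+# ExtractConstantName() returns 'FooBar' and ExtractValue() returns
+# '"foo-bar"' (quotes preserved); CppConstantParser then emits a
+# java_cpp_utils.JavaString that renders as the Java constant FOO_BAR.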
+
+def _GenerateOutput(template, source_paths, template_path, strings):
+  description_template = """
+    // The following string constants were inserted by
+    //     {SCRIPT_NAME}
+    // From
+    //     {SOURCE_PATHS}
+    // Into
+    //     {TEMPLATE_PATH}
+
+"""
+  values = {
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
+      'TEMPLATE_PATH': template_path,
+  }
+  description = description_template.format(**values)
+  native_strings = '\n\n'.join(x.Format() for x in strings)
+
+  values = {
+      'NATIVE_STRINGS': description + native_strings,
+  }
+  return template.format(**values)
+
+
+def _ParseStringFile(path):
+  with open(path) as f:
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        StringParserDelegate(), f.readlines())
+  return string_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+  with open(template_path) as f:
+    lines = f.readlines()
+
+  template = ''.join(lines)
+  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+  strings = []
+  for source_path in source_paths:
+    strings.extend(_ParseStringFile(source_path))
+
+  output = _GenerateOutput(template, source_paths, template_path, strings)
+  return output, output_path
+
+
+def _Main(argv):
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--srcjar',
+                      required=True,
+                      help='The path at which to generate the .srcjar file')
+
+  parser.add_argument('--template',
+                      required=True,
+                      help='The template file with which to generate the Java '
+                      'class. Must have "{NATIVE_STRINGS}" somewhere in '
+                      'the template.')
+
+  parser.add_argument(
+      'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+  args = parser.parse_args(argv)
+
+  with build_utils.AtomicOutput(args.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      data, path = _Generate(args.inputs, args.template)
+      build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+  _Main(sys.argv[1:])
diff --git a/src/build/android/gyp/java_cpp_strings.pydeps b/src/build/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 0000000..0a821f4
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../gn_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/src/build/android/gyp/java_cpp_strings_tests.py b/src/build/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 0000000..4cb1eee
--- /dev/null
+++ b/src/build/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_strings.py.
+
+This test suite contains various tests for the C++ -> Java string generator.
+"""
+
+import unittest
+
+import java_cpp_strings
+from util import java_cpp_utils
+
+
+class _TestStringsParser(unittest.TestCase):
+
+  def testParseComments(self):
+    test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const char kASwitch[] = "a-value";
+
+// Real comment that spans
+// multiple lines.
+const char kAnotherSwitch[] = "another-value";
+
+// Comment followed by nothing.
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual(2, len(strings))
+    self.assertEqual('A_SWITCH', strings[0].name)
+    self.assertEqual('"a-value"', strings[0].value)
+    self.assertEqual(1, len(strings[0].comments.split('\n')))
+    self.assertEqual('ANOTHER_SWITCH', strings[1].name)
+    self.assertEqual('"another-value"', strings[1].value)
+    self.assertEqual(2, len(strings[1].comments.split('\n')))
+
+  def testStringValues(self):
+    test_data = r"""
+// Single line string constants.
+const char kAString[] = "a-value";
+const char kNoComment[] = "no-comment";
+
+namespace myfeature {
+const char kMyFeatureNoComment[] = "myfeature.no-comment";
+}
+
+// Single line switch with a big space.
+const char kAStringWithSpace[]                      = "a-value";
+
+// Wrapped constant definition.
+const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
+    "a-string-with-a-very-long-name-that-will-have-to-wrap";
+
+// This one has no comment before it.
+
+const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] =
+    "a-string-with-a-very-long-name-that-will-have-to-wrap2";
+
+const char kStringWithEscapes[] = "tab\tquote\"newline\n";
+const char kStringWithEscapes2[] =
+    "tab\tquote\"newline\n";
+
+const char kEmptyString[] = "";
+
+// These are valid C++ but not currently supported by the script.
+const char kInvalidLineBreak[] =
+
+    "invalid-line-break";
+
+const char kConcatenateMultipleStringLiterals[] =
+    "first line"
+    "second line";
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual(9, len(strings))
+    self.assertEqual('A_STRING', strings[0].name)
+    self.assertEqual('"a-value"', strings[0].value)
+    self.assertEqual('NO_COMMENT', strings[1].name)
+    self.assertEqual('"no-comment"', strings[1].value)
+    self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name)
+    self.assertEqual('"myfeature.no-comment"', strings[2].value)
+    self.assertEqual('A_STRING_WITH_SPACE', strings[3].name)
+    self.assertEqual('"a-value"', strings[3].value)
+    self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+                     strings[4].name)
+    self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
+                     strings[4].value)
+    self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2',
+                     strings[5].name)
+    self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"',
+                     strings[5].value)
+    self.assertEqual('STRING_WITH_ESCAPES', strings[6].name)
+    self.assertEqual(r'"tab\tquote\"newline\n"', strings[6].value)
+    self.assertEqual('STRING_WITH_ESCAPES2', strings[7].name)
+    self.assertEqual(r'"tab\tquote\"newline\n"', strings[7].value)
+    self.assertEqual('EMPTY_STRING', strings[8].name)
+    self.assertEqual('""', strings[8].value)
+
+  def testTreatWebViewLikeOneWord(self):
+    test_data = """
+const char kSomeWebViewSwitch[] = "some-webview-switch";
+const char kWebViewOtherSwitch[] = "webview-other-switch";
+const char kSwitchWithPluralWebViews[] = "switch-with-plural-webviews";
+""".split('\n')
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        java_cpp_strings.StringParserDelegate(), test_data)
+    strings = string_file_parser.Parse()
+    self.assertEqual('SOME_WEBVIEW_SWITCH', strings[0].name)
+    self.assertEqual('"some-webview-switch"', strings[0].value)
+    self.assertEqual('WEBVIEW_OTHER_SWITCH', strings[1].name)
+    self.assertEqual('"webview-other-switch"', strings[1].value)
+    self.assertEqual('SWITCH_WITH_PLURAL_WEBVIEWS', strings[2].name)
+    self.assertEqual('"switch-with-plural-webviews"', strings[2].value)
+
+  def testTemplateParsing(self):
+    test_data = """
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package my.java.package;
+
+public any sort of class MyClass {{
+
+{NATIVE_STRINGS}
+
+}}
+""".split('\n')
+    package, class_name = java_cpp_utils.ParseTemplateFile(test_data)
+    self.assertEqual('my.java.package', package)
+    self.assertEqual('MyClass', class_name)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/java_google_api_keys.py b/src/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000..a58628a
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.append(
+    os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+
+PACKAGE = 'org.chromium.chrome'
+CLASSNAME = 'GoogleAPIKeys'
+
+
+def GetScriptName():
+  return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+
+def GenerateOutput(constant_definitions):
+  template = string.Template("""
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${CONSTANT_ENTRIES}
+}
+""")
+
+  constant_template = string.Template(
+      '  public static final String ${NAME} = "${VALUE}";')
+  constant_entries_list = []
+  for constant_name, constant_value in constant_definitions.items():
+    values = {
+        'NAME': constant_name,
+        'VALUE': constant_value,
+    }
+    constant_entries_list.append(constant_template.substitute(values))
+  constant_entries_string = '\n'.join(constant_entries_list)
+
+  values = {
+      'CLASS_NAME': CLASSNAME,
+      'CONSTANT_ENTRIES': constant_entries_string,
+      'PACKAGE': PACKAGE,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
+  }
+  return template.substitute(values)
+
+
+def _DoWriteJavaOutput(output_path, constant_definition):
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(constant_definition))
+
+
+def _DoWriteJarOutput(output_path, constant_definition):
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with zipfile.ZipFile(output_path, 'w') as srcjar:
+    path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
+    data = GenerateOutput(constant_definition)
+    build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+def _DoMain(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--out", help="Path for java output.")
+  parser.add_argument("--srcjar", help="Path for srcjar output.")
+  options = parser.parse_args(argv)
+  if not options.out and not options.srcjar:
+    parser.print_help()
+    sys.exit(-1)
+
+  values = {}
+  values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey()
+  values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (
+      google_api_keys.GetAPIKeyPhysicalWebTest())
+  values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN')
+  values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN')
+  values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING')
+  values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret(
+      'REMOTING')
+  values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID(
+      'REMOTING_HOST')
+  values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (
+      google_api_keys.GetClientSecret('REMOTING_HOST'))
+  values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (
+      google_api_keys.GetClientID('REMOTING_IDENTITY_API'))
+
+  if options.out:
+    _DoWriteJavaOutput(options.out, values)
+  if options.srcjar:
+    _DoWriteJarOutput(options.srcjar, values)
+
+
+if __name__ == '__main__':
+  _DoMain(sys.argv[1:])
diff --git a/src/build/android/gyp/java_google_api_keys.pydeps b/src/build/android/gyp/java_google_api_keys.pydeps
new file mode 100644
index 0000000..ebb7172
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py
+../../../google_apis/google_api_keys.py
+../../gn_helpers.py
+java_google_api_keys.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/java_google_api_keys_tests.py b/src/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000..e00e86c
--- /dev/null
+++ b/src/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import unittest
+
+import java_google_api_keys
+
+
+class TestJavaGoogleAPIKeys(unittest.TestCase):
+  def testOutput(self):
+    definition = {'E1': 'abc', 'E2': 'defgh'}
+    output = java_google_api_keys.GenerateOutput(definition)
+    expected = """
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     google_api_keys/google_api_keys.h
+
+package org.chromium.chrome;
+
+public class GoogleAPIKeys {
+  public static final String E1 = "abc";
+  public static final String E2 = "defgh";
+}
+"""
+    self.assertEqual(expected % java_google_api_keys.GetScriptName(), output)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/jetify_jar.py b/src/build/android/gyp/jetify_jar.py
new file mode 100755
index 0000000..e97ad97
--- /dev/null
+++ b/src/build/android/gyp/jetify_jar.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+  """Adds arguments related to jetifying to parser.
+
+  Args:
+    parser: ArgumentParser object.
+  """
+  parser.add_argument(
+      '--input-path',
+      required=True,
+      help='Path to input file(s). Either the classes '
+      'directory, or the path to a jar.')
+  parser.add_argument(
+      '--output-path',
+      required=True,
+      help='Path to output final file(s) to. Either the '
+      'final classes directory, or the directory in '
+      'which to place the instrumented/copied jar.')
+  parser.add_argument(
+      '--jetify-path', required=True, help='Path to jetify bin.')
+  parser.add_argument(
+      '--jetify-config-path', required=True, help='Path to jetify config file.')
+
+
+def _RunJetifyCommand(parser):
+  args = parser.parse_args()
+  cmd = [
+      args.jetify_path,
+      '-i',
+      args.input_path,
+      '-o',
+      args.output_path,
+      # Need to suppress a lot of warning output when jar doesn't have
+      # any references rewritten.
+      '-l',
+      'error'
+  ]
+  if args.jetify_config_path:
+    cmd.extend(['-c', args.jetify_config_path])
+  # Must wait for jetify command to complete to prevent race condition.
+  env = os.environ.copy()
+  env['JAVA_HOME'] = build_utils.JAVA_HOME
+  subprocess.check_call(cmd, env=env)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  _AddArguments(parser)
+  _RunJetifyCommand(parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/jetify_jar.pydeps b/src/build/android/gyp/jetify_jar.pydeps
new file mode 100644
index 0000000..6a1a589
--- /dev/null
+++ b/src/build/android/gyp/jetify_jar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jetify_jar.pydeps build/android/gyp/jetify_jar.py
+../../gn_helpers.py
+jetify_jar.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/jinja_template.py b/src/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000..d42189b
--- /dev/null
+++ b/src/build/android/gyp/jinja_template.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import argparse
+import os
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+
+class JinjaProcessor(object):
+  """Allows easy rendering of jinja templates with input file tracking."""
+  def __init__(self, loader_base_dir, variables=None):
+    self.loader_base_dir = loader_base_dir
+    self.variables = variables or {}
+    self.loader = _RecordingFileSystemLoader(loader_base_dir)
+    self.env = jinja2.Environment(loader=self.loader)
+    self.env.undefined = jinja2.StrictUndefined
+    self.env.line_comment_prefix = '##'
+    self.env.trim_blocks = True
+    self.env.lstrip_blocks = True
+    self._template_cache = {}  # Map of path -> Template
+
+  def Render(self, input_filename, variables=None):
+    input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+    template = self._template_cache.get(input_rel_path)
+    if not template:
+      template = self.env.get_template(input_rel_path)
+      self._template_cache[input_rel_path] = template
+    return template.render(variables or self.variables)
+
+  def GetLoadedTemplates(self):
+    return list(self.loader.loaded_templates)
+
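+# Minimal usage sketch (editor's illustration; the paths are hypothetical):
+#   processor = JinjaProcessor('/src', variables={'channel': 'beta'})
+#   text = processor.Render('/src/templates/foo.xml.jinja2')
+#   inputs = processor.GetLoadedTemplates()  # for depfile/input tracking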
+
+def _ProcessFile(processor, input_filename, output_filename):
+  output = processor.Render(input_filename)
+
+  # If |output| is the same as the existing file content, skip the update so
+  # that ninja's restat will avoid rebuilding things that depend on it.
+  if os.path.isfile(output_filename):
+    with codecs.open(output_filename, 'r', 'utf-8') as f:
+      if f.read() == output:
+        return
+
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+  with build_utils.TempDir() as temp_dir:
+    path_info = resource_utils.ResourceInfoFile()
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      _ProcessFile(processor, input_filename, output_filename)
+      path_info.AddMapping(relpath, input_filename)
+
+    path_info.Write(outputs_zip + '.info')
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def _ParseVariables(variables_arg, error_func):
+  variables = {}
+  for v in build_utils.ParseGnList(variables_arg):
+    if '=' not in v:
+      error_func('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+  return variables
+
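+# For example (editor's sketch, mirroring the --variables help text below):
+#   _ParseVariables('channel=beta mstone=39', parser.error)
+# returns {'channel': 'beta', 'mstone': '39'}.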
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--inputs', required=True,
+                      help='GN-list of template files to process.')
+  parser.add_argument('--includes', default='',
+                      help="GN-list of files that get {% include %}'ed.")
+  parser.add_argument('--output', help='The output file to generate. Valid '
+                      'only if there is a single input.')
+  parser.add_argument('--outputs-zip', help='A zip file for the processed '
+                      'templates. Required if there are multiple inputs.')
+  parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+                      'of the inputs. Each output\'s path in the output zip '
+                      'will match the relative path from INPUTS_BASE_DIR to '
+                      'the input. Required if --outputs-zip is given.')
+  parser.add_argument('--loader-base-dir', help='Base path used by the '
+                      'template loader. Must be a common ancestor directory of '
+                      'the inputs. Defaults to DIR_SOURCE_ROOT.',
+                      default=host_paths.DIR_SOURCE_ROOT)
+  parser.add_argument('--variables', help='Variables to be made available in '
+                      'the template processing environment, as a GYP list '
+                      '(e.g. --variables "channel=beta mstone=39")', default='')
+  parser.add_argument('--check-includes', action='store_true',
+                      help='Enable inputs and includes checks.')
+  options = parser.parse_args()
+
+  inputs = build_utils.ParseGnList(options.inputs)
+  includes = build_utils.ParseGnList(options.includes)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --outputs-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+
+  variables = _ParseVariables(options.variables, parser.error)
+  processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+  if options.output:
+    _ProcessFile(processor, inputs[0], options.output)
+  else:
+    _ProcessFiles(processor, inputs, options.inputs_base_dir,
+                  options.outputs_zip)
+
+  if options.check_includes:
+    all_inputs = set(processor.GetLoadedTemplates())
+    all_inputs.difference_update(inputs)
+    all_inputs.difference_update(includes)
+    if all_inputs:
+      raise Exception('Found files not listed via --includes:\n' +
+                      '\n'.join(sorted(all_inputs)))
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/jinja_template.pydeps b/src/build/android/gyp/jinja_template.pydeps
new file mode 100644
index 0000000..af22c40
--- /dev/null
+++ b/src/build/android/gyp/jinja_template.pydeps
@@ -0,0 +1,42 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/src/build/android/gyp/lint.py b/src/build/android/gyp/lint.py
new file mode 100755
index 0000000..faad21c
--- /dev/null
+++ b/src/build/android/gyp/lint.py
@@ -0,0 +1,489 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Runs Android's lint tool."""
+
+from __future__ import print_function
+
+import argparse
+import functools
+import logging
+import os
+import re
+import shutil
+import sys
+import time
+import traceback
+from xml.dom import minidom
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import manifest_utils
+from util import server_utils
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md'  # pylint: disable=line-too-long
+
+# These checks are not useful for chromium.
+_DISABLED_ALWAYS = [
+    "AppCompatResource",  # Lint does not correctly detect our appcompat lib.
+    "Assert",  # R8 --force-enable-assertions is used to enable java asserts.
+    "InflateParams",  # Null is ok when inflating views for dialogs.
+    "InlinedApi",  # Constants are copied so they are always available.
+    "LintBaseline",  # Don't warn about using baseline.xml files.
+    "MissingApplicationIcon",  # False positive for non-production targets.
+    "SwitchIntDef",  # Many C++ enums are not used at all in java.
+    "UniqueConstants",  # Chromium enums allow aliases.
+    "UnusedAttribute",  # Chromium apks have various minSdkVersion values.
+    "ObsoleteLintCustomCheck",  # We have no control over custom lint checks.
+]
+
+# These checks are not useful for test targets, and suppressing them adds an
+# unnecessary burden.
+_DISABLED_FOR_TESTS = [
+    # We should not require test strings.xml files to explicitly add
+    # translatable=false since they are not translated and not used in
+    # production.
+    "MissingTranslation",
+    # Test strings.xml files often have simple names and are not translatable,
+    # so it may conflict with a production string and cause this error.
+    "Untranslatable",
+    # Test targets often use the same strings target and resources target as the
+    # production targets but may not use all of them.
+    "UnusedResources",
+    # TODO(wnwen): Turn this back on, since triggering a crash would require
+    #     running on a device with each of the various minSdkVersions.
+    # Real NewApi violations crash the app, so the only ones that lint catches
+    # but tests still succeed are false positives.
+    "NewApi",
+    # Tests should be allowed to access these methods/classes.
+    "VisibleForTests",
+]
+
+_RES_ZIP_DIR = 'RESZIPS'
+_SRCJAR_DIR = 'SRCJARS'
+_AAR_DIR = 'AARS'
+
+
+def _SrcRelative(path):
+  """Returns relative path to top-level src dir."""
+  return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT)
+
+
+def _GenerateProjectFile(android_manifest,
+                         android_sdk_root,
+                         cache_dir,
+                         sources=None,
+                         classpath=None,
+                         srcjar_sources=None,
+                         resource_sources=None,
+                         custom_lint_jars=None,
+                         custom_annotation_zips=None,
+                         android_sdk_version=None):
+  project = ElementTree.Element('project')
+  root = ElementTree.SubElement(project, 'root')
+  # Run lint from output directory: crbug.com/1115594
+  root.set('dir', os.getcwd())
+  sdk = ElementTree.SubElement(project, 'sdk')
+  # Lint requires that the sdk path be an absolute path.
+  sdk.set('dir', os.path.abspath(android_sdk_root))
+  cache = ElementTree.SubElement(project, 'cache')
+  cache.set('dir', cache_dir)
+  main_module = ElementTree.SubElement(project, 'module')
+  main_module.set('name', 'main')
+  main_module.set('android', 'true')
+  main_module.set('library', 'false')
+  if android_sdk_version:
+    main_module.set('compile_sdk_version', android_sdk_version)
+  manifest = ElementTree.SubElement(main_module, 'manifest')
+  manifest.set('file', android_manifest)
+  if srcjar_sources:
+    for srcjar_file in srcjar_sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', srcjar_file)
+  if sources:
+    for source in sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', source)
+  if classpath:
+    for file_path in classpath:
+      classpath_element = ElementTree.SubElement(main_module, 'classpath')
+      classpath_element.set('file', file_path)
+  if resource_sources:
+    for resource_file in resource_sources:
+      resource = ElementTree.SubElement(main_module, 'resource')
+      resource.set('file', resource_file)
+  if custom_lint_jars:
+    for lint_jar in custom_lint_jars:
+      lint = ElementTree.SubElement(main_module, 'lint-checks')
+      lint.set('file', lint_jar)
+  if custom_annotation_zips:
+    for annotation_zip in custom_annotation_zips:
+      annotation = ElementTree.SubElement(main_module, 'annotations')
+      annotation.set('file', annotation_zip)
+  return project
+
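+# The generated project.xml has roughly this shape (editor's illustration,
+# attribute values elided):
+#   <project>
+#     <root dir="..."/>
+#     <sdk dir="..."/>
+#     <cache dir="..."/>
+#     <module name="main" android="true" library="false">
+#       <manifest file="..."/>
+#       <src file="..."/>
+#       <classpath file="..."/>
+#       <resource file="..."/>
+#     </module>
+#   </project>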
+
+def _RetrieveBackportedMethods(backported_methods_path):
+  with open(backported_methods_path) as f:
+    methods = f.read().splitlines()
+  # Methods look like:
+  #   java/util/Set#of(Ljava/lang/Object;)Ljava/util/Set;
+  # But error message looks like:
+  #   Call requires API level R (current min is 21): java.util.Set#of [NewApi]
+  methods = (m.replace('/', '\\.') for m in methods)
+  methods = (m[:m.index('(')] for m in methods)
+  return sorted(set(methods))
+
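+# For instance, 'java/util/Set#of(Ljava/lang/Object;)Ljava/util/Set;' is
+# rewritten to the regex-ready string r'java\.util\.Set#of' (editor's
+# example, derived from the comment above).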
+
+def _GenerateConfigXmlTree(orig_config_path, backported_methods):
+  if orig_config_path:
+    root_node = ElementTree.parse(orig_config_path).getroot()
+  else:
+    root_node = ElementTree.fromstring('<lint/>')
+
+  issue_node = ElementTree.SubElement(root_node, 'issue')
+  issue_node.attrib['id'] = 'NewApi'
+  ignore_node = ElementTree.SubElement(issue_node, 'ignore')
+  ignore_node.attrib['regexp'] = '|'.join(backported_methods)
+  return root_node
+
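+# The resulting config is roughly (editor's illustration; the method names
+# in the regexp are examples):
+#   <lint>
+#     <issue id="NewApi">
+#       <ignore regexp="java\.util\.List#of|java\.util\.Set#of|..."/>
+#     </issue>
+#   </lint>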
+
+def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths,
+                             min_sdk_version, android_sdk_version):
+  # Set minSdkVersion in the manifest to the correct value.
+  doc, manifest, app_node = manifest_utils.ParseManifest(original_manifest_path)
+
+  # TODO(crbug.com/1126301): Should this be done using manifest merging?
+  # Add anything in the application node of the extra manifests to the main
+  # manifest to prevent unused resource errors.
+  for path in extra_manifest_paths:
+    _, _, extra_app_node = manifest_utils.ParseManifest(path)
+    for node in extra_app_node:
+      app_node.append(node)
+
+  if app_node.find(
+      '{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE) is None:
+    # Assume no backup is intended; this appeases the AllowBackup lint check
+    # and keeps it working for manifests that do define android:allowBackup.
+    app_node.set('{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE, 'false')
+
+  uses_sdk = manifest.find('./uses-sdk')
+  if uses_sdk is None:
+    uses_sdk = ElementTree.Element('uses-sdk')
+    manifest.insert(0, uses_sdk)
+  uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+               min_sdk_version)
+  uses_sdk.set('{%s}targetSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
+               android_sdk_version)
+  return doc
+
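+# After patching, the manifest contains roughly (editor's illustration;
+# the version numbers are hypothetical):
+#   <manifest ...>
+#     <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="30"/>
+#     <application android:allowBackup="false" ...> ... </application>
+#   </manifest>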
+
+def _WriteXmlFile(root, path):
+  logging.info('Writing xml file %s', path)
+  build_utils.MakeDirectory(os.path.dirname(path))
+  with build_utils.AtomicOutput(path) as f:
+    # Although we can write it just with ElementTree.tostring, using minidom
+    # makes it a lot easier to read as a human (also on code search).
+    f.write(
+        minidom.parseString(ElementTree.tostring(
+            root, encoding='utf-8')).toprettyxml(indent='  ').encode('utf-8'))
+
+
+def _RunLint(lint_binary_path,
+             backported_methods_path,
+             config_path,
+             manifest_path,
+             extra_manifest_paths,
+             sources,
+             classpath,
+             cache_dir,
+             android_sdk_version,
+             aars,
+             srcjars,
+             min_sdk_version,
+             resource_sources,
+             resource_zips,
+             android_sdk_root,
+             lint_gen_dir,
+             baseline,
+             testonly_target=False,
+             warnings_as_errors=False):
+  logging.info('Lint starting')
+
+  cmd = [
+      lint_binary_path,
+      '--quiet',  # Silences lint's "." progress updates.
+      '--disable',
+      ','.join(_DISABLED_ALWAYS),
+  ]
+  if baseline:
+    cmd.extend(['--baseline', baseline])
+  if testonly_target:
+    cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])
+
+  if not manifest_path:
+    manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
+                                 'android', 'AndroidManifest.xml')
+
+  logging.info('Generating config.xml')
+  backported_methods = _RetrieveBackportedMethods(backported_methods_path)
+  config_xml_node = _GenerateConfigXmlTree(config_path, backported_methods)
+  generated_config_path = os.path.join(lint_gen_dir, 'config.xml')
+  _WriteXmlFile(config_xml_node, generated_config_path)
+  cmd.extend(['--config', generated_config_path])
+
+  logging.info('Generating Android manifest file')
+  android_manifest_tree = _GenerateAndroidManifest(manifest_path,
+                                                   extra_manifest_paths,
+                                                   min_sdk_version,
+                                                   android_sdk_version)
+  # Include the rebased manifest_path in the lint generated path so that it is
+  # clear in error messages where the original AndroidManifest.xml came from.
+  lint_android_manifest_path = os.path.join(lint_gen_dir, manifest_path)
+  _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)
+
+  resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
+  # These are zip files with generated resources (e.g. strings from GRD).
+  logging.info('Extracting resource zips')
+  for resource_zip in resource_zips:
+    # Use a consistent root and name rather than a temporary file so that
+    # suppressions can be local to the lint target and the resource target.
+    resource_dir = os.path.join(resource_root_dir, resource_zip)
+    shutil.rmtree(resource_dir, True)
+    os.makedirs(resource_dir)
+    resource_sources.extend(
+        build_utils.ExtractAll(resource_zip, path=resource_dir))
+
+  logging.info('Extracting aars')
+  aar_root_dir = os.path.join(lint_gen_dir, _AAR_DIR)
+  custom_lint_jars = []
+  custom_annotation_zips = []
+  if aars:
+    for aar in aars:
+      # Use relative source for aar files since they are not generated.
+      aar_dir = os.path.join(aar_root_dir,
+                             os.path.splitext(_SrcRelative(aar))[0])
+      shutil.rmtree(aar_dir, True)
+      os.makedirs(aar_dir)
+      aar_files = build_utils.ExtractAll(aar, path=aar_dir)
+      for f in aar_files:
+        if f.endswith('lint.jar'):
+          custom_lint_jars.append(f)
+        elif f.endswith('annotations.zip'):
+          custom_annotation_zips.append(f)
+
+  logging.info('Extracting srcjars')
+  srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
+  srcjar_sources = []
+  if srcjars:
+    for srcjar in srcjars:
+      # Use path without extensions since otherwise the file name includes
+      # .srcjar and lint treats it as a srcjar.
+      srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
+      shutil.rmtree(srcjar_dir, True)
+      os.makedirs(srcjar_dir)
+      # Sadly lint's srcjar support is broken since it only considers the first
+      # srcjar. Until we roll a lint version with that fixed, we need to extract
+      # it ourselves.
+      srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))
+
+  logging.info('Generating project file')
+  project_file_root = _GenerateProjectFile(lint_android_manifest_path,
+                                           android_sdk_root, cache_dir, sources,
+                                           classpath, srcjar_sources,
+                                           resource_sources, custom_lint_jars,
+                                           custom_annotation_zips,
+                                           android_sdk_version)
+
+  project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
+  _WriteXmlFile(project_file_root, project_xml_path)
+  cmd += ['--project', project_xml_path]
+
+  logging.info('Preparing environment variables')
+  env = os.environ.copy()
+  # It is important that lint uses the checked-in JDK11 as it is almost 50%
+  # faster than JDK8.
+  env['JAVA_HOME'] = build_utils.JAVA_HOME
+  # This is necessary so that lint errors print stack traces in stdout.
+  env['LINT_PRINT_STACKTRACE'] = 'true'
+  if baseline and not os.path.exists(baseline):
+    # Generating new baselines is only done locally, and requires more memory to
+    # avoid OOMs.
+    env['LINT_OPTS'] = '-Xmx4g'
+  else:
+    # The default set in the wrapper script is 1g, but that does not seem to
+    # be enough :(
+    env['LINT_OPTS'] = '-Xmx2g'
+
+  # This filter is necessary for JDK11.
+  stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
+  stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found')
+
+  start = time.time()
+  logging.debug('Lint command %s', ' '.join(cmd))
+  failed = True
+  try:
+    failed = bool(
+        build_utils.CheckOutput(cmd,
+                                env=env,
+                                print_stdout=True,
+                                stdout_filter=stdout_filter,
+                                stderr_filter=stderr_filter,
+                                fail_on_output=warnings_as_errors))
+  finally:
+    # When not treating warnings as errors, display the extra footer.
+    is_debug = os.environ.get('LINT_DEBUG', '0') != '0'
+
+    if failed:
+      print('- For more help with lint in Chrome:', _LINT_MD_URL)
+      if is_debug:
+        print('- DEBUG MODE: Here is the project.xml: {}'.format(
+            _SrcRelative(project_xml_path)))
+      else:
+        print('- Run with LINT_DEBUG=1 to enable lint configuration debugging')
+
+    end = time.time() - start
+    logging.info('Lint command took %ss', end)
+    if not is_debug:
+      shutil.rmtree(aar_root_dir, ignore_errors=True)
+      shutil.rmtree(resource_root_dir, ignore_errors=True)
+      shutil.rmtree(srcjar_root_dir, ignore_errors=True)
+      os.unlink(project_xml_path)
+
+  logging.info('Lint completed')
+
+
+def _ParseArgs(argv):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--target-name', help='Fully qualified GN target name.')
+  parser.add_argument('--skip-build-server',
+                      action='store_true',
+                      help='Avoid using the build server.')
+  parser.add_argument('--lint-binary-path',
+                      required=True,
+                      help='Path to lint executable.')
+  parser.add_argument('--backported-methods',
+                      help='Path to backported methods file created by R8.')
+  parser.add_argument('--cache-dir',
+                      required=True,
+                      help='Path to the directory in which the android cache '
+                      'directory tree should be stored.')
+  parser.add_argument('--config-path', help='Path to lint suppressions file.')
+  parser.add_argument('--lint-gen-dir',
+                      required=True,
+                      help='Path to store generated xml files.')
+  parser.add_argument('--stamp', help='Path to stamp upon success.')
+  parser.add_argument('--android-sdk-version',
+                      help='Version (API level) of the Android SDK used for '
+                      'building.')
+  parser.add_argument('--min-sdk-version',
+                      required=True,
+                      help='Minimal SDK version to lint against.')
+  parser.add_argument('--android-sdk-root',
+                      required=True,
+                      help='Lint needs an explicit path to the android sdk.')
+  parser.add_argument('--testonly',
+                      action='store_true',
+                      help='If set, some checks like UnusedResources will be '
+                      'disabled since they are not helpful for test '
+                      'targets.')
+  parser.add_argument('--create-cache',
+                      action='store_true',
+                      help='Whether this invocation is just warming the cache.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--java-sources',
+                      help='File containing a list of java sources files.')
+  parser.add_argument('--aars', help='GN list of included aars.')
+  parser.add_argument('--srcjars', help='GN list of included srcjars.')
+  parser.add_argument('--manifest-path',
+                      help='Path to original AndroidManifest.xml')
+  parser.add_argument('--extra-manifest-paths',
+                      action='append',
+                      help='GYP-list of manifest paths to merge into the '
+                      'original AndroidManifest.xml')
+  parser.add_argument('--resource-sources',
+                      default=[],
+                      action='append',
+                      help='GYP-list of resource sources files, similar to '
+                      'java sources files, but for resource files.')
+  parser.add_argument('--resource-zips',
+                      default=[],
+                      action='append',
+                      help='GYP-list of resource zips, zip files of generated '
+                      'resource files.')
+  parser.add_argument('--classpath',
+                      help='List of jars to add to the classpath.')
+  parser.add_argument('--baseline',
+                      help='Baseline file to ignore existing errors and fail '
+                      'on new errors.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(argv))
+  args.java_sources = build_utils.ParseGnList(args.java_sources)
+  args.aars = build_utils.ParseGnList(args.aars)
+  args.srcjars = build_utils.ParseGnList(args.srcjars)
+  args.resource_sources = build_utils.ParseGnList(args.resource_sources)
+  args.extra_manifest_paths = build_utils.ParseGnList(args.extra_manifest_paths)
+  args.resource_zips = build_utils.ParseGnList(args.resource_zips)
+  args.classpath = build_utils.ParseGnList(args.classpath)
+  return args
+
+
+def main():
+  build_utils.InitLogging('LINT_DEBUG')
+  args = _ParseArgs(sys.argv[1:])
+
+  # TODO(wnwen): Consider removing lint cache now that there are only two lint
+  #              invocations.
+  # Avoid parallelizing cache creation, since running lint without the cache
+  # would defeat the purpose of creating the cache in the first place.
+  if (not args.create_cache and not args.skip_build_server
+      and server_utils.MaybeRunCommand(
+          name=args.target_name, argv=sys.argv, stamp_file=args.stamp)):
+    return
+
+  sources = []
+  for java_sources_file in args.java_sources:
+    sources.extend(build_utils.ReadSourcesList(java_sources_file))
+  resource_sources = []
+  for resource_sources_file in args.resource_sources:
+    resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))
+
+  possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
+                           resource_sources + [
+                               args.baseline,
+                               args.manifest_path,
+                           ])
+  depfile_deps = [p for p in possible_depfile_deps if p]
+
+  _RunLint(args.lint_binary_path,
+           args.backported_methods,
+           args.config_path,
+           args.manifest_path,
+           args.extra_manifest_paths,
+           sources,
+           args.classpath,
+           args.cache_dir,
+           args.android_sdk_version,
+           args.aars,
+           args.srcjars,
+           args.min_sdk_version,
+           resource_sources,
+           args.resource_zips,
+           args.android_sdk_root,
+           args.lint_gen_dir,
+           args.baseline,
+           testonly_target=args.testonly,
+           warnings_as_errors=args.warnings_as_errors)
+  logging.info('Creating stamp file')
+  build_utils.Touch(args.stamp)
+
+  if args.depfile:
+    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/lint.pydeps b/src/build/android/gyp/lint.pydeps
new file mode 100644
index 0000000..0994e19
--- /dev/null
+++ b/src/build/android/gyp/lint.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../gn_helpers.py
+lint.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/server_utils.py
diff --git a/src/build/android/gyp/merge_manifest.py b/src/build/android/gyp/merge_manifest.py
new file mode 100755
index 0000000..53f1c11
--- /dev/null
+++ b/src/build/android/gyp/merge_manifest.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges dependency Android manifests into a root manifest."""
+
+import argparse
+import contextlib
+import os
+import sys
+import tempfile
+import xml.etree.ElementTree as ElementTree
+
+from util import build_utils
+from util import manifest_utils
+
+_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
+_MANIFEST_MERGER_JARS = [
+    os.path.join('build-system', 'manifest-merger.jar'),
+    os.path.join('common', 'common.jar'),
+    os.path.join('sdk-common', 'sdk-common.jar'),
+    os.path.join('sdklib', 'sdklib.jar'),
+    os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
+                 'guava-28.1-jre.jar'),
+    os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
+                 'kotlin-stdlib.jar'),
+    os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.5',
+                 'gson-2.8.5.jar'),
+]
+
+
+@contextlib.contextmanager
+def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version,
+                     max_sdk_version, manifest_package):
+  """Patches an Android manifest's package and performs assertions to ensure
+  correctness for the manifest.
+  """
+  doc, manifest, _ = manifest_utils.ParseManifest(manifest_path)
+  manifest_utils.AssertUsesSdk(manifest, min_sdk_version, target_sdk_version,
+                               max_sdk_version)
+  assert manifest_utils.GetPackage(manifest) or manifest_package, \
+            'Must set manifest package in GN or in AndroidManifest.xml'
+  manifest_utils.AssertPackage(manifest, manifest_package)
+  if manifest_package:
+    manifest.set('package', manifest_package)
+  tmp_prefix = os.path.basename(manifest_path)
+  with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
+    manifest_utils.SaveManifest(doc, patched_manifest.name)
+    yield patched_manifest.name, manifest_utils.GetPackage(manifest)
+
+
+def _BuildManifestMergerClasspath(android_sdk_cmdline_tools):
+  return ':'.join([
+      os.path.join(android_sdk_cmdline_tools, 'lib', jar)
+      for jar in _MANIFEST_MERGER_JARS
+  ])
+
+
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--android-sdk-cmdline-tools',
+      help='Path to SDK\'s cmdline-tools folder.',
+      required=True)
+  parser.add_argument('--root-manifest',
+                      help='Root manifest which to merge into',
+                      required=True)
+  parser.add_argument('--output', help='Output manifest path', required=True)
+  parser.add_argument('--extras',
+                      help='GN list of additional manifests to merge')
+  parser.add_argument(
+      '--min-sdk-version',
+      required=True,
+      help='android:minSdkVersion for merging.')
+  parser.add_argument(
+      '--target-sdk-version',
+      required=True,
+      help='android:targetSdkVersion for merging.')
+  parser.add_argument(
+      '--max-sdk-version', help='android:maxSdkVersion for merging.')
+  parser.add_argument(
+      '--manifest-package',
+      help='Package name of the merged AndroidManifest.xml.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  args = parser.parse_args(argv)
+
+  classpath = _BuildManifestMergerClasspath(args.android_sdk_cmdline_tools)
+
+  with build_utils.AtomicOutput(args.output) as output:
+    cmd = build_utils.JavaCmd(args.warnings_as_errors) + [
+        '-cp',
+        classpath,
+        _MANIFEST_MERGER_MAIN_CLASS,
+        '--out',
+        output.name,
+        '--property',
+        'MIN_SDK_VERSION=' + args.min_sdk_version,
+        '--property',
+        'TARGET_SDK_VERSION=' + args.target_sdk_version,
+    ]
+
+    if args.max_sdk_version:
+      cmd += [
+          '--property',
+          'MAX_SDK_VERSION=' + args.max_sdk_version,
+      ]
+
+    extras = build_utils.ParseGnList(args.extras)
+    if extras:
+      cmd += ['--libs', ':'.join(extras)]
+
+    with _ProcessManifest(args.root_manifest, args.min_sdk_version,
+                          args.target_sdk_version, args.max_sdk_version,
+                          args.manifest_package) as tup:
+      root_manifest, package = tup
+      cmd += [
+          '--main',
+          root_manifest,
+          '--property',
+          'PACKAGE=' + package,
+      ]
+      build_utils.CheckOutput(
+          cmd,
+          # https://issuetracker.google.com/issues/63514300:
+          # The merger doesn't set a nonzero exit code for failures.
+          fail_func=lambda returncode, stderr: returncode != 0 or
+          build_utils.IsTimeStale(output.name, [root_manifest] + extras),
+          fail_on_output=args.warnings_as_errors)
+
+    # Check for correct output.
+    _, manifest, _ = manifest_utils.ParseManifest(output.name)
+    manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version,
+                                 args.target_sdk_version)
+    manifest_utils.AssertPackage(manifest, package)
+
+  if args.depfile:
+    inputs = extras + classpath.split(':')
+    build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/merge_manifest.pydeps b/src/build/android/gyp/merge_manifest.pydeps
new file mode 100644
index 0000000..ef9bb34
--- /dev/null
+++ b/src/build/android/gyp/merge_manifest.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../gn_helpers.py
+merge_manifest.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
diff --git a/src/build/android/gyp/native_libraries_template.py b/src/build/android/gyp/native_libraries_template.py
new file mode 100644
index 0000000..cf336ec
--- /dev/null
+++ b/src/build/android/gyp/native_libraries_template.py
@@ -0,0 +1,39 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+NATIVE_LIBRARIES_TEMPLATE = """\
+// This file is autogenerated by
+//     build/android/gyp/write_native_libraries_java.py
+// Please do not change its content.
+
+package org.chromium.build;
+
+public class NativeLibraries {{
+    public static final int CPU_FAMILY_UNKNOWN = 0;
+    public static final int CPU_FAMILY_ARM = 1;
+    public static final int CPU_FAMILY_MIPS = 2;
+    public static final int CPU_FAMILY_X86 = 3;
+
+    // Set to true to enable the use of the Chromium Linker.
+    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
+    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
+    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
+
+    // This is the list of native libraries to be loaded (in the correct order)
+    // by LibraryLoader.java.
+    // TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java
+    // directly. The two ways of library loading should be refactored into one.
+    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
+
+    // This is the expected version of the 'main' native library, which is the one that
+    // implements the initial set of base JNI functions including
+    // base::android::nativeGetVersionName()
+    // TODO(torne): This is public to work around classloader issues in Trichrome
+    // where NativeLibraries is not in the same dex as LibraryLoader.
+    // We should instead split up Java code along package boundaries.
+    public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER};
+
+    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
+}}
+"""
diff --git a/src/build/android/gyp/nocompile_test.py b/src/build/android/gyp/nocompile_test.py
new file mode 100755
index 0000000..a5739f1
--- /dev/null
+++ b/src/build/android/gyp/nocompile_test.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Checks that compiling targets in BUILD.gn file fails."""
+
+import argparse
+import json
+import os
+import subprocess
+import re
+import sys
+from util import build_utils
+
+_CHROMIUM_SRC = os.path.normpath(os.path.join(__file__, '..', '..', '..', '..'))
+_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'depot_tools', 'ninja')
+
+# Relative to _CHROMIUM_SRC
+_GN_SRC_REL_PATH = os.path.join('third_party', 'depot_tools', 'gn')
+
+
+def _raise_command_exception(args, returncode, output):
+  """Raises an exception whose message describes a command failure.
+
+  Args:
+    args: shell command-line (as passed to subprocess.Popen()).
+    returncode: status code.
+    output: command output.
+  Raises:
+    a new Exception.
+  """
+  message = 'Command failed with status {}: {}\n' \
+      'Output:-----------------------------------------\n{}\n' \
+      '------------------------------------------------\n'.format(
+          returncode, args, output)
+  raise Exception(message)
+
+
+def _run_command(args, cwd=None):
+  """Runs shell command. Raises exception if command fails."""
+  p = subprocess.Popen(args,
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.STDOUT,
+                       cwd=cwd)
+  pout, _ = p.communicate()
+  if p.returncode != 0:
+    _raise_command_exception(args, p.returncode, pout)
+
+
+def _run_command_get_output(args, success_output):
+  """Runs shell command and returns command output."""
+  p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  pout, _ = p.communicate()
+  if p.returncode == 0:
+    return success_output
+
+  # For Python3 only:
+  if isinstance(pout, bytes) and sys.version_info >= (3, ):
+    pout = pout.decode('utf-8')
+  return pout
+
+
+def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args):
+  """Copies args.gn.
+
+  Args:
+    src_args_path: args.gn file to copy.
+    dest_args_path: Copy file destination.
+    extra_args: Text to append to args.gn after copy.
+  """
+  with open(src_args_path) as f_in, open(dest_args_path, 'w') as f_out:
+    f_out.write(f_in.read())
+    f_out.write('\n')
+    f_out.write('\n'.join(extra_args))
+
+
+def _find_lines_after_prefix(text, prefix, num_lines):
+  """Searches |text| for a line which starts with |prefix|.
+
+  Args:
+    text: String to search in.
+    prefix: Prefix to search for.
+    num_lines: Number of lines, starting with line with prefix, to return.
+  Returns:
+    The matched lines, or None if no line starts with |prefix|.
+  """
+  lines = text.split('\n')
+  for i, line in enumerate(lines):
+    if line.startswith(prefix):
+      return lines[i:i + num_lines]
+  return None
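+# A minimal illustration with made-up input: searching the text
+#   'ok\nFAILED: foo\n  error: bar\n'
+# for prefix 'FAILED:' with num_lines=2 yields
+#   ['FAILED: foo', '  error: bar'].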
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--gn-args-path',
+                      required=True,
+                      help='Path to args.gn file.')
+  parser.add_argument('--test-configs-path',
+                      required=True,
+                      help='Path to file with test configurations')
+  parser.add_argument('--out-dir',
+                      required=True,
+                      help='Path to output directory to use for compilation.')
+  parser.add_argument('--stamp', help='Path to touch.')
+  options = parser.parse_args()
+
+  with open(options.test_configs_path) as f:
+    test_configs = json.loads(f.read())
+
+  if not os.path.exists(options.out_dir):
+    os.makedirs(options.out_dir)
+
+  out_gn_args_path = os.path.join(options.out_dir, 'args.gn')
+  extra_gn_args = [
+      'enable_android_nocompile_tests = true',
+      'treat_warnings_as_errors = true',
+      # GOMA does not work with non-standard output directories.
+      'use_goma = false',
+  ]
+  _copy_and_append_gn_args(options.gn_args_path, out_gn_args_path,
+                           extra_gn_args)
+
+  # As all of the test targets are declared in the same BUILD.gn file, it does
+  # not matter which test target is used as the root target.
+  gn_args = [
+      _GN_SRC_REL_PATH, '--root-target=' + test_configs[0]['target'], 'gen',
+      os.path.relpath(options.out_dir, _CHROMIUM_SRC)
+  ]
+  _run_command(gn_args, cwd=_CHROMIUM_SRC)
+
+  error_messages = []
+  for config in test_configs:
+    # Strip leading '//'
+    gn_path = config['target'][2:]
+    expect_regex = config['expect_regex']
+    ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path]
+
+    # The quotes at the beginning of the message make it clear that
+    # "Compile successful." is not a compiler log message.
+    test_output = _run_command_get_output(ninja_args, '""\nCompile successful.')
+
+    failure_message_lines = _find_lines_after_prefix(test_output, 'FAILED:', 5)
+
+    found_expect_regex = False
+    if failure_message_lines:
+      for line in failure_message_lines:
+        if re.search(expect_regex, line):
+          found_expect_regex = True
+          break
+    if not found_expect_regex:
+      error_message = '//{} failed.\nExpected compile output pattern:\n'\
+          '{}\nActual compile output:\n{}'.format(
+              gn_path, expect_regex, test_output)
+      error_messages.append(error_message)
+
+  if error_messages:
+    raise Exception('\n'.join(error_messages))
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/prepare_resources.py b/src/build/android/gyp/prepare_resources.py
new file mode 100755
index 0000000..93fe9f9
--- /dev/null
+++ b/src/build/android/gyp/prepare_resources.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip and R.txt
+files."""
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+from util import md5_check
+from util import resources_parser
+from util import resource_utils
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_argument('--res-sources-path',
+                      required=True,
+                      help='Path to a list of input resources for this target.')
+
+  parser.add_argument(
+      '--r-text-in',
+      help='Path to pre-existing R.txt. Its resource IDs override those found '
+      'in the generated R.txt when generating R.java.')
+
+  parser.add_argument(
+      '--resource-zip-out',
+      help='Path to a zip archive containing all resources from '
+      '--resource-dirs, merged into a single directory tree.')
+
+  parser.add_argument('--r-text-out',
+                      help='Path to store the generated R.txt file.')
+
+  parser.add_argument('--strip-drawables',
+                      action="store_true",
+                      help='Remove drawables from the resources.')
+
+  options = parser.parse_args(args)
+
+  with open(options.res_sources_path) as f:
+    options.sources = f.read().splitlines()
+  options.resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
+      options.sources)
+
+  return options
+
+
+def _CheckAllFilesListed(resource_files, resource_dirs):
+  resource_files = set(resource_files)
+  missing_files = []
+  for path, _ in resource_utils.IterResourceFilesInDirectories(resource_dirs):
+    if path not in resource_files:
+      missing_files.append(path)
+
+  if missing_files:
+    sys.stderr.write('Error: Found files not listed in the sources list of '
+                     'the BUILD.gn target:\n')
+    for path in missing_files:
+      sys.stderr.write('{}\n'.format(path))
+    sys.exit(1)
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+  # ignore_pattern is a ':'-delimited list of globs matching files that
+  # should not be part of the final resource zip.
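+  # For example, a hypothetical ignore_pattern of '*.pyc:*~:OWNERS' would
+  # skip compiled Python files, editor backups, and OWNERS files.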
+  files_to_zip = []
+  path_info = resource_utils.ResourceInfoFile()
+  for index, resource_dir in enumerate(resource_dirs):
+    attributed_aar = None
+    if not resource_dir.startswith('..'):
+      aar_source_info_path = os.path.join(
+          os.path.dirname(resource_dir), 'source.info')
+      if os.path.exists(aar_source_info_path):
+        attributed_aar = jar_info_utils.ReadAarSourceInfo(aar_source_info_path)
+
+    for path, archive_path in resource_utils.IterResourceFilesInDirectories(
+        [resource_dir], ignore_pattern):
+      attributed_path = path
+      if attributed_aar:
+        attributed_path = os.path.join(attributed_aar, 'res',
+                                       path[len(resource_dir) + 1:])
+      # Use the non-prefixed archive_path in the .info file.
+      path_info.AddMapping(archive_path, attributed_path)
+
+      resource_dir_name = os.path.basename(resource_dir)
+      archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path)
+      files_to_zip.append((archive_path, path))
+
+  path_info.Write(zip_path + '.info')
+
+  with zipfile.ZipFile(zip_path, 'w') as z:
+    # This magic comment signals to resource_utils.ExtractDeps that this zip is
+    # not just the contents of a single res dir, without the encapsulating res/
+    # (like the outputs of android_generated_resources targets), but instead has
+    # the contents of possibly multiple res/ dirs each within an encapsulating
+    # directory within the zip.
+    z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
+    build_utils.DoZip(files_to_zip, z)
+
+
+def _GenerateRTxt(options, r_txt_path):
+  """Generate R.txt file.
+
+  Args:
+    options: The command-line options tuple.
+    r_txt_path: Path where the R.txt file should be written.
+  """
+  ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+  if options.strip_drawables:
+    ignore_pattern += ':*drawable*'
+
+  resources_parser.RTxtGenerator(options.resource_dirs,
+                                 ignore_pattern).WriteRTxtFile(r_txt_path)
+
+
+def _OnStaleMd5(options):
+  with resource_utils.BuildContext() as build:
+    if options.sources:
+      _CheckAllFilesListed(options.sources, options.resource_dirs)
+    if options.r_text_in:
+      r_txt_path = options.r_text_in
+    else:
+      _GenerateRTxt(options, build.r_txt_path)
+      r_txt_path = build.r_txt_path
+
+    if options.r_text_out:
+      shutil.copyfile(r_txt_path, options.r_text_out)
+
+    if options.resource_zip_out:
+      ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+      if options.strip_drawables:
+        ignore_pattern += ':*drawable*'
+      _ZipResources(options.resource_dirs, options.resource_zip_out,
+                    ignore_pattern)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Order of these must match order specified in GN so that the correct one
+  # appears first in the depfile.
+  output_paths = [
+      options.resource_zip_out,
+      options.resource_zip_out + '.info',
+      options.r_text_out,
+  ]
+
+  input_paths = [options.res_sources_path]
+  if options.r_text_in:
+    input_paths += [options.r_text_in]
+
+  # Resource files aren't explicitly listed in GN. Listing them in the depfile
+  # ensures the target will be marked stale when resource files are removed.
+  depfile_deps = []
+  resource_names = []
+  for resource_dir in options.resource_dirs:
+    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+      # Don't list the empty .keep file in depfile. Since it doesn't end up
+      # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+      # if ever moved.
+      if not resource_file.endswith(os.path.join('empty', '.keep')):
+        input_paths.append(resource_file)
+        depfile_deps.append(resource_file)
+      resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+  # Resource filenames matter to the output, so add them to input_strings as
+  # well. This matters if a file is renamed but not changed
+  # (http://crbug.com/597126).
+  input_strings = sorted(resource_names) + [
+      options.strip_drawables,
+  ]
+
+  # Since android_resources targets like *__all_dfm_resources depend on java
+  # targets that they do not need (in reality it only needs the transitive
+  # resource targets that those java targets depend on), md5_check is used to
+  # prevent outputs from being re-written when real inputs have not changed.
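+  # (Conceptually, assuming md5_check's usual stale-check behavior: the
+  # lambda below runs only when a stored hash of input_paths/input_strings
+  # no longer matches, and the depfile is written alongside the outputs.)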
+  md5_check.CallAndWriteDepfileIfStale(lambda: _OnStaleMd5(options),
+                                       options,
+                                       input_paths=input_paths,
+                                       input_strings=input_strings,
+                                       output_paths=output_paths,
+                                       depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/prepare_resources.pydeps b/src/build/android/gyp/prepare_resources.pydeps
new file mode 100644
index 0000000..b225918
--- /dev/null
+++ b/src/build/android/gyp/prepare_resources.pydeps
@@ -0,0 +1,34 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+prepare_resources.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/src/build/android/gyp/process_native_prebuilt.py b/src/build/android/gyp/process_native_prebuilt.py
new file mode 100755
index 0000000..52645d9
--- /dev/null
+++ b/src/build/android/gyp/process_native_prebuilt.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def main(args):
+  parser = argparse.ArgumentParser(args)
+  parser.add_argument('--strip-path', required=True, help='')
+  parser.add_argument('--input-path', required=True, help='')
+  parser.add_argument('--stripped-output-path', required=True, help='')
+  parser.add_argument('--unstripped-output-path', required=True, help='')
+  options = parser.parse_args(args)
+
+  # eu-strip's output keeps mode from source file which might not be writable
+  # thus it fails to override its output on the next run. AtomicOutput fixes
+  # the issue.
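+  # With hypothetical flags --strip-path=eu-strip --input-path=libfoo.so,
+  # the command below amounts to roughly:
+  #   eu-strip libfoo.so -o <temp file>
+  # after which AtomicOutput moves the temp file into place.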
+  with build_utils.AtomicOutput(options.stripped_output_path) as out:
+    cmd = [
+        options.strip_path,
+        options.input_path,
+        '-o',
+        out.name,
+    ]
+    build_utils.CheckOutput(cmd)
+  shutil.copyfile(options.input_path, options.unstripped_output_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/process_native_prebuilt.pydeps b/src/build/android/gyp/process_native_prebuilt.pydeps
new file mode 100644
index 0000000..8e2012a
--- /dev/null
+++ b/src/build/android/gyp/process_native_prebuilt.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py
+../../gn_helpers.py
+process_native_prebuilt.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/proguard.py b/src/build/android/gyp/proguard.py
new file mode 100755
index 0000000..7f59769
--- /dev/null
+++ b/src/build/android/gyp/proguard.py
@@ -0,0 +1,722 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+from collections import defaultdict
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import dex
+import dex_jdk_libs
+from pylib.dex import dex_parser
+from util import build_utils
+from util import diff_utils
+
+_API_LEVEL_VERSION_CODE = [
+    (21, 'L'),
+    (22, 'LollipopMR1'),
+    (23, 'M'),
+    (24, 'N'),
+    (25, 'NMR1'),
+    (26, 'O'),
+    (27, 'OMR1'),
+    (28, 'P'),
+    (29, 'Q'),
+    (30, 'R'),
+    (31, 'S'),
+]
+
+
+def _ParseOptions():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--r8-path',
+                      required=True,
+                      help='Path to the R8.jar to use.')
+  parser.add_argument(
+      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+  parser.add_argument('--input-paths',
+                      action='append',
+                      required=True,
+                      help='GN-list of .jar files to optimize.')
+  parser.add_argument('--desugar-jdk-libs-jar',
+                      help='Path to desugar_jdk_libs.jar.')
+  parser.add_argument('--desugar-jdk-libs-configuration-jar',
+                      help='Path to desugar_jdk_libs_configuration.jar.')
+  parser.add_argument('--output-path', help='Path to the generated .jar file.')
+  parser.add_argument(
+      '--proguard-configs',
+      action='append',
+      required=True,
+      help='GN-list of configuration files.')
+  parser.add_argument(
+      '--apply-mapping', help='Path to ProGuard mapping to apply.')
+  parser.add_argument(
+      '--mapping-output',
+      required=True,
+      help='Path for ProGuard to output mapping file to.')
+  parser.add_argument(
+      '--extra-mapping-output-paths',
+      help='GN-list of additional paths to copy output mapping file to.')
+  parser.add_argument(
+      '--classpath',
+      action='append',
+      help='GN-list of .jar files to include as libraries.')
+  parser.add_argument('--main-dex-rules-path',
+                      action='append',
+                      help='Path to main dex rules for multidex.')
+  parser.add_argument(
+      '--min-api', help='Minimum Android API level compatibility.')
+  parser.add_argument('--enable-obfuscation',
+                      action='store_true',
+                      help='Minify symbol names')
+  parser.add_argument(
+      '--verbose', '-v', action='store_true', help='Print all ProGuard output')
+  parser.add_argument(
+      '--repackage-classes',
+      help='Package that all optimized classes are put in.')
+  parser.add_argument(
+      '--disable-outlining',
+      action='store_true',
+      help='Disable the outlining optimization provided by R8.')
+  parser.add_argument(
+      '--disable-checks',
+      action='store_true',
+      help='Disable -checkdiscard directives and missing symbols check')
+  parser.add_argument('--sourcefile', help='Value for source file attribute')
+  parser.add_argument(
+      '--force-enable-assertions',
+      action='store_true',
+      help='Forcefully enable javac generated assertion code.')
+  parser.add_argument(
+      '--feature-jars',
+      action='append',
+      help='GN-list of paths to jars which comprise the corresponding '
+      'feature.')
+  parser.add_argument(
+      '--dex-dest',
+      action='append',
+      dest='dex_dests',
+      help='Destination for dex file of the corresponding feature.')
+  parser.add_argument(
+      '--feature-name',
+      action='append',
+      dest='feature_names',
+      help='The name of the feature module.')
+  parser.add_argument(
+      '--uses-split',
+      action='append',
+      help='List of name pairs separated by : mapping a feature module to a '
+      'dependent feature module.')
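+  # Example with hypothetical module names: --uses-split=image_editor:base
+  # declares that the "image_editor" feature module depends on "base".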
+  parser.add_argument(
+      '--keep-rules-targets-regex',
+      metavar='KEEP_RULES_REGEX',
+      help='If passed, outputs keep rules for references from all other '
+      'inputs to the subset of inputs that match KEEP_RULES_REGEX.')
+  parser.add_argument(
+      '--keep-rules-output-path',
+      help='Output path to the keep rules for references to the '
+      '--keep-rules-targets-regex inputs from the rest of the inputs.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--show-desugar-default-interface-warnings',
+                      action='store_true',
+                      help='Enable desugaring warnings.')
+  parser.add_argument('--dump-inputs',
+                      action='store_true',
+                      help='Use when filing R8 bugs to capture inputs.'
+                      ' Stores inputs to r8inputs.zip')
+  parser.add_argument(
+      '--stamp',
+      help='File to touch upon success. Mutually exclusive with --output-path')
+  parser.add_argument('--desugared-library-keep-rule-output',
+                      help='Path to desugared library keep rule output file.')
+
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+
+  if options.feature_names:
+    if options.output_path:
+      parser.error('Feature splits cannot specify an output in GN.')
+    if not options.actual_file and not options.stamp:
+      parser.error('Feature splits require a stamp file as output.')
+  elif not options.output_path:
+    parser.error('Output path required when feature splits aren\'t used.')
+
+  if bool(options.keep_rules_targets_regex) != bool(
+      options.keep_rules_output_path):
+    raise Exception('You must pass both --keep-rules-targets-regex and '
+                    '--keep-rules-output-path')
+
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  options.input_paths = build_utils.ParseGnList(options.input_paths)
+  options.extra_mapping_output_paths = build_utils.ParseGnList(
+      options.extra_mapping_output_paths)
+
+  if options.feature_names:
+    if 'base' not in options.feature_names:
+      parser.error('"base" feature required when feature arguments are used.')
+    if len(options.feature_names) != len(options.feature_jars) or len(
+        options.feature_names) != len(options.dex_dests):
+      parser.error('Invalid feature argument lengths.')
+
+    options.feature_jars = [
+        build_utils.ParseGnList(x) for x in options.feature_jars
+    ]
+
+  split_map = {}
+  if options.uses_split:
+    for split_pair in options.uses_split:
+      child, parent = split_pair.split(':')
+      for name in (child, parent):
+        if name not in options.feature_names:
+          parser.error('"%s" referenced in --uses-split not present.' % name)
+      split_map[child] = parent
+  options.uses_split = split_map
+
+  return options
+
+
+class _SplitContext(object):
+  def __init__(self, name, output_path, input_jars, work_dir, parent_name=None):
+    self.name = name
+    self.parent_name = parent_name
+    self.input_jars = set(input_jars)
+    self.final_output_path = output_path
+    self.staging_dir = os.path.join(work_dir, name)
+    os.mkdir(self.staging_dir)
+
+  def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
+    found_files = build_utils.FindInDirectory(self.staging_dir)
+    if not found_files:
+      raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
+
+    if self.final_output_path.endswith('.dex'):
+      if has_imported_lib:
+        raise Exception(
+            'Trying to create a single .dex file, but a dependency requires '
+            'JDK Library Desugaring (which necessitates a second file). '
+            'Refer to %s to see what desugaring was required' %
+            keep_rule_output)
+      if len(found_files) != 1:
+        raise Exception('Expected exactly 1 dex file output, found: {}'.format(
+            '\t'.join(found_files)))
+      shutil.move(found_files[0], self.final_output_path)
+      return
+
+    # Add to .jar using Python rather than having R8 output to a .zip directly
+    # in order to disable compression of the .jar, saving ~500ms.
+    tmp_jar_output = self.staging_dir + '.jar'
+    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+    shutil.move(tmp_jar_output, self.final_output_path)
+
+
+def _DeDupeInputJars(split_contexts_by_name):
+  """Moves jars used by multiple splits into common ancestors.
+
+  Updates |input_jars| for each _SplitContext.
+  """
+
+  def count_ancestors(split_context):
+    ret = 0
+    if split_context.parent_name:
+      ret += 1
+      ret += count_ancestors(split_contexts_by_name[split_context.parent_name])
+    return ret
+
+  base_context = split_contexts_by_name['base']
+  # Sort by tree depth to ensure children are visited before their parents.
+  sorted_contexts = list(split_contexts_by_name.values())
+  sorted_contexts.remove(base_context)
+  sorted_contexts.sort(key=count_ancestors, reverse=True)
+
+  # If a jar is present in multiple siblings, promote it to their parent.
+  seen_jars_by_parent = defaultdict(set)
+  for split_context in sorted_contexts:
+    seen_jars = seen_jars_by_parent[split_context.parent_name]
+    new_dupes = seen_jars.intersection(split_context.input_jars)
+    parent_context = split_contexts_by_name[split_context.parent_name]
+    parent_context.input_jars.update(new_dupes)
+    seen_jars.update(split_context.input_jars)
+
+  def ancestor_jars(parent_name, dest=None):
+    dest = dest or set()
+    if not parent_name:
+      return dest
+    parent_context = split_contexts_by_name[parent_name]
+    dest.update(parent_context.input_jars)
+    return ancestor_jars(parent_context.parent_name, dest)
+
+  # Now that jars have been moved up the tree, remove those that appear in
+  # ancestors.
+  for split_context in sorted_contexts:
+    split_context.input_jars -= ancestor_jars(split_context.parent_name)
+
+
+def _OptimizeWithR8(options,
+                    config_paths,
+                    libraries,
+                    dynamic_config_data,
+                    print_stdout=False):
+  with build_utils.TempDir() as tmp_dir:
+    if dynamic_config_data:
+      dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
+      with open(dynamic_config_path, 'w') as f:
+        f.write(dynamic_config_data)
+      config_paths = config_paths + [dynamic_config_path]
+
+    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
+    # If there is no output (no classes are kept), this prevents this script
+    # from failing.
+    build_utils.Touch(tmp_mapping_path)
+
+    tmp_output = os.path.join(tmp_dir, 'r8out')
+    os.mkdir(tmp_output)
+
+    split_contexts_by_name = {}
+    if options.feature_names:
+      for name, dest_dex, input_jars in zip(options.feature_names,
+                                            options.dex_dests,
+                                            options.feature_jars):
+        parent_name = options.uses_split.get(name)
+        if parent_name is None and name != 'base':
+          parent_name = 'base'
+        split_context = _SplitContext(name,
+                                      dest_dex,
+                                      input_jars,
+                                      tmp_output,
+                                      parent_name=parent_name)
+        split_contexts_by_name[name] = split_context
+    else:
+      # Base context will get populated via "extra_jars" below.
+      split_contexts_by_name['base'] = _SplitContext('base',
+                                                     options.output_path, [],
+                                                     tmp_output)
+    base_context = split_contexts_by_name['base']
+
+    # R8 OOMs with the default xmx=1G.
+    cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
+        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
+        '-Dcom.android.tools.r8.verticalClassMerging=1',
+        '-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
+    ]
+    if options.disable_outlining:
+      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
+    if options.dump_inputs:
+      cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
+    cmd += [
+        '-cp',
+        options.r8_path,
+        'com.android.tools.r8.R8',
+        '--no-data-resources',
+        '--output',
+        base_context.staging_dir,
+        '--pg-map-output',
+        tmp_mapping_path,
+    ]
+
+    if options.disable_checks:
+      # Info level priority logs are not printed by default.
+      cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info']
+
+    if options.desugar_jdk_libs_json:
+      cmd += [
+          '--desugared-lib',
+          options.desugar_jdk_libs_json,
+          '--desugared-lib-pg-conf-output',
+          options.desugared_library_keep_rule_output,
+      ]
+
+    if options.min_api:
+      cmd += ['--min-api', options.min_api]
+
+    if options.force_enable_assertions:
+      cmd += ['--force-enable-assertions']
+
+    for lib in libraries:
+      cmd += ['--lib', lib]
+
+    for config_file in config_paths:
+      cmd += ['--pg-conf', config_file]
+
+    if options.main_dex_rules_path:
+      for main_dex_rule in options.main_dex_rules_path:
+        cmd += ['--main-dex-rules', main_dex_rule]
+
+    _DeDupeInputJars(split_contexts_by_name)
+
+    # Add any extra inputs to the base context (e.g. desugar runtime).
+    extra_jars = set(options.input_paths)
+    for split_context in split_contexts_by_name.values():
+      extra_jars -= split_context.input_jars
+    base_context.input_jars.update(extra_jars)
+
+    for split_context in split_contexts_by_name.values():
+      if split_context is base_context:
+        continue
+      for in_jar in sorted(split_context.input_jars):
+        cmd += ['--feature', in_jar, split_context.staging_dir]
+
+    cmd += sorted(base_context.input_jars)
+
+    try:
+      stderr_filter = dex.CreateStderrFilter(
+          options.show_desugar_default_interface_warnings)
+      logging.debug('Running R8')
+      build_utils.CheckOutput(cmd,
+                              print_stdout=print_stdout,
+                              stderr_filter=stderr_filter,
+                              fail_on_output=options.warnings_as_errors)
+    except build_utils.CalledProcessError as err:
+      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
+          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
+          'android/docs/java_optimization.md#Debugging-common-failures\n'))
+      raise build_utils.CalledProcessError(err.cwd, err.args,
+                                           err.output + debugging_link)
+
+    base_has_imported_lib = False
+    if options.desugar_jdk_libs_json:
+      logging.debug('Running L8')
+      existing_files = build_utils.FindInDirectory(base_context.staging_dir)
+      jdk_dex_output = os.path.join(base_context.staging_dir,
+                                    'classes%d.dex' % (len(existing_files) + 1))
+      # Use -applymapping to avoid name collisions.
+      l8_dynamic_config_path = os.path.join(tmp_dir, 'l8_dynamic_config.flags')
+      with open(l8_dynamic_config_path, 'w') as f:
+        f.write("-applymapping '{}'\n".format(tmp_mapping_path))
+      # Pass the dynamic config so that obfuscation options are picked up.
+      l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
+      if os.path.exists(options.desugared_library_keep_rule_output):
+        l8_config_paths.append(options.desugared_library_keep_rule_output)
+
+      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
+          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+          options.desugar_jdk_libs_jar,
+          options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
+          options.warnings_as_errors, l8_config_paths)
+      if int(options.min_api) >= 24 and base_has_imported_lib:
+        with open(jdk_dex_output, 'rb') as f:
+          dexfile = dex_parser.DexFile(bytearray(f.read()))
+          for m in dexfile.IterMethodSignatureParts():
+            print('{}#{}'.format(m[0], m[2]))
+        assert False, (
+            'Desugared JDK libs are disabled on Monochrome and newer - see '
+            'crbug.com/1159984 for details, and see above list for desugared '
+            'classes and methods.')
+
+    logging.debug('Collecting outputs')
+    base_context.CreateOutput(base_has_imported_lib,
+                              options.desugared_library_keep_rule_output)
+    for split_context in split_contexts_by_name.values():
+      if split_context is not base_context:
+        split_context.CreateOutput()
+
+    with open(options.mapping_output, 'w') as out_file, \
+        open(tmp_mapping_path) as in_file:
+      # Mapping files generated by R8 include comments that may break
+      # some of our tooling so remove those (specifically: apkanalyzer).
+      out_file.writelines(l for l in in_file if not l.startswith('#'))
+  return base_context
+
+
+def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
+                     keep_rules_output):
+  cmd = build_utils.JavaCmd(False) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--keep-rules', '--output', keep_rules_output
+  ]
+  targets_re = re.compile(targets_re_string)
+  for path in input_paths:
+    if targets_re.search(path):
+      cmd += ['--target', path]
+    else:
+      cmd += ['--source', path]
+  for path in classpath:
+    cmd += ['--lib', path]
+
+  build_utils.CheckOutput(cmd, print_stderr=False, fail_on_output=False)
+
+
+def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
+                            error_title):
+  cmd = build_utils.JavaCmd(warnings_as_errors) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--check'
+  ]
+
+  for path in classpath:
+    cmd += ['--lib', path]
+  for path in dex_files:
+    cmd += ['--source', path]
+
+  def stderr_filter(stderr):
+    ignored_lines = [
+        # Summary contains warning count, which our filtering makes wrong.
+        'Warning: Tracereferences found',
+
+        # TODO(agrieve): Create interface jars for these missing classes rather
+        #     than allowlisting here.
+        'dalvik/system',
+        'libcore/io',
+        'sun/misc/Unsafe',
+
+        # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
+        ('android/text/StaticLayout;<init>(Ljava/lang/CharSequence;IILandroid'
+         '/text/TextPaint;ILandroid/text/Layout$Alignment;Landroid/text/'
+         'TextDirectionHeuristic;FFZLandroid/text/TextUtils$TruncateAt;II)V'),
+
+        # Found in
+        # com/google/android/gms/cast/framework/media/internal/ResourceProvider
+        # Missing due to setting "strip_resources = true".
+        'com/google/android/gms/cast/framework/R',
+
+        # Found in com/google/android/gms/common/GoogleApiAvailability
+        # Missing due to setting "strip_drawables = true".
+        'com/google/android/gms/base/R$drawable',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
+        # PlatformProvider.
+        'com/google/common/flogger/backend/google/GooglePlatform',
+        'com/google/common/flogger/backend/system/DefaultPlatform',
+
+        # trichrome_webview_google_bundle contains this missing reference.
+        # TODO(crbug.com/1142530): Fix this missing reference properly.
+        'org/chromium/build/NativeLibraries',
+
+        # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
+        'Ljava/lang/instrument/ClassFileTransformer',
+        'Ljava/lang/instrument/IllegalClassFormatException',
+        'Ljava/lang/instrument/Instrumentation',
+        'Ljava/lang/management/ManagementFactory',
+        'Ljavax/management/MBeanServer',
+        'Ljavax/management/ObjectInstance',
+        'Ljavax/management/ObjectName',
+        'Ljavax/management/StandardMBean',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Firebase's
+        # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
+        'Lkotlin/KotlinVersion',
+    ]
+
+    had_unfiltered_items = '  ' in stderr
+    stderr = build_utils.FilterLines(
+        stderr, '|'.join(re.escape(x) for x in ignored_lines))
+    if stderr:
+      if '  ' in stderr:
+        stderr = error_title + """
+Tip: Build with:
+        is_java_debug=false
+        treat_warnings_as_errors=false
+        enable_proguard_obfuscation=false
+     and then use dexdump to see which class(es) reference them.
+
+     E.g.:
+       third_party/android_sdk/public/build-tools/*/dexdump -d \
+out/Release/apks/YourApk.apk > dex.txt
+""" + stderr
+
+        if 'FragmentActivity' in stderr:
+          stderr += """
+You may need to update build configs to run FragmentActivityReplacer for
+additional targets. See
+https://chromium.googlesource.com/chromium/src.git/+/master/docs/ui/android/bytecode_rewriting.md.
+"""
+      elif had_unfiltered_items:
+        # Left only with empty headings. All indented items filtered out.
+        stderr = ''
+    return stderr
+
+  logging.debug('cmd: %s', ' '.join(cmd))
+  build_utils.CheckOutput(cmd,
+                          print_stdout=True,
+                          stderr_filter=stderr_filter,
+                          fail_on_output=warnings_as_errors)
+
+
+def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
+  ret = []
+
+  # Sort in this way so //clank versions of the same libraries will sort
+  # to the same spot in the file.
+  def sort_key(path):
+    return tuple(reversed(path.split(os.path.sep)))
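+  # E.g. (on POSIX) sort_key('clank/a/proguard.flags') returns
+  # ('proguard.flags', 'a', 'clank'), so files with the same basename sort
+  # next to each other regardless of directory.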
+
+  for config in sorted(configs, key=sort_key):
+    if exclude_generated and config.endswith('.resources.proguard.txt'):
+      continue
+
+    with open(config) as config_file:
+      contents = config_file.read().rstrip()
+
+    if not contents.strip():
+      # Ignore empty files.
+      continue
+
+    # Fix up line endings (third_party configs can have windows endings).
+    contents = contents.replace('\r', '')
+    # Remove numbers from generated rule comments to make file more
+    # diff'able.
+    contents = re.sub(r' #generated:\d+', '', contents)
+    ret.append('# File: ' + config)
+    ret.append(contents)
+    ret.append('')
+
+  if dynamic_config_data:
+    ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
+    ret.append(dynamic_config_data)
+    ret.append('')
+  return '\n'.join(ret)
+
+
+def _CreateDynamicConfig(options):
+  # Our scripts already fail on output. Adding -ignorewarnings makes R8 output
+  # warnings rather than throw exceptions so we can selectively ignore them via
+  # dex.py's ignore list. Context: https://crbug.com/1180222
+  ret = ["-ignorewarnings"]
+
+  if options.sourcefile:
+    ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
+               options.sourcefile)
+
+  if options.enable_obfuscation:
+    ret.append("-repackageclasses ''")
+  else:
+    ret.append("-dontobfuscate")
+
+  if options.apply_mapping:
+    ret.append("-applymapping '%s'" % options.apply_mapping)
+
+  _min_api = int(options.min_api) if options.min_api else 0
+  for api_level, version_code in _API_LEVEL_VERSION_CODE:
+    annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
+    if api_level > _min_api:
+      ret.append('-keep @interface %s' % annotation_name)
+      ret.append("""\
+-if @%s class * {
+    *** *(...);
+}
+-keep,allowobfuscation class <1> {
+    *** <2>(...);
+}""" % annotation_name)
+      ret.append("""\
+-keepclassmembers,allowobfuscation class ** {
+  @%s <methods>;
+}""" % annotation_name)
+  return '\n'.join(ret)
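+
+# Illustration: with a hypothetical --min-api=23, the loop above emits, for
+# each higher API level (24 and up), a block like
+#   -keep @interface org.chromium.base.annotations.VerifiesOnN
+# followed by the -if / -keep,allowobfuscation rules templated above.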
+
+
+def _VerifyNoEmbeddedConfigs(jar_paths):
+  failed = False
+  for jar_path in jar_paths:
+    with zipfile.ZipFile(jar_path) as z:
+      for name in z.namelist():
+        if name.startswith('META-INF/proguard/'):
+          failed = True
+          sys.stderr.write("""\
+Found embedded proguard config within {}.
+Embedded configs are not permitted (https://crbug.com/989505)
+""".format(jar_path))
+          break
+  if failed:
+    sys.exit(1)
+
+
+def _ContainsDebuggingConfig(config_str):
+  debugging_configs = ('-whyareyoukeeping', '-whyareyounotinlining')
+  return any(config in config_str for config in debugging_configs)
+
+
+def _MaybeWriteStampAndDepFile(options, inputs):
+  output = options.output_path
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+    output = options.stamp
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
+
+
+def main():
+  build_utils.InitLogging('PROGUARD_DEBUG')
+  options = _ParseOptions()
+
+  logging.debug('Preparing configs')
+  proguard_configs = options.proguard_configs
+
+  # ProGuard configs that are derived from flags.
+  dynamic_config_data = _CreateDynamicConfig(options)
+
+  # The static and flag-derived configs merged into a single string.
+  merged_configs = _CombineConfigs(
+      proguard_configs, dynamic_config_data, exclude_generated=True)
+  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose
+
+  if options.expected_file:
+    diff_utils.CheckExpectations(merged_configs, options)
+    if options.only_verify_expectations:
+      build_utils.WriteDepfile(options.depfile,
+                               options.actual_file,
+                               inputs=options.proguard_configs)
+      return
+
+  logging.debug('Looking for embedded configs')
+  libraries = []
+  for p in options.classpath:
+    # TODO(bjoyce): Remove filter once old android support libraries are gone.
+    # Works around a dependency problem where a library class extends a
+    # program class.
+    if 'com_android_support' in p or 'android_support_test' in p:
+      continue
+    # If a jar is part of the input, there is no need to also include it as
+    # a library jar.
+    if p not in libraries and p not in options.input_paths:
+      libraries.append(p)
+  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)
+  if options.keep_rules_output_path:
+    _OutputKeepRules(options.r8_path, options.input_paths, options.classpath,
+                     options.keep_rules_targets_regex,
+                     options.keep_rules_output_path)
+    return
+
+  base_context = _OptimizeWithR8(options, proguard_configs, libraries,
+                                 dynamic_config_data, print_stdout)
+
+  if not options.disable_checks:
+    logging.debug('Running tracereferences')
+    all_dex_files = []
+    if options.output_path:
+      all_dex_files.append(options.output_path)
+    if options.dex_dests:
+      all_dex_files.extend(options.dex_dests)
+    error_title = 'DEX contains references to non-existent symbols after R8.'
+    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
+                            options.warnings_as_errors, error_title)
+    # Also ensure that base module doesn't have any references to child dex
+    # symbols.
+    # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put
+    #     synthesized classes in the base module.
+    error_title = 'Base module DEX contains references to symbols within DFMs.'
+    _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
+                            options.classpath, options.warnings_as_errors,
+                            error_title)
+
+  for output in options.extra_mapping_output_paths:
+    shutil.copy(options.mapping_output, output)
+
+  inputs = options.proguard_configs + options.input_paths + libraries
+  if options.apply_mapping:
+    inputs.append(options.apply_mapping)
+
+  _MaybeWriteStampAndDepFile(options, inputs)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/gyp/proguard.pydeps b/src/build/android/gyp/proguard.pydeps
new file mode 100644
index 0000000..c1de73b
--- /dev/null
+++ b/src/build/android/gyp/proguard.pydeps
@@ -0,0 +1,16 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+dex.py
+dex_jdk_libs.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/src/build/android/gyp/proto/Configuration_pb2.py b/src/build/android/gyp/proto/Configuration_pb2.py
new file mode 100644
index 0000000..8591830
--- /dev/null
+++ b/src/build/android/gyp/proto/Configuration_pb2.py
@@ -0,0 +1,697 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Configuration.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='frameworks/base/tools/aapt2/Configuration.proto',
+  package='aapt.pb',
+  syntax='proto3',
+  serialized_options=_b('\n\020com.android.aapt'),
+  serialized_pb=_b('\n/frameworks/base/tools/aapt2/Configuration.proto\x12\x07\x61\x61pt.pb\"\xd9\x14\n\rConfiguration\x12\x0b\n\x03mcc\x18\x01 \x01(\r\x12\x0b\n\x03mnc\x18\x02 \x01(\r\x12\x0e\n\x06locale\x18\x03 \x01(\t\x12@\n\x10layout_direction\x18\x04 \x01(\x0e\x32&.aapt.pb.Configuration.LayoutDirection\x12\x14\n\x0cscreen_width\x18\x05 \x01(\r\x12\x15\n\rscreen_height\x18\x06 \x01(\r\x12\x17\n\x0fscreen_width_dp\x18\x07 \x01(\r\x12\x18\n\x10screen_height_dp\x18\x08 \x01(\r\x12 \n\x18smallest_screen_width_dp\x18\t \x01(\r\x12\x43\n\x12screen_layout_size\x18\n \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutSize\x12\x43\n\x12screen_layout_long\x18\x0b \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutLong\x12\x38\n\x0cscreen_round\x18\x0c \x01(\x0e\x32\".aapt.pb.Configuration.ScreenRound\x12?\n\x10wide_color_gamut\x18\r \x01(\x0e\x32%.aapt.pb.Configuration.WideColorGamut\x12\'\n\x03hdr\x18\x0e \x01(\x0e\x32\x1a.aapt.pb.Configuration.Hdr\x12\x37\n\x0borientation\x18\x0f \x01(\x0e\x32\".aapt.pb.Configuration.Orientation\x12\x37\n\x0cui_mode_type\x18\x10 \x01(\x0e\x32!.aapt.pb.Configuration.UiModeType\x12\x39\n\rui_mode_night\x18\x11 \x01(\x0e\x32\".aapt.pb.Configuration.UiModeNight\x12\x0f\n\x07\x64\x65nsity\x18\x12 \x01(\r\x12\x37\n\x0btouchscreen\x18\x13 \x01(\x0e\x32\".aapt.pb.Configuration.Touchscreen\x12\x36\n\x0bkeys_hidden\x18\x14 \x01(\x0e\x32!.aapt.pb.Configuration.KeysHidden\x12\x31\n\x08keyboard\x18\x15 \x01(\x0e\x32\x1f.aapt.pb.Configuration.Keyboard\x12\x34\n\nnav_hidden\x18\x16 \x01(\x0e\x32 .aapt.pb.Configuration.NavHidden\x12\x35\n\nnavigation\x18\x17 \x01(\x0e\x32!.aapt.pb.Configuration.Navigation\x12\x13\n\x0bsdk_version\x18\x18 \x01(\r\x12\x0f\n\x07product\x18\x19 \x01(\t\"a\n\x0fLayoutDirection\x12\x1a\n\x16LAYOUT_DIRECTION_UNSET\x10\x00\x12\x18\n\x14LAYOUT_DIRECTION_LTR\x10\x01\x12\x18\n\x14LAYOUT_DIRECTION_RTL\x10\x02\"\xaa\x01\n\x10ScreenLayoutSize\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_UNSET\x10\x00\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_SMALL\x10\x01\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_NORMAL\x10\x02\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_LARGE\x10\x03\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_XLARGE\x10\x04\"m\n\x10ScreenLayoutLong\x12\x1c\n\x18SCREEN_LAYOUT_LONG_UNSET\x10\x00\x12\x1b\n\x17SCREEN_LAYOUT_LONG_LONG\x10\x01\x12\x1e\n\x1aSCREEN_LAYOUT_LONG_NOTLONG\x10\x02\"X\n\x0bScreenRound\x12\x16\n\x12SCREEN_ROUND_UNSET\x10\x00\x12\x16\n\x12SCREEN_ROUND_ROUND\x10\x01\x12\x19\n\x15SCREEN_ROUND_NOTROUND\x10\x02\"h\n\x0eWideColorGamut\x12\x1a\n\x16WIDE_COLOR_GAMUT_UNSET\x10\x00\x12\x1b\n\x17WIDE_COLOR_GAMUT_WIDECG\x10\x01\x12\x1d\n\x19WIDE_COLOR_GAMUT_NOWIDECG\x10\x02\"3\n\x03Hdr\x12\r\n\tHDR_UNSET\x10\x00\x12\x0e\n\nHDR_HIGHDR\x10\x01\x12\r\n\tHDR_LOWDR\x10\x02\"h\n\x0bOrientation\x12\x15\n\x11ORIENTATION_UNSET\x10\x00\x12\x14\n\x10ORIENTATION_PORT\x10\x01\x12\x14\n\x10ORIENTATION_LAND\x10\x02\x12\x16\n\x12ORIENTATION_SQUARE\x10\x03\"\xd7\x01\n\nUiModeType\x12\x16\n\x12UI_MODE_TYPE_UNSET\x10\x00\x12\x17\n\x13UI_MODE_TYPE_NORMAL\x10\x01\x12\x15\n\x11UI_MODE_TYPE_DESK\x10\x02\x12\x14\n\x10UI_MODE_TYPE_CAR\x10\x03\x12\x1b\n\x17UI_MODE_TYPE_TELEVISION\x10\x04\x12\x1a\n\x16UI_MODE_TYPE_APPLIANCE\x10\x05\x12\x16\n\x12UI_MODE_TYPE_WATCH\x10\x06\x12\x1a\n\x16UI_MODE_TYPE_VRHEADSET\x10\x07\"[\n\x0bUiModeNight\x12\x17\n\x13UI_MODE_NIGHT_UNSET\x10\x00\x12\x17\n\x13UI_MODE_NIGHT_NIGHT\x10\x01\x12\x1a\n\x16UI_MODE_NIGHT_NOTNIGHT\x10\x02\"m\n\x0bTouchscreen\x12\x15\n\x11TOUCHSCREEN_UNSET\x10\x00\x12\x17\n\x13TOUCHSCREEN_NOTOUCH\x10\x01\x12\x16\n\x12TOUCHSCREEN_STYLUS\x10\x02\x12\x16\n\x12TOUCHSCREEN_FINGER\x10\x03\"v\n\nKeysHidden\x12\x15\n\x11KEYS_HIDDEN_UNSET\x10\x00\x12\x1b\n\x17KEYS_HIDDEN_KEYSEXPOSED\x10\x01\x12\x1a\n\x16KEYS_HIDDEN_KEYSHIDDEN\x10\x02\x12\x18\n\x14KEYS_HIDDEN_KEYSSOFT\x10\x03\"`\n\x08Keyboard\x12\x12\n\x0eKEYBOARD_UNSET\x10\x00\x12\x13\n\x0fKEYBOARD_NOKEYS\x10\x01\x12\x13\n\x0fKEYBOARD_QWERTY\x10\x02\x12\x16\n\x12KEYBOARD_TWELVEKEY\x10\x03\"V\n\tNavHidden\x12\x14\n\x10NAV_HIDDEN_UNSET\x10\x00\x12\x19\n\x15NAV_HIDDEN_NAVEXPOSED\x10\x01\x12\x18\n\x14NAV_HIDDEN_NAVHIDDEN\x10\x02\"}\n\nNavigation\x12\x14\n\x10NAVIGATION_UNSET\x10\x00\x12\x14\n\x10NAVIGATION_NONAV\x10\x01\x12\x13\n\x0fNAVIGATION_DPAD\x10\x02\x12\x18\n\x14NAVIGATION_TRACKBALL\x10\x03\x12\x14\n\x10NAVIGATION_WHEEL\x10\x04\x42\x12\n\x10\x63om.android.aaptb\x06proto3')
+)
+
+
+
+_CONFIGURATION_LAYOUTDIRECTION = _descriptor.EnumDescriptor(
+  name='LayoutDirection',
+  full_name='aapt.pb.Configuration.LayoutDirection',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_LTR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_RTL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1119,
+  serialized_end=1216,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_LAYOUTDIRECTION)
+
+_CONFIGURATION_SCREENLAYOUTSIZE = _descriptor.EnumDescriptor(
+  name='ScreenLayoutSize',
+  full_name='aapt.pb.Configuration.ScreenLayoutSize',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_SMALL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_NORMAL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_LARGE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_XLARGE', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1219,
+  serialized_end=1389,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTSIZE)
+
+_CONFIGURATION_SCREENLAYOUTLONG = _descriptor.EnumDescriptor(
+  name='ScreenLayoutLong',
+  full_name='aapt.pb.Configuration.ScreenLayoutLong',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_LONG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_NOTLONG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1391,
+  serialized_end=1500,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTLONG)
+
+_CONFIGURATION_SCREENROUND = _descriptor.EnumDescriptor(
+  name='ScreenRound',
+  full_name='aapt.pb.Configuration.ScreenRound',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_ROUND', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_NOTROUND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1502,
+  serialized_end=1590,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENROUND)
+
+_CONFIGURATION_WIDECOLORGAMUT = _descriptor.EnumDescriptor(
+  name='WideColorGamut',
+  full_name='aapt.pb.Configuration.WideColorGamut',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_WIDECG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_NOWIDECG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1592,
+  serialized_end=1696,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_WIDECOLORGAMUT)
+
+_CONFIGURATION_HDR = _descriptor.EnumDescriptor(
+  name='Hdr',
+  full_name='aapt.pb.Configuration.Hdr',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='HDR_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_HIGHDR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_LOWDR', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1698,
+  serialized_end=1749,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_HDR)
+
+_CONFIGURATION_ORIENTATION = _descriptor.EnumDescriptor(
+  name='Orientation',
+  full_name='aapt.pb.Configuration.Orientation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_PORT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_LAND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_SQUARE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1751,
+  serialized_end=1855,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_ORIENTATION)
+
+_CONFIGURATION_UIMODETYPE = _descriptor.EnumDescriptor(
+  name='UiModeType',
+  full_name='aapt.pb.Configuration.UiModeType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_NORMAL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_DESK', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_CAR', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_TELEVISION', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_APPLIANCE', index=5, number=5,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_WATCH', index=6, number=6,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_VRHEADSET', index=7, number=7,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1858,
+  serialized_end=2073,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODETYPE)
+
+_CONFIGURATION_UIMODENIGHT = _descriptor.EnumDescriptor(
+  name='UiModeNight',
+  full_name='aapt.pb.Configuration.UiModeNight',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NIGHT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NOTNIGHT', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2075,
+  serialized_end=2166,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODENIGHT)
+
+_CONFIGURATION_TOUCHSCREEN = _descriptor.EnumDescriptor(
+  name='Touchscreen',
+  full_name='aapt.pb.Configuration.Touchscreen',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_NOTOUCH', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_STYLUS', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_FINGER', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2168,
+  serialized_end=2277,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_TOUCHSCREEN)
+
+_CONFIGURATION_KEYSHIDDEN = _descriptor.EnumDescriptor(
+  name='KeysHidden',
+  full_name='aapt.pb.Configuration.KeysHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSSOFT', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2279,
+  serialized_end=2397,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYSHIDDEN)
+
+_CONFIGURATION_KEYBOARD = _descriptor.EnumDescriptor(
+  name='Keyboard',
+  full_name='aapt.pb.Configuration.Keyboard',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_NOKEYS', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_QWERTY', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_TWELVEKEY', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2399,
+  serialized_end=2495,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYBOARD)
+
+_CONFIGURATION_NAVHIDDEN = _descriptor.EnumDescriptor(
+  name='NavHidden',
+  full_name='aapt.pb.Configuration.NavHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2497,
+  serialized_end=2583,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVHIDDEN)
+
+_CONFIGURATION_NAVIGATION = _descriptor.EnumDescriptor(
+  name='Navigation',
+  full_name='aapt.pb.Configuration.Navigation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_NONAV', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_DPAD', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_TRACKBALL', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_WHEEL', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2585,
+  serialized_end=2710,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVIGATION)
+
+
+_CONFIGURATION = _descriptor.Descriptor(
+  name='Configuration',
+  full_name='aapt.pb.Configuration',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='mcc', full_name='aapt.pb.Configuration.mcc', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='mnc', full_name='aapt.pb.Configuration.mnc', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='locale', full_name='aapt.pb.Configuration.locale', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='layout_direction', full_name='aapt.pb.Configuration.layout_direction', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_width', full_name='aapt.pb.Configuration.screen_width', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_height', full_name='aapt.pb.Configuration.screen_height', index=5,
+      number=6, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_width_dp', full_name='aapt.pb.Configuration.screen_width_dp', index=6,
+      number=7, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_height_dp', full_name='aapt.pb.Configuration.screen_height_dp', index=7,
+      number=8, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='smallest_screen_width_dp', full_name='aapt.pb.Configuration.smallest_screen_width_dp', index=8,
+      number=9, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_layout_size', full_name='aapt.pb.Configuration.screen_layout_size', index=9,
+      number=10, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_layout_long', full_name='aapt.pb.Configuration.screen_layout_long', index=10,
+      number=11, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='screen_round', full_name='aapt.pb.Configuration.screen_round', index=11,
+      number=12, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='wide_color_gamut', full_name='aapt.pb.Configuration.wide_color_gamut', index=12,
+      number=13, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='hdr', full_name='aapt.pb.Configuration.hdr', index=13,
+      number=14, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='orientation', full_name='aapt.pb.Configuration.orientation', index=14,
+      number=15, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ui_mode_type', full_name='aapt.pb.Configuration.ui_mode_type', index=15,
+      number=16, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ui_mode_night', full_name='aapt.pb.Configuration.ui_mode_night', index=16,
+      number=17, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='density', full_name='aapt.pb.Configuration.density', index=17,
+      number=18, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='touchscreen', full_name='aapt.pb.Configuration.touchscreen', index=18,
+      number=19, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='keys_hidden', full_name='aapt.pb.Configuration.keys_hidden', index=19,
+      number=20, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='keyboard', full_name='aapt.pb.Configuration.keyboard', index=20,
+      number=21, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='nav_hidden', full_name='aapt.pb.Configuration.nav_hidden', index=21,
+      number=22, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='navigation', full_name='aapt.pb.Configuration.navigation', index=22,
+      number=23, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sdk_version', full_name='aapt.pb.Configuration.sdk_version', index=23,
+      number=24, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='product', full_name='aapt.pb.Configuration.product', index=24,
+      number=25, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _CONFIGURATION_LAYOUTDIRECTION,
+    _CONFIGURATION_SCREENLAYOUTSIZE,
+    _CONFIGURATION_SCREENLAYOUTLONG,
+    _CONFIGURATION_SCREENROUND,
+    _CONFIGURATION_WIDECOLORGAMUT,
+    _CONFIGURATION_HDR,
+    _CONFIGURATION_ORIENTATION,
+    _CONFIGURATION_UIMODETYPE,
+    _CONFIGURATION_UIMODENIGHT,
+    _CONFIGURATION_TOUCHSCREEN,
+    _CONFIGURATION_KEYSHIDDEN,
+    _CONFIGURATION_KEYBOARD,
+    _CONFIGURATION_NAVHIDDEN,
+    _CONFIGURATION_NAVIGATION,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=61,
+  serialized_end=2710,
+)
+
+_CONFIGURATION.fields_by_name['layout_direction'].enum_type = _CONFIGURATION_LAYOUTDIRECTION
+_CONFIGURATION.fields_by_name['screen_layout_size'].enum_type = _CONFIGURATION_SCREENLAYOUTSIZE
+_CONFIGURATION.fields_by_name['screen_layout_long'].enum_type = _CONFIGURATION_SCREENLAYOUTLONG
+_CONFIGURATION.fields_by_name['screen_round'].enum_type = _CONFIGURATION_SCREENROUND
+_CONFIGURATION.fields_by_name['wide_color_gamut'].enum_type = _CONFIGURATION_WIDECOLORGAMUT
+_CONFIGURATION.fields_by_name['hdr'].enum_type = _CONFIGURATION_HDR
+_CONFIGURATION.fields_by_name['orientation'].enum_type = _CONFIGURATION_ORIENTATION
+_CONFIGURATION.fields_by_name['ui_mode_type'].enum_type = _CONFIGURATION_UIMODETYPE
+_CONFIGURATION.fields_by_name['ui_mode_night'].enum_type = _CONFIGURATION_UIMODENIGHT
+_CONFIGURATION.fields_by_name['touchscreen'].enum_type = _CONFIGURATION_TOUCHSCREEN
+_CONFIGURATION.fields_by_name['keys_hidden'].enum_type = _CONFIGURATION_KEYSHIDDEN
+_CONFIGURATION.fields_by_name['keyboard'].enum_type = _CONFIGURATION_KEYBOARD
+_CONFIGURATION.fields_by_name['nav_hidden'].enum_type = _CONFIGURATION_NAVHIDDEN
+_CONFIGURATION.fields_by_name['navigation'].enum_type = _CONFIGURATION_NAVIGATION
+_CONFIGURATION_LAYOUTDIRECTION.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTSIZE.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTLONG.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENROUND.containing_type = _CONFIGURATION
+_CONFIGURATION_WIDECOLORGAMUT.containing_type = _CONFIGURATION
+_CONFIGURATION_HDR.containing_type = _CONFIGURATION
+_CONFIGURATION_ORIENTATION.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODETYPE.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODENIGHT.containing_type = _CONFIGURATION
+_CONFIGURATION_TOUCHSCREEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYSHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYBOARD.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVIGATION.containing_type = _CONFIGURATION
+DESCRIPTOR.message_types_by_name['Configuration'] = _CONFIGURATION
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Configuration = _reflection.GeneratedProtocolMessageType('Configuration', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGURATION,
+  '__module__' : 'frameworks.base.tools.aapt2.Configuration_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Configuration)
+  })
+_sym_db.RegisterMessage(Configuration)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/build/android/gyp/proto/README.md b/src/build/android/gyp/proto/README.md
new file mode 100644
index 0000000..6850410
--- /dev/null
+++ b/src/build/android/gyp/proto/README.md
@@ -0,0 +1,25 @@
+# Protos
+These protos are generated from Resources.proto and Configuration.proto, which
+live in the frameworks/base/tools/aapt2/ directory of the Android repo. To
+regenerate them when the upstream definitions change, run this command from the
+root of an Android checkout:
+
+    protoc --python_out=some_dir frameworks/base/tools/aapt2/Resources.proto \
+        frameworks/base/tools/aapt2/Configuration.proto
+
+Then copy the resulting \*pb2.py files from some_dir here. To make sure
+Resources_pb2.py can import Configuration_pb2.py, replace the
+"from frameworks.base.tools.aapt2" portion of its import statement with
+"from ." so the module is imported from the current directory instead, as
+shown below.
diff --git a/src/build/android/gyp/proto/Resources_pb2.py b/src/build/android/gyp/proto/Resources_pb2.py
new file mode 100644
index 0000000..3bbd702
--- /dev/null
+++ b/src/build/android/gyp/proto/Resources_pb2.py
@@ -0,0 +1,2779 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Resources.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='frameworks/base/tools/aapt2/Resources.proto',
+  package='aapt.pb',
+  syntax='proto3',
+  serialized_options=_b('\n\020com.android.aapt'),
+  serialized_pb=_b('\n+frameworks/base/tools/aapt2/Resources.proto\x12\x07\x61\x61pt.pb\x1a/frameworks/base/tools/aapt2/Configuration.proto\"\x1a\n\nStringPool\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"<\n\x0eSourcePosition\x12\x13\n\x0bline_number\x18\x01 \x01(\r\x12\x15\n\rcolumn_number\x18\x02 \x01(\r\"E\n\x06Source\x12\x10\n\x08path_idx\x18\x01 \x01(\r\x12)\n\x08position\x18\x02 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"0\n\x0fToolFingerprint\x12\x0c\n\x04tool\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"\xbb\x01\n\rResourceTable\x12(\n\x0bsource_pool\x18\x01 \x01(\x0b\x32\x13.aapt.pb.StringPool\x12!\n\x07package\x18\x02 \x03(\x0b\x32\x10.aapt.pb.Package\x12)\n\x0boverlayable\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Overlayable\x12\x32\n\x10tool_fingerprint\x18\x04 \x03(\x0b\x32\x18.aapt.pb.ToolFingerprint\"\x17\n\tPackageId\x12\n\n\x02id\x18\x01 \x01(\r\"d\n\x07Package\x12&\n\npackage_id\x18\x01 \x01(\x0b\x32\x12.aapt.pb.PackageId\x12\x14\n\x0cpackage_name\x18\x02 \x01(\t\x12\x1b\n\x04type\x18\x03 \x03(\x0b\x32\r.aapt.pb.Type\"\x14\n\x06TypeId\x12\n\n\x02id\x18\x01 \x01(\r\"U\n\x04Type\x12 \n\x07type_id\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.TypeId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1d\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x0e.aapt.pb.Entry\"\x97\x01\n\nVisibility\x12(\n\x05level\x18\x01 \x01(\x0e\x32\x19.aapt.pb.Visibility.Level\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x03 \x01(\t\"-\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PRIVATE\x10\x01\x12\n\n\x06PUBLIC\x10\x02\"<\n\x08\x41llowNew\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\"K\n\x0bOverlayable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\r\n\x05\x61\x63tor\x18\x03 \x01(\t\"\xf3\x01\n\x0fOverlayableItem\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12/\n\x06policy\x18\x03 \x03(\x0e\x32\x1f.aapt.pb.OverlayableItem.Policy\x12\x17\n\x0foverlayable_idx\x18\x04 \x01(\r\"d\n\x06Policy\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06PUBLIC\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\n\n\x06VENDOR\x10\x03\x12\x0b\n\x07PRODUCT\x10\x04\x12\r\n\tSIGNATURE\x10\x05\x12\x07\n\x03ODM\x10\x06\x12\x07\n\x03OEM\x10\x07\"\x15\n\x07\x45ntryId\x12\n\n\x02id\x18\x01 \x01(\r\"\xe8\x01\n\x05\x45ntry\x12\"\n\x08\x65ntry_id\x18\x01 \x01(\x0b\x32\x10.aapt.pb.EntryId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\nvisibility\x18\x03 \x01(\x0b\x32\x13.aapt.pb.Visibility\x12$\n\tallow_new\x18\x04 \x01(\x0b\x32\x11.aapt.pb.AllowNew\x12\x32\n\x10overlayable_item\x18\x05 \x01(\x0b\x32\x18.aapt.pb.OverlayableItem\x12*\n\x0c\x63onfig_value\x18\x06 \x03(\x0b\x32\x14.aapt.pb.ConfigValue\"T\n\x0b\x43onfigValue\x12&\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x16.aapt.pb.Configuration\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.Value\"\xa1\x01\n\x05Value\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x0c\n\x04weak\x18\x03 \x01(\x08\x12\x1d\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.ItemH\x00\x12\x30\n\x0e\x63ompound_value\x18\x05 \x01(\x0b\x32\x16.aapt.pb.CompoundValueH\x00\x42\x07\n\x05value\"\x8d\x02\n\x04Item\x12!\n\x03ref\x18\x01 \x01(\x0b\x32\x12.aapt.pb.ReferenceH\x00\x12\x1e\n\x03str\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.StringH\x00\x12%\n\x07raw_str\x18\x03 \x01(\x0b\x32\x12.aapt.pb.RawStringH\x00\x12+\n\nstyled_str\x18\x04 \x01(\x0b\x32\x15.aapt.pb.StyledStringH\x00\x12&\n\x04\x66ile\x18\x05 \x01(\x0b\x32\x16.aapt.pb.FileReferenceH\x00\x12\x19\n\x02id\x18\x06 \x01(\x0b\x32\x0b.aapt.pb.IdH\x00\x12\"\n\x04prim\x18\x07 \x01(\x0b\x32\x12.aapt.pb.PrimitiveH\x00\x42\x07\n\x05value\"\xca\x01\n\rCompoundValue\x12\"\n\x04\x61ttr\x18\x01 \x01(\x0b\x32\x12.aapt.pb.AttributeH\x00\x12\x1f\n\x05style\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.StyleH\x00\x12\'\n\tstyleable\x18\x03 \x01(\x0b\x32\x12.aapt.pb.StyleableH\x00\x12\x1f\n\x05\x61rray\x18\x04 \x01(\x0b\x32\x0e.aapt.pb.ArrayH\x00\x12!\n\x06plural\x18\x05 \x01(\x0b\x32\x0f.aapt.pb.PluralH\x00\x42\x07\n\x05value\"\x18\n\x07\x42oolean\x12\r\n\x05value\x18\x01 \x01(\x08\"\xa9\x01\n\tReference\x12%\n\x04type\x18\x01 \x01(\x0e\x32\x17.aapt.pb.Reference.Type\x12\n\n\x02id\x18\x02 \x01(\r\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07private\x18\x04 \x01(\x08\x12$\n\nis_dynamic\x18\x05 \x01(\x0b\x32\x10.aapt.pb.Boolean\"$\n\x04Type\x12\r\n\tREFERENCE\x10\x00\x12\r\n\tATTRIBUTE\x10\x01\"\x04\n\x02Id\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x1a\n\tRawString\x12\r\n\x05value\x18\x01 \x01(\t\"\x83\x01\n\x0cStyledString\x12\r\n\x05value\x18\x01 \x01(\t\x12(\n\x04span\x18\x02 \x03(\x0b\x32\x1a.aapt.pb.StyledString.Span\x1a:\n\x04Span\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x12\n\nfirst_char\x18\x02 \x01(\r\x12\x11\n\tlast_char\x18\x03 \x01(\r\"\x85\x01\n\rFileReference\x12\x0c\n\x04path\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.aapt.pb.FileReference.Type\";\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03PNG\x10\x01\x12\x0e\n\nBINARY_XML\x10\x02\x12\r\n\tPROTO_XML\x10\x03\"\x83\x04\n\tPrimitive\x12\x31\n\nnull_value\x18\x01 \x01(\x0b\x32\x1b.aapt.pb.Primitive.NullTypeH\x00\x12\x33\n\x0b\x65mpty_value\x18\x02 \x01(\x0b\x32\x1c.aapt.pb.Primitive.EmptyTypeH\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x19\n\x0f\x64imension_value\x18\r \x01(\rH\x00\x12\x18\n\x0e\x66raction_value\x18\x0e \x01(\rH\x00\x12\x1b\n\x11int_decimal_value\x18\x06 \x01(\x05H\x00\x12\x1f\n\x15int_hexadecimal_value\x18\x07 \x01(\rH\x00\x12\x17\n\rboolean_value\x18\x08 \x01(\x08H\x00\x12\x1b\n\x11\x63olor_argb8_value\x18\t \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb8_value\x18\n \x01(\rH\x00\x12\x1b\n\x11\x63olor_argb4_value\x18\x0b \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb4_value\x18\x0c \x01(\rH\x00\x12(\n\x1a\x64imension_value_deprecated\x18\x04 \x01(\x02\x42\x02\x18\x01H\x00\x12\'\n\x19\x66raction_value_deprecated\x18\x05 \x01(\x02\x42\x02\x18\x01H\x00\x1a\n\n\x08NullType\x1a\x0b\n\tEmptyTypeB\r\n\x0boneof_value\"\x90\x03\n\tAttribute\x12\x14\n\x0c\x66ormat_flags\x18\x01 \x01(\r\x12\x0f\n\x07min_int\x18\x02 \x01(\x05\x12\x0f\n\x07max_int\x18\x03 \x01(\x05\x12)\n\x06symbol\x18\x04 \x03(\x0b\x32\x19.aapt.pb.Attribute.Symbol\x1ay\n\x06Symbol\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04name\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\r\n\x05value\x18\x04 \x01(\r\x12\x0c\n\x04type\x18\x05 \x01(\r\"\xa4\x01\n\x0b\x46ormatFlags\x12\x08\n\x04NONE\x10\x00\x12\t\n\x03\x41NY\x10\xff\xff\x03\x12\r\n\tREFERENCE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x0b\n\x07INTEGER\x10\x04\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\t\n\x05\x43OLOR\x10\x10\x12\t\n\x05\x46LOAT\x10 \x12\r\n\tDIMENSION\x10@\x12\r\n\x08\x46RACTION\x10\x80\x01\x12\n\n\x04\x45NUM\x10\x80\x80\x04\x12\x0b\n\x05\x46LAGS\x10\x80\x80\x08\"\xf1\x01\n\x05Style\x12\"\n\x06parent\x18\x01 \x01(\x0b\x32\x12.aapt.pb.Reference\x12&\n\rparent_source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12#\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Style.Entry\x1aw\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1f\n\x03key\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"\x91\x01\n\tStyleable\x12\'\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x18.aapt.pb.Styleable.Entry\x1a[\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04\x61ttr\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\"\x8a\x01\n\x05\x41rray\x12\'\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x16.aapt.pb.Array.Element\x1aX\n\x07\x45lement\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1b\n\x04item\x18\x03 \x01(\x0b\x32\r.aapt.pb.Item\"\xef\x01\n\x06Plural\x12$\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x15.aapt.pb.Plural.Entry\x1a|\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12$\n\x05\x61rity\x18\x03 \x01(\x0e\x32\x15.aapt.pb.Plural.Arity\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"A\n\x05\x41rity\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\x12\x07\n\x03TWO\x10\x02\x12\x07\n\x03\x46\x45W\x10\x03\x12\x08\n\x04MANY\x10\x04\x12\t\n\x05OTHER\x10\x05\"r\n\x07XmlNode\x12&\n\x07\x65lement\x18\x01 \x01(\x0b\x32\x13.aapt.pb.XmlElementH\x00\x12\x0e\n\x04text\x18\x02 \x01(\tH\x00\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePositionB\x06\n\x04node\"\xb2\x01\n\nXmlElement\x12\x34\n\x15namespace_declaration\x18\x01 \x03(\x0b\x32\x15.aapt.pb.XmlNamespace\x12\x15\n\rnamespace_uri\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12(\n\tattribute\x18\x04 \x03(\x0b\x32\x15.aapt.pb.XmlAttribute\x12\x1f\n\x05\x63hild\x18\x05 \x03(\x0b\x32\x10.aapt.pb.XmlNode\"T\n\x0cXmlNamespace\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"\xa6\x01\n\x0cXmlAttribute\x12\x15\n\rnamespace_uri\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\'\n\x06source\x18\x04 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\x12\x13\n\x0bresource_id\x18\x05 \x01(\r\x12$\n\rcompiled_item\x18\x06 \x01(\x0b\x32\r.aapt.pb.ItemB\x12\n\x10\x63om.android.aaptb\x06proto3')
+  ,
+  dependencies=[frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2.DESCRIPTOR,])
+
+
+
+_VISIBILITY_LEVEL = _descriptor.EnumDescriptor(
+  name='Level',
+  full_name='aapt.pb.Visibility.Level',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRIVATE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PUBLIC', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=849,
+  serialized_end=894,
+)
+_sym_db.RegisterEnumDescriptor(_VISIBILITY_LEVEL)
+
+_OVERLAYABLEITEM_POLICY = _descriptor.EnumDescriptor(
+  name='Policy',
+  full_name='aapt.pb.OverlayableItem.Policy',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NONE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PUBLIC', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SYSTEM', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='VENDOR', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PRODUCT', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SIGNATURE', index=5, number=5,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ODM', index=6, number=6,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OEM', index=7, number=7,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1179,
+  serialized_end=1279,
+)
+_sym_db.RegisterEnumDescriptor(_OVERLAYABLEITEM_POLICY)
+
+_REFERENCE_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='aapt.pb.Reference.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='REFERENCE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ATTRIBUTE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2426,
+  serialized_end=2462,
+)
+_sym_db.RegisterEnumDescriptor(_REFERENCE_TYPE)
+
+_FILEREFERENCE_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='aapt.pb.FileReference.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PNG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BINARY_XML', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PROTO_XML', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2732,
+  serialized_end=2791,
+)
+_sym_db.RegisterEnumDescriptor(_FILEREFERENCE_TYPE)
+
+_ATTRIBUTE_FORMATFLAGS = _descriptor.EnumDescriptor(
+  name='FormatFlags',
+  full_name='aapt.pb.Attribute.FormatFlags',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NONE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ANY', index=1, number=65535,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REFERENCE', index=2, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=3, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INTEGER', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BOOLEAN', index=5, number=8,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COLOR', index=6, number=16,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FLOAT', index=7, number=32,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DIMENSION', index=8, number=64,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FRACTION', index=9, number=128,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ENUM', index=10, number=65536,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FLAGS', index=11, number=131072,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3548,
+  serialized_end=3712,
+)
+_sym_db.RegisterEnumDescriptor(_ATTRIBUTE_FORMATFLAGS)
+
+_PLURAL_ARITY = _descriptor.EnumDescriptor(
+  name='Arity',
+  full_name='aapt.pb.Plural.Arity',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='ZERO', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ONE', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TWO', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FEW', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='MANY', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTHER', index=5, number=5,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4422,
+  serialized_end=4487,
+)
+_sym_db.RegisterEnumDescriptor(_PLURAL_ARITY)
+
+
+_STRINGPOOL = _descriptor.Descriptor(
+  name='StringPool',
+  full_name='aapt.pb.StringPool',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='data', full_name='aapt.pb.StringPool.data', index=0,
+      number=1, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=105,
+  serialized_end=131,
+)
+
+
+_SOURCEPOSITION = _descriptor.Descriptor(
+  name='SourcePosition',
+  full_name='aapt.pb.SourcePosition',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='line_number', full_name='aapt.pb.SourcePosition.line_number', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='column_number', full_name='aapt.pb.SourcePosition.column_number', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=133,
+  serialized_end=193,
+)
+
+
+_SOURCE = _descriptor.Descriptor(
+  name='Source',
+  full_name='aapt.pb.Source',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path_idx', full_name='aapt.pb.Source.path_idx', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='position', full_name='aapt.pb.Source.position', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=195,
+  serialized_end=264,
+)
+
+
+_TOOLFINGERPRINT = _descriptor.Descriptor(
+  name='ToolFingerprint',
+  full_name='aapt.pb.ToolFingerprint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tool', full_name='aapt.pb.ToolFingerprint.tool', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='aapt.pb.ToolFingerprint.version', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=266,
+  serialized_end=314,
+)
+
+
+_RESOURCETABLE = _descriptor.Descriptor(
+  name='ResourceTable',
+  full_name='aapt.pb.ResourceTable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source_pool', full_name='aapt.pb.ResourceTable.source_pool', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='package', full_name='aapt.pb.ResourceTable.package', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable', full_name='aapt.pb.ResourceTable.overlayable', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='tool_fingerprint', full_name='aapt.pb.ResourceTable.tool_fingerprint', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=317,
+  serialized_end=504,
+)
+
+
+_PACKAGEID = _descriptor.Descriptor(
+  name='PackageId',
+  full_name='aapt.pb.PackageId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.PackageId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=506,
+  serialized_end=529,
+)
+
+
+_PACKAGE = _descriptor.Descriptor(
+  name='Package',
+  full_name='aapt.pb.Package',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='package_id', full_name='aapt.pb.Package.package_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='package_name', full_name='aapt.pb.Package.package_name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Package.type', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=531,
+  serialized_end=631,
+)
+
+
+_TYPEID = _descriptor.Descriptor(
+  name='TypeId',
+  full_name='aapt.pb.TypeId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.TypeId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=633,
+  serialized_end=653,
+)
+
+
+_TYPE = _descriptor.Descriptor(
+  name='Type',
+  full_name='aapt.pb.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type_id', full_name='aapt.pb.Type.type_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Type.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Type.entry', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=655,
+  serialized_end=740,
+)
+
+
+_VISIBILITY = _descriptor.Descriptor(
+  name='Visibility',
+  full_name='aapt.pb.Visibility',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='level', full_name='aapt.pb.Visibility.level', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Visibility.source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Visibility.comment', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _VISIBILITY_LEVEL,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=743,
+  serialized_end=894,
+)
+
+
+_ALLOWNEW = _descriptor.Descriptor(
+  name='AllowNew',
+  full_name='aapt.pb.AllowNew',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.AllowNew.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.AllowNew.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=896,
+  serialized_end=956,
+)
+
+
+_OVERLAYABLE = _descriptor.Descriptor(
+  name='Overlayable',
+  full_name='aapt.pb.Overlayable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Overlayable.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Overlayable.source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='actor', full_name='aapt.pb.Overlayable.actor', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=958,
+  serialized_end=1033,
+)
+
+
+_OVERLAYABLEITEM = _descriptor.Descriptor(
+  name='OverlayableItem',
+  full_name='aapt.pb.OverlayableItem',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.OverlayableItem.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.OverlayableItem.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='policy', full_name='aapt.pb.OverlayableItem.policy', index=2,
+      number=3, type=14, cpp_type=8, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable_idx', full_name='aapt.pb.OverlayableItem.overlayable_idx', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _OVERLAYABLEITEM_POLICY,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1036,
+  serialized_end=1279,
+)
+
+
+_ENTRYID = _descriptor.Descriptor(
+  name='EntryId',
+  full_name='aapt.pb.EntryId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.EntryId.id', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1281,
+  serialized_end=1302,
+)
+
+
+_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry_id', full_name='aapt.pb.Entry.entry_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Entry.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='visibility', full_name='aapt.pb.Entry.visibility', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='allow_new', full_name='aapt.pb.Entry.allow_new', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='overlayable_item', full_name='aapt.pb.Entry.overlayable_item', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='config_value', full_name='aapt.pb.Entry.config_value', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1305,
+  serialized_end=1537,
+)
+
+
+_CONFIGVALUE = _descriptor.Descriptor(
+  name='ConfigValue',
+  full_name='aapt.pb.ConfigValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='config', full_name='aapt.pb.ConfigValue.config', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.ConfigValue.value', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1539,
+  serialized_end=1623,
+)
+
+
+_VALUE = _descriptor.Descriptor(
+  name='Value',
+  full_name='aapt.pb.Value',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Value.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Value.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='weak', full_name='aapt.pb.Value.weak', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Value.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='compound_value', full_name='aapt.pb.Value.compound_value', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
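+    # Declared with an empty field list; the oneof's member fields ('item'
+    # and 'compound_value') are appended in the cross-reference pass below.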
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.Value.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1626,
+  serialized_end=1787,
+)
+
+
+_ITEM = _descriptor.Descriptor(
+  name='Item',
+  full_name='aapt.pb.Item',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ref', full_name='aapt.pb.Item.ref', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='str', full_name='aapt.pb.Item.str', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='raw_str', full_name='aapt.pb.Item.raw_str', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='styled_str', full_name='aapt.pb.Item.styled_str', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='file', full_name='aapt.pb.Item.file', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.Item.id', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='prim', full_name='aapt.pb.Item.prim', index=6,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.Item.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1790,
+  serialized_end=2059,
+)
+
+
+_COMPOUNDVALUE = _descriptor.Descriptor(
+  name='CompoundValue',
+  full_name='aapt.pb.CompoundValue',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='attr', full_name='aapt.pb.CompoundValue.attr', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='style', full_name='aapt.pb.CompoundValue.style', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='styleable', full_name='aapt.pb.CompoundValue.styleable', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='array', full_name='aapt.pb.CompoundValue.array', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='plural', full_name='aapt.pb.CompoundValue.plural', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='aapt.pb.CompoundValue.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=2062,
+  serialized_end=2264,
+)
+
+
+_BOOLEAN = _descriptor.Descriptor(
+  name='Boolean',
+  full_name='aapt.pb.Boolean',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.Boolean.value', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2266,
+  serialized_end=2290,
+)
+
+
+_REFERENCE = _descriptor.Descriptor(
+  name='Reference',
+  full_name='aapt.pb.Reference',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Reference.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='aapt.pb.Reference.id', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Reference.name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='private', full_name='aapt.pb.Reference.private', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='is_dynamic', full_name='aapt.pb.Reference.is_dynamic', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _REFERENCE_TYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2293,
+  serialized_end=2462,
+)
+
+
+_ID = _descriptor.Descriptor(
+  name='Id',
+  full_name='aapt.pb.Id',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2464,
+  serialized_end=2468,
+)
+
+
+_STRING = _descriptor.Descriptor(
+  name='String',
+  full_name='aapt.pb.String',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.String.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2470,
+  serialized_end=2493,
+)
+
+
+_RAWSTRING = _descriptor.Descriptor(
+  name='RawString',
+  full_name='aapt.pb.RawString',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.RawString.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2495,
+  serialized_end=2521,
+)
+
+
+_STYLEDSTRING_SPAN = _descriptor.Descriptor(
+  name='Span',
+  full_name='aapt.pb.StyledString.Span',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='tag', full_name='aapt.pb.StyledString.Span.tag', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='first_char', full_name='aapt.pb.StyledString.Span.first_char', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='last_char', full_name='aapt.pb.StyledString.Span.last_char', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2597,
+  serialized_end=2655,
+)
+
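+# Nested message descriptors such as _STYLEDSTRING_SPAN are defined before
+# their container and then listed in its nested_types; note that Span's
+# serialized range (2597-2655) sits inside StyledString's (2524-2655).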
+_STYLEDSTRING = _descriptor.Descriptor(
+  name='StyledString',
+  full_name='aapt.pb.StyledString',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.StyledString.value', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='span', full_name='aapt.pb.StyledString.span', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLEDSTRING_SPAN, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2524,
+  serialized_end=2655,
+)
+
+
+_FILEREFERENCE = _descriptor.Descriptor(
+  name='FileReference',
+  full_name='aapt.pb.FileReference',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path', full_name='aapt.pb.FileReference.path', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.FileReference.type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FILEREFERENCE_TYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2658,
+  serialized_end=2791,
+)
+
+
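+# NullType and EmptyType are marker messages with no fields; Primitive's
+# oneof uses them so an explicitly null and an explicitly empty value can
+# each be represented without carrying a payload.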
+_PRIMITIVE_NULLTYPE = _descriptor.Descriptor(
+  name='NullType',
+  full_name='aapt.pb.Primitive.NullType',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3271,
+  serialized_end=3281,
+)
+
+_PRIMITIVE_EMPTYTYPE = _descriptor.Descriptor(
+  name='EmptyType',
+  full_name='aapt.pb.Primitive.EmptyType',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3283,
+  serialized_end=3294,
+)
+
+_PRIMITIVE = _descriptor.Descriptor(
+  name='Primitive',
+  full_name='aapt.pb.Primitive',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='null_value', full_name='aapt.pb.Primitive.null_value', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='empty_value', full_name='aapt.pb.Primitive.empty_value', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='float_value', full_name='aapt.pb.Primitive.float_value', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dimension_value', full_name='aapt.pb.Primitive.dimension_value', index=3,
+      number=13, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fraction_value', full_name='aapt.pb.Primitive.fraction_value', index=4,
+      number=14, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='int_decimal_value', full_name='aapt.pb.Primitive.int_decimal_value', index=5,
+      number=6, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='int_hexadecimal_value', full_name='aapt.pb.Primitive.int_hexadecimal_value', index=6,
+      number=7, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='boolean_value', full_name='aapt.pb.Primitive.boolean_value', index=7,
+      number=8, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_argb8_value', full_name='aapt.pb.Primitive.color_argb8_value', index=8,
+      number=9, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_rgb8_value', full_name='aapt.pb.Primitive.color_rgb8_value', index=9,
+      number=10, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_argb4_value', full_name='aapt.pb.Primitive.color_argb4_value', index=10,
+      number=11, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='color_rgb4_value', full_name='aapt.pb.Primitive.color_rgb4_value', index=11,
+      number=12, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
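+    # The two *_deprecated fields keep their original field numbers (4 and 5);
+    # serialized_options=_b('\030\001') is the encoded FieldOptions message
+    # with deprecated=true (field 3, varint 1).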
+    _descriptor.FieldDescriptor(
+      name='dimension_value_deprecated', full_name='aapt.pb.Primitive.dimension_value_deprecated', index=12,
+      number=4, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=_b('\030\001'), file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fraction_value_deprecated', full_name='aapt.pb.Primitive.fraction_value_deprecated', index=13,
+      number=5, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=_b('\030\001'), file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PRIMITIVE_NULLTYPE, _PRIMITIVE_EMPTYTYPE, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='oneof_value', full_name='aapt.pb.Primitive.oneof_value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=2794,
+  serialized_end=3309,
+)
+
+
+_ATTRIBUTE_SYMBOL = _descriptor.Descriptor(
+  name='Symbol',
+  full_name='aapt.pb.Attribute.Symbol',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Attribute.Symbol.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Attribute.Symbol.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.Attribute.Symbol.name', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.Attribute.Symbol.value', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='aapt.pb.Attribute.Symbol.type', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3424,
+  serialized_end=3545,
+)
+
+_ATTRIBUTE = _descriptor.Descriptor(
+  name='Attribute',
+  full_name='aapt.pb.Attribute',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='format_flags', full_name='aapt.pb.Attribute.format_flags', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='min_int', full_name='aapt.pb.Attribute.min_int', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='max_int', full_name='aapt.pb.Attribute.max_int', index=2,
+      number=3, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='symbol', full_name='aapt.pb.Attribute.symbol', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ATTRIBUTE_SYMBOL, ],
+  enum_types=[
+    _ATTRIBUTE_FORMATFLAGS,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3312,
+  serialized_end=3712,
+)
+
+
+_STYLE_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Style.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Style.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Style.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='key', full_name='aapt.pb.Style.Entry.key', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Style.Entry.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3837,
+  serialized_end=3956,
+)
+
+_STYLE = _descriptor.Descriptor(
+  name='Style',
+  full_name='aapt.pb.Style',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='parent', full_name='aapt.pb.Style.parent', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='parent_source', full_name='aapt.pb.Style.parent_source', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Style.entry', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLE_ENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3715,
+  serialized_end=3956,
+)
+
+
+_STYLEABLE_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Styleable.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Styleable.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Styleable.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='attr', full_name='aapt.pb.Styleable.Entry.attr', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4013,
+  serialized_end=4104,
+)
+
+_STYLEABLE = _descriptor.Descriptor(
+  name='Styleable',
+  full_name='aapt.pb.Styleable',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Styleable.entry', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_STYLEABLE_ENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3959,
+  serialized_end=4104,
+)
+
+
+_ARRAY_ELEMENT = _descriptor.Descriptor(
+  name='Element',
+  full_name='aapt.pb.Array.Element',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Array.Element.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Array.Element.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Array.Element.item', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4157,
+  serialized_end=4245,
+)
+
+_ARRAY = _descriptor.Descriptor(
+  name='Array',
+  full_name='aapt.pb.Array',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='element', full_name='aapt.pb.Array.element', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_ARRAY_ELEMENT, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4107,
+  serialized_end=4245,
+)
+
+
+_PLURAL_ENTRY = _descriptor.Descriptor(
+  name='Entry',
+  full_name='aapt.pb.Plural.Entry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.Plural.Entry.source', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='comment', full_name='aapt.pb.Plural.Entry.comment', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='arity', full_name='aapt.pb.Plural.Entry.arity', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='item', full_name='aapt.pb.Plural.Entry.item', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4296,
+  serialized_end=4420,
+)
+
+_PLURAL = _descriptor.Descriptor(
+  name='Plural',
+  full_name='aapt.pb.Plural',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='entry', full_name='aapt.pb.Plural.entry', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PLURAL_ENTRY, ],
+  enum_types=[
+    _PLURAL_ARITY,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4248,
+  serialized_end=4487,
+)
+
+
+_XMLNODE = _descriptor.Descriptor(
+  name='XmlNode',
+  full_name='aapt.pb.XmlNode',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='element', full_name='aapt.pb.XmlNode.element', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='text', full_name='aapt.pb.XmlNode.text', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlNode.source', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='node', full_name='aapt.pb.XmlNode.node',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=4489,
+  serialized_end=4603,
+)
+
+
+_XMLELEMENT = _descriptor.Descriptor(
+  name='XmlElement',
+  full_name='aapt.pb.XmlElement',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='namespace_declaration', full_name='aapt.pb.XmlElement.namespace_declaration', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='namespace_uri', full_name='aapt.pb.XmlElement.namespace_uri', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.XmlElement.name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='attribute', full_name='aapt.pb.XmlElement.attribute', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='child', full_name='aapt.pb.XmlElement.child', index=4,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4606,
+  serialized_end=4784,
+)
+
+
+_XMLNAMESPACE = _descriptor.Descriptor(
+  name='XmlNamespace',
+  full_name='aapt.pb.XmlNamespace',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='prefix', full_name='aapt.pb.XmlNamespace.prefix', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='uri', full_name='aapt.pb.XmlNamespace.uri', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlNamespace.source', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4786,
+  serialized_end=4870,
+)
+
+
+_XMLATTRIBUTE = _descriptor.Descriptor(
+  name='XmlAttribute',
+  full_name='aapt.pb.XmlAttribute',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='namespace_uri', full_name='aapt.pb.XmlAttribute.namespace_uri', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='aapt.pb.XmlAttribute.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='aapt.pb.XmlAttribute.value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source', full_name='aapt.pb.XmlAttribute.source', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='resource_id', full_name='aapt.pb.XmlAttribute.resource_id', index=4,
+      number=5, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='compiled_item', full_name='aapt.pb.XmlAttribute.compiled_item', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4873,
+  serialized_end=5039,
+)
+
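+# Cross-reference pass: protoc emits these assignments after all Descriptor
+# objects exist, back-patching message_type/enum_type references, nested
+# containing_type links, and oneof membership that could not be resolved
+# while the descriptors above were being constructed.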
+_SOURCE.fields_by_name['position'].message_type = _SOURCEPOSITION
+_RESOURCETABLE.fields_by_name['source_pool'].message_type = _STRINGPOOL
+_RESOURCETABLE.fields_by_name['package'].message_type = _PACKAGE
+_RESOURCETABLE.fields_by_name['overlayable'].message_type = _OVERLAYABLE
+_RESOURCETABLE.fields_by_name['tool_fingerprint'].message_type = _TOOLFINGERPRINT
+_PACKAGE.fields_by_name['package_id'].message_type = _PACKAGEID
+_PACKAGE.fields_by_name['type'].message_type = _TYPE
+_TYPE.fields_by_name['type_id'].message_type = _TYPEID
+_TYPE.fields_by_name['entry'].message_type = _ENTRY
+_VISIBILITY.fields_by_name['level'].enum_type = _VISIBILITY_LEVEL
+_VISIBILITY.fields_by_name['source'].message_type = _SOURCE
+_VISIBILITY_LEVEL.containing_type = _VISIBILITY
+_ALLOWNEW.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLE.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['policy'].enum_type = _OVERLAYABLEITEM_POLICY
+_OVERLAYABLEITEM_POLICY.containing_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['entry_id'].message_type = _ENTRYID
+_ENTRY.fields_by_name['visibility'].message_type = _VISIBILITY
+_ENTRY.fields_by_name['allow_new'].message_type = _ALLOWNEW
+_ENTRY.fields_by_name['overlayable_item'].message_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['config_value'].message_type = _CONFIGVALUE
+_CONFIGVALUE.fields_by_name['config'].message_type = frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2._CONFIGURATION
+_CONFIGVALUE.fields_by_name['value'].message_type = _VALUE
+_VALUE.fields_by_name['source'].message_type = _SOURCE
+_VALUE.fields_by_name['item'].message_type = _ITEM
+_VALUE.fields_by_name['compound_value'].message_type = _COMPOUNDVALUE
+_VALUE.oneofs_by_name['value'].fields.append(
+  _VALUE.fields_by_name['item'])
+_VALUE.fields_by_name['item'].containing_oneof = _VALUE.oneofs_by_name['value']
+_VALUE.oneofs_by_name['value'].fields.append(
+  _VALUE.fields_by_name['compound_value'])
+_VALUE.fields_by_name['compound_value'].containing_oneof = _VALUE.oneofs_by_name['value']
+_ITEM.fields_by_name['ref'].message_type = _REFERENCE
+_ITEM.fields_by_name['str'].message_type = _STRING
+_ITEM.fields_by_name['raw_str'].message_type = _RAWSTRING
+_ITEM.fields_by_name['styled_str'].message_type = _STYLEDSTRING
+_ITEM.fields_by_name['file'].message_type = _FILEREFERENCE
+_ITEM.fields_by_name['id'].message_type = _ID
+_ITEM.fields_by_name['prim'].message_type = _PRIMITIVE
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['ref'])
+_ITEM.fields_by_name['ref'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['str'])
+_ITEM.fields_by_name['str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['raw_str'])
+_ITEM.fields_by_name['raw_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['styled_str'])
+_ITEM.fields_by_name['styled_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['file'])
+_ITEM.fields_by_name['file'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['id'])
+_ITEM.fields_by_name['id'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+  _ITEM.fields_by_name['prim'])
+_ITEM.fields_by_name['prim'].containing_oneof = _ITEM.oneofs_by_name['value']
+_COMPOUNDVALUE.fields_by_name['attr'].message_type = _ATTRIBUTE
+_COMPOUNDVALUE.fields_by_name['style'].message_type = _STYLE
+_COMPOUNDVALUE.fields_by_name['styleable'].message_type = _STYLEABLE
+_COMPOUNDVALUE.fields_by_name['array'].message_type = _ARRAY
+_COMPOUNDVALUE.fields_by_name['plural'].message_type = _PLURAL
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['attr'])
+_COMPOUNDVALUE.fields_by_name['attr'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['style'])
+_COMPOUNDVALUE.fields_by_name['style'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['styleable'])
+_COMPOUNDVALUE.fields_by_name['styleable'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['array'])
+_COMPOUNDVALUE.fields_by_name['array'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+  _COMPOUNDVALUE.fields_by_name['plural'])
+_COMPOUNDVALUE.fields_by_name['plural'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_REFERENCE.fields_by_name['type'].enum_type = _REFERENCE_TYPE
+_REFERENCE.fields_by_name['is_dynamic'].message_type = _BOOLEAN
+_REFERENCE_TYPE.containing_type = _REFERENCE
+_STYLEDSTRING_SPAN.containing_type = _STYLEDSTRING
+_STYLEDSTRING.fields_by_name['span'].message_type = _STYLEDSTRING_SPAN
+_FILEREFERENCE.fields_by_name['type'].enum_type = _FILEREFERENCE_TYPE
+_FILEREFERENCE_TYPE.containing_type = _FILEREFERENCE
+_PRIMITIVE_NULLTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE_EMPTYTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE.fields_by_name['null_value'].message_type = _PRIMITIVE_NULLTYPE
+_PRIMITIVE.fields_by_name['empty_value'].message_type = _PRIMITIVE_EMPTYTYPE
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['null_value'])
+_PRIMITIVE.fields_by_name['null_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['empty_value'])
+_PRIMITIVE.fields_by_name['empty_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['float_value'])
+_PRIMITIVE.fields_by_name['float_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['dimension_value'])
+_PRIMITIVE.fields_by_name['dimension_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['fraction_value'])
+_PRIMITIVE.fields_by_name['fraction_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['int_decimal_value'])
+_PRIMITIVE.fields_by_name['int_decimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['int_hexadecimal_value'])
+_PRIMITIVE.fields_by_name['int_hexadecimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['boolean_value'])
+_PRIMITIVE.fields_by_name['boolean_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_argb8_value'])
+_PRIMITIVE.fields_by_name['color_argb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_rgb8_value'])
+_PRIMITIVE.fields_by_name['color_rgb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_argb4_value'])
+_PRIMITIVE.fields_by_name['color_argb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['color_rgb4_value'])
+_PRIMITIVE.fields_by_name['color_rgb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['dimension_value_deprecated'])
+_PRIMITIVE.fields_by_name['dimension_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+  _PRIMITIVE.fields_by_name['fraction_value_deprecated'])
+_PRIMITIVE.fields_by_name['fraction_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_ATTRIBUTE_SYMBOL.fields_by_name['source'].message_type = _SOURCE
+_ATTRIBUTE_SYMBOL.fields_by_name['name'].message_type = _REFERENCE
+_ATTRIBUTE_SYMBOL.containing_type = _ATTRIBUTE
+_ATTRIBUTE.fields_by_name['symbol'].message_type = _ATTRIBUTE_SYMBOL
+_ATTRIBUTE_FORMATFLAGS.containing_type = _ATTRIBUTE
+_STYLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLE_ENTRY.fields_by_name['key'].message_type = _REFERENCE
+_STYLE_ENTRY.fields_by_name['item'].message_type = _ITEM
+_STYLE_ENTRY.containing_type = _STYLE
+_STYLE.fields_by_name['parent'].message_type = _REFERENCE
+_STYLE.fields_by_name['parent_source'].message_type = _SOURCE
+_STYLE.fields_by_name['entry'].message_type = _STYLE_ENTRY
+_STYLEABLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLEABLE_ENTRY.fields_by_name['attr'].message_type = _REFERENCE
+_STYLEABLE_ENTRY.containing_type = _STYLEABLE
+_STYLEABLE.fields_by_name['entry'].message_type = _STYLEABLE_ENTRY
+_ARRAY_ELEMENT.fields_by_name['source'].message_type = _SOURCE
+_ARRAY_ELEMENT.fields_by_name['item'].message_type = _ITEM
+_ARRAY_ELEMENT.containing_type = _ARRAY
+_ARRAY.fields_by_name['element'].message_type = _ARRAY_ELEMENT
+_PLURAL_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_PLURAL_ENTRY.fields_by_name['arity'].enum_type = _PLURAL_ARITY
+_PLURAL_ENTRY.fields_by_name['item'].message_type = _ITEM
+_PLURAL_ENTRY.containing_type = _PLURAL
+_PLURAL.fields_by_name['entry'].message_type = _PLURAL_ENTRY
+_PLURAL_ARITY.containing_type = _PLURAL
+_XMLNODE.fields_by_name['element'].message_type = _XMLELEMENT
+_XMLNODE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLNODE.oneofs_by_name['node'].fields.append(
+  _XMLNODE.fields_by_name['element'])
+_XMLNODE.fields_by_name['element'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLNODE.oneofs_by_name['node'].fields.append(
+  _XMLNODE.fields_by_name['text'])
+_XMLNODE.fields_by_name['text'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLELEMENT.fields_by_name['namespace_declaration'].message_type = _XMLNAMESPACE
+_XMLELEMENT.fields_by_name['attribute'].message_type = _XMLATTRIBUTE
+_XMLELEMENT.fields_by_name['child'].message_type = _XMLNODE
+_XMLNAMESPACE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['compiled_item'].message_type = _ITEM
+DESCRIPTOR.message_types_by_name['StringPool'] = _STRINGPOOL
+DESCRIPTOR.message_types_by_name['SourcePosition'] = _SOURCEPOSITION
+DESCRIPTOR.message_types_by_name['Source'] = _SOURCE
+DESCRIPTOR.message_types_by_name['ToolFingerprint'] = _TOOLFINGERPRINT
+DESCRIPTOR.message_types_by_name['ResourceTable'] = _RESOURCETABLE
+DESCRIPTOR.message_types_by_name['PackageId'] = _PACKAGEID
+DESCRIPTOR.message_types_by_name['Package'] = _PACKAGE
+DESCRIPTOR.message_types_by_name['TypeId'] = _TYPEID
+DESCRIPTOR.message_types_by_name['Type'] = _TYPE
+DESCRIPTOR.message_types_by_name['Visibility'] = _VISIBILITY
+DESCRIPTOR.message_types_by_name['AllowNew'] = _ALLOWNEW
+DESCRIPTOR.message_types_by_name['Overlayable'] = _OVERLAYABLE
+DESCRIPTOR.message_types_by_name['OverlayableItem'] = _OVERLAYABLEITEM
+DESCRIPTOR.message_types_by_name['EntryId'] = _ENTRYID
+DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY
+DESCRIPTOR.message_types_by_name['ConfigValue'] = _CONFIGVALUE
+DESCRIPTOR.message_types_by_name['Value'] = _VALUE
+DESCRIPTOR.message_types_by_name['Item'] = _ITEM
+DESCRIPTOR.message_types_by_name['CompoundValue'] = _COMPOUNDVALUE
+DESCRIPTOR.message_types_by_name['Boolean'] = _BOOLEAN
+DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE
+DESCRIPTOR.message_types_by_name['Id'] = _ID
+DESCRIPTOR.message_types_by_name['String'] = _STRING
+DESCRIPTOR.message_types_by_name['RawString'] = _RAWSTRING
+DESCRIPTOR.message_types_by_name['StyledString'] = _STYLEDSTRING
+DESCRIPTOR.message_types_by_name['FileReference'] = _FILEREFERENCE
+DESCRIPTOR.message_types_by_name['Primitive'] = _PRIMITIVE
+DESCRIPTOR.message_types_by_name['Attribute'] = _ATTRIBUTE
+DESCRIPTOR.message_types_by_name['Style'] = _STYLE
+DESCRIPTOR.message_types_by_name['Styleable'] = _STYLEABLE
+DESCRIPTOR.message_types_by_name['Array'] = _ARRAY
+DESCRIPTOR.message_types_by_name['Plural'] = _PLURAL
+DESCRIPTOR.message_types_by_name['XmlNode'] = _XMLNODE
+DESCRIPTOR.message_types_by_name['XmlElement'] = _XMLELEMENT
+DESCRIPTOR.message_types_by_name['XmlNamespace'] = _XMLNAMESPACE
+DESCRIPTOR.message_types_by_name['XmlAttribute'] = _XMLATTRIBUTE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+StringPool = _reflection.GeneratedProtocolMessageType('StringPool', (_message.Message,), {
+  'DESCRIPTOR' : _STRINGPOOL,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.StringPool)
+  })
+_sym_db.RegisterMessage(StringPool)
+
+SourcePosition = _reflection.GeneratedProtocolMessageType('SourcePosition', (_message.Message,), {
+  'DESCRIPTOR' : _SOURCEPOSITION,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.SourcePosition)
+  })
+_sym_db.RegisterMessage(SourcePosition)
+
+Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), {
+  'DESCRIPTOR' : _SOURCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Source)
+  })
+_sym_db.RegisterMessage(Source)
+
+ToolFingerprint = _reflection.GeneratedProtocolMessageType('ToolFingerprint', (_message.Message,), {
+  'DESCRIPTOR' : _TOOLFINGERPRINT,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ToolFingerprint)
+  })
+_sym_db.RegisterMessage(ToolFingerprint)
+
+ResourceTable = _reflection.GeneratedProtocolMessageType('ResourceTable', (_message.Message,), {
+  'DESCRIPTOR' : _RESOURCETABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ResourceTable)
+  })
+_sym_db.RegisterMessage(ResourceTable)
+
+PackageId = _reflection.GeneratedProtocolMessageType('PackageId', (_message.Message,), {
+  'DESCRIPTOR' : _PACKAGEID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.PackageId)
+  })
+_sym_db.RegisterMessage(PackageId)
+
+Package = _reflection.GeneratedProtocolMessageType('Package', (_message.Message,), {
+  'DESCRIPTOR' : _PACKAGE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Package)
+  })
+_sym_db.RegisterMessage(Package)
+
+TypeId = _reflection.GeneratedProtocolMessageType('TypeId', (_message.Message,), {
+  'DESCRIPTOR' : _TYPEID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.TypeId)
+  })
+_sym_db.RegisterMessage(TypeId)
+
+Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), {
+  'DESCRIPTOR' : _TYPE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Type)
+  })
+_sym_db.RegisterMessage(Type)
+
+Visibility = _reflection.GeneratedProtocolMessageType('Visibility', (_message.Message,), {
+  'DESCRIPTOR' : _VISIBILITY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Visibility)
+  })
+_sym_db.RegisterMessage(Visibility)
+
+AllowNew = _reflection.GeneratedProtocolMessageType('AllowNew', (_message.Message,), {
+  'DESCRIPTOR' : _ALLOWNEW,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.AllowNew)
+  })
+_sym_db.RegisterMessage(AllowNew)
+
+Overlayable = _reflection.GeneratedProtocolMessageType('Overlayable', (_message.Message,), {
+  'DESCRIPTOR' : _OVERLAYABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Overlayable)
+  })
+_sym_db.RegisterMessage(Overlayable)
+
+OverlayableItem = _reflection.GeneratedProtocolMessageType('OverlayableItem', (_message.Message,), {
+  'DESCRIPTOR' : _OVERLAYABLEITEM,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.OverlayableItem)
+  })
+_sym_db.RegisterMessage(OverlayableItem)
+
+EntryId = _reflection.GeneratedProtocolMessageType('EntryId', (_message.Message,), {
+  'DESCRIPTOR' : _ENTRYID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.EntryId)
+  })
+_sym_db.RegisterMessage(EntryId)
+
+Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+  'DESCRIPTOR' : _ENTRY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Entry)
+  })
+_sym_db.RegisterMessage(Entry)
+
+ConfigValue = _reflection.GeneratedProtocolMessageType('ConfigValue', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGVALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.ConfigValue)
+  })
+_sym_db.RegisterMessage(ConfigValue)
+
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
+  'DESCRIPTOR' : _VALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Value)
+  })
+_sym_db.RegisterMessage(Value)
+
+Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), {
+  'DESCRIPTOR' : _ITEM,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Item)
+  })
+_sym_db.RegisterMessage(Item)
+
+CompoundValue = _reflection.GeneratedProtocolMessageType('CompoundValue', (_message.Message,), {
+  'DESCRIPTOR' : _COMPOUNDVALUE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.CompoundValue)
+  })
+_sym_db.RegisterMessage(CompoundValue)
+
+Boolean = _reflection.GeneratedProtocolMessageType('Boolean', (_message.Message,), {
+  'DESCRIPTOR' : _BOOLEAN,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Boolean)
+  })
+_sym_db.RegisterMessage(Boolean)
+
+Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), {
+  'DESCRIPTOR' : _REFERENCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Reference)
+  })
+_sym_db.RegisterMessage(Reference)
+
+Id = _reflection.GeneratedProtocolMessageType('Id', (_message.Message,), {
+  'DESCRIPTOR' : _ID,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Id)
+  })
+_sym_db.RegisterMessage(Id)
+
+String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), {
+  'DESCRIPTOR' : _STRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.String)
+  })
+_sym_db.RegisterMessage(String)
+
+RawString = _reflection.GeneratedProtocolMessageType('RawString', (_message.Message,), {
+  'DESCRIPTOR' : _RAWSTRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.RawString)
+  })
+_sym_db.RegisterMessage(RawString)
+
+StyledString = _reflection.GeneratedProtocolMessageType('StyledString', (_message.Message,), {
+
+  'Span' : _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), {
+    'DESCRIPTOR' : _STYLEDSTRING_SPAN,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.StyledString.Span)
+    })
+  ,
+  'DESCRIPTOR' : _STYLEDSTRING,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.StyledString)
+  })
+_sym_db.RegisterMessage(StyledString)
+_sym_db.RegisterMessage(StyledString.Span)
+
+FileReference = _reflection.GeneratedProtocolMessageType('FileReference', (_message.Message,), {
+  'DESCRIPTOR' : _FILEREFERENCE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.FileReference)
+  })
+_sym_db.RegisterMessage(FileReference)
+
+Primitive = _reflection.GeneratedProtocolMessageType('Primitive', (_message.Message,), {
+
+  'NullType' : _reflection.GeneratedProtocolMessageType('NullType', (_message.Message,), {
+    'DESCRIPTOR' : _PRIMITIVE_NULLTYPE,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.NullType)
+    })
+  ,
+
+  'EmptyType' : _reflection.GeneratedProtocolMessageType('EmptyType', (_message.Message,), {
+    'DESCRIPTOR' : _PRIMITIVE_EMPTYTYPE,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.EmptyType)
+    })
+  ,
+  'DESCRIPTOR' : _PRIMITIVE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Primitive)
+  })
+_sym_db.RegisterMessage(Primitive)
+_sym_db.RegisterMessage(Primitive.NullType)
+_sym_db.RegisterMessage(Primitive.EmptyType)
+
+Attribute = _reflection.GeneratedProtocolMessageType('Attribute', (_message.Message,), {
+
+  'Symbol' : _reflection.GeneratedProtocolMessageType('Symbol', (_message.Message,), {
+    'DESCRIPTOR' : _ATTRIBUTE_SYMBOL,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Attribute.Symbol)
+    })
+  ,
+  'DESCRIPTOR' : _ATTRIBUTE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Attribute)
+  })
+_sym_db.RegisterMessage(Attribute)
+_sym_db.RegisterMessage(Attribute.Symbol)
+
+Style = _reflection.GeneratedProtocolMessageType('Style', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _STYLE_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Style.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _STYLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Style)
+  })
+_sym_db.RegisterMessage(Style)
+_sym_db.RegisterMessage(Style.Entry)
+
+Styleable = _reflection.GeneratedProtocolMessageType('Styleable', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _STYLEABLE_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Styleable.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _STYLEABLE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Styleable)
+  })
+_sym_db.RegisterMessage(Styleable)
+_sym_db.RegisterMessage(Styleable.Entry)
+
+Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), {
+
+  'Element' : _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), {
+    'DESCRIPTOR' : _ARRAY_ELEMENT,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Array.Element)
+    })
+  ,
+  'DESCRIPTOR' : _ARRAY,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Array)
+  })
+_sym_db.RegisterMessage(Array)
+_sym_db.RegisterMessage(Array.Element)
+
+Plural = _reflection.GeneratedProtocolMessageType('Plural', (_message.Message,), {
+
+  'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+    'DESCRIPTOR' : _PLURAL_ENTRY,
+    '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+    # @@protoc_insertion_point(class_scope:aapt.pb.Plural.Entry)
+    })
+  ,
+  'DESCRIPTOR' : _PLURAL,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.Plural)
+  })
+_sym_db.RegisterMessage(Plural)
+_sym_db.RegisterMessage(Plural.Entry)
+
+XmlNode = _reflection.GeneratedProtocolMessageType('XmlNode', (_message.Message,), {
+  'DESCRIPTOR' : _XMLNODE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlNode)
+  })
+_sym_db.RegisterMessage(XmlNode)
+
+XmlElement = _reflection.GeneratedProtocolMessageType('XmlElement', (_message.Message,), {
+  'DESCRIPTOR' : _XMLELEMENT,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlElement)
+  })
+_sym_db.RegisterMessage(XmlElement)
+
+XmlNamespace = _reflection.GeneratedProtocolMessageType('XmlNamespace', (_message.Message,), {
+  'DESCRIPTOR' : _XMLNAMESPACE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlNamespace)
+  })
+_sym_db.RegisterMessage(XmlNamespace)
+
+XmlAttribute = _reflection.GeneratedProtocolMessageType('XmlAttribute', (_message.Message,), {
+  'DESCRIPTOR' : _XMLATTRIBUTE,
+  '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+  # @@protoc_insertion_point(class_scope:aapt.pb.XmlAttribute)
+  })
+_sym_db.RegisterMessage(XmlAttribute)
+
+
+DESCRIPTOR._options = None
+_PRIMITIVE.fields_by_name['dimension_value_deprecated']._options = None
+_PRIMITIVE.fields_by_name['fraction_value_deprecated']._options = None
+# @@protoc_insertion_point(module_scope)
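The generated module above registers every aapt.pb message with protobuf's
symbol database, so the resulting classes can decode aapt2's proto output
directly. A minimal sketch of reading a compiled resource table with it
(the module import path and the resources.pb file name are hypothetical,
assuming a table produced by aapt2's proto-format output):

    import Resources_pb2  # the generated module above

    # Parse a proto-format resource table, e.g. as emitted by `aapt2 link`.
    table = Resources_pb2.ResourceTable()
    with open('resources.pb', 'rb') as f:
        table.ParseFromString(f.read())

    # Walk packages -> types -> entries, mirroring the nesting registered
    # above (ResourceTable -> Package -> Type -> Entry).
    for package in table.package:
        for type_pb in package.type:
            for entry in type_pb.entry:
                print('%s/%s' % (type_pb.name, entry.name))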
diff --git a/src/build/android/gyp/proto/__init__.py b/src/build/android/gyp/proto/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/gyp/proto/__init__.py
diff --git a/src/build/android/gyp/resources_shrinker/BUILD.gn b/src/build/android/gyp/resources_shrinker/BUILD.gn
new file mode 100644
index 0000000..e6381e1
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/BUILD.gn
@@ -0,0 +1,15 @@
+import("//build/config/android/rules.gni")
+
+java_binary("resources_shrinker") {
+  sources = [ "//build/android/gyp/resources_shrinker/Shrinker.java" ]
+  main_class = "build.android.gyp.resources_shrinker.Shrinker"
+  deps = [
+    "//third_party/android_deps:com_android_tools_common_java",
+    "//third_party/android_deps:com_android_tools_layoutlib_layoutlib_api_java",
+    "//third_party/android_deps:com_android_tools_sdk_common_java",
+    "//third_party/android_deps:com_google_guava_guava_java",
+    "//third_party/android_deps:org_jetbrains_kotlin_kotlin_stdlib_java",
+    "//third_party/r8:r8_java",
+  ]
+  wrapper_script_name = "helper/resources_shrinker"
+}
diff --git a/src/build/android/gyp/resources_shrinker/Shrinker.java b/src/build/android/gyp/resources_shrinker/Shrinker.java
new file mode 100644
index 0000000..50e2f93
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/Shrinker.java
@@ -0,0 +1,599 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Modifications are owned by the Chromium Authors.
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package build.android.gyp.resources_shrinker;
+
+import static com.android.ide.common.symbols.SymbolIo.readFromAapt;
+import static com.android.utils.SdkUtils.endsWithIgnoreCase;
+import static com.google.common.base.Charsets.UTF_8;
+
+import com.android.ide.common.resources.usage.ResourceUsageModel;
+import com.android.ide.common.resources.usage.ResourceUsageModel.Resource;
+import com.android.ide.common.symbols.Symbol;
+import com.android.ide.common.symbols.SymbolTable;
+import com.android.resources.ResourceFolderType;
+import com.android.resources.ResourceType;
+import com.android.tools.r8.CompilationFailedException;
+import com.android.tools.r8.ProgramResource;
+import com.android.tools.r8.ProgramResourceProvider;
+import com.android.tools.r8.ResourceShrinker;
+import com.android.tools.r8.ResourceShrinker.Command;
+import com.android.tools.r8.ResourceShrinker.ReferenceChecker;
+import com.android.tools.r8.origin.PathOrigin;
+import com.android.utils.XmlUtils;
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Maps;
+import com.google.common.io.ByteStreams;
+import com.google.common.io.Closeables;
+import com.google.common.io.Files;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.xml.sax.SAXException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import javax.xml.parsers.ParserConfigurationException;
+
+/**
+  Copied with modifications from gradle core source
+  https://android.googlesource.com/platform/tools/base/+/master/build-system/gradle-core/src/main/groovy/com/android/build/gradle/tasks/ResourceUsageAnalyzer.java
+
+  Modifications are mostly to:
+    - Remove unused code paths to reduce complexity.
+    - Reduce dependencies unless absolutely required.
+*/
+
+public class Shrinker {
+    private static final String ANDROID_RES = "android_res/";
+    private static final String DOT_DEX = ".dex";
+    private static final String DOT_CLASS = ".class";
+    private static final String DOT_XML = ".xml";
+    private static final String DOT_JAR = ".jar";
+    private static final String FN_RESOURCE_TEXT = "R.txt";
+
+    /* A source of resource classes to track; can be either a folder or a jar. */
+    private final Iterable<File> mRTxtFiles;
+    private final File mProguardMapping;
+    /** These can be class or dex files. */
+    private final Iterable<File> mClasses;
+    private final Iterable<File> mManifests;
+    private final Iterable<File> mResourceDirs;
+
+    private final File mReportFile;
+    private final StringWriter mDebugOutput;
+    private final PrintWriter mDebugPrinter;
+
+    /** Easy way to enable more verbose output for debugging. */
+    private boolean mDebug = false;
+
+    /** The computed set of unused resources */
+    private List<Resource> mUnused;
+
+    /**
+     * Map from resource class owners (VM format class) to corresponding resource entries.
+     * This lets us map from code references (obfuscated class and possibly obfuscated field
+     * reference) back to the corresponding resource type and name.
+     */
+    private Map<String, Pair<ResourceType, Map<String, String>>> mResourceObfuscation =
+            Maps.newHashMapWithExpectedSize(30);
+
+    /** Obfuscated name of android/support/v7/widget/SuggestionsAdapter.java */
+    private String mSuggestionsAdapter;
+
+    /** Obfuscated name of android/support/v7/internal/widget/ResourcesWrapper.java */
+    private String mResourcesWrapper;
+
+    /* A Pair class because Java does not come with batteries included. */
+    private static class Pair<U, V> {
+        private U mFirst;
+        private V mSecond;
+
+        Pair(U first, V second) {
+            this.mFirst = first;
+            this.mSecond = second;
+        }
+
+        public U getFirst() {
+            return mFirst;
+        }
+
+        public V getSecond() {
+            return mSecond;
+        }
+    }
+
+    public Shrinker(Iterable<File> rTxtFiles, Iterable<File> classes, Iterable<File> manifests,
+            File mapping, Iterable<File> resources, File reportFile) {
+        mRTxtFiles = rTxtFiles;
+        mProguardMapping = mapping;
+        mClasses = classes;
+        mManifests = manifests;
+        mResourceDirs = resources;
+
+        mReportFile = reportFile;
+        if (reportFile != null) {
+            mDebugOutput = new StringWriter(8 * 1024);
+            mDebugPrinter = new PrintWriter(mDebugOutput);
+        } else {
+            mDebugOutput = null;
+            mDebugPrinter = null;
+        }
+    }
+
+    public void close() {
+        if (mDebugOutput != null) {
+            String output = mDebugOutput.toString();
+
+            if (mReportFile != null) {
+                File dir = mReportFile.getParentFile();
+                if (dir != null) {
+                    if ((dir.exists() || dir.mkdir()) && dir.canWrite()) {
+                        try {
+                            Files.asCharSink(mReportFile, Charsets.UTF_8).write(output);
+                        } catch (IOException ignore) {
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    public void analyze() throws IOException, ParserConfigurationException, SAXException {
+        gatherResourceValues(mRTxtFiles);
+        recordMapping(mProguardMapping);
+
+        for (File jarOrDir : mClasses) {
+            recordClassUsages(jarOrDir);
+        }
+        recordManifestUsages(mManifests);
+        recordResources(mResourceDirs);
+        dumpReferences();
+        mModel.processToolsAttributes();
+        mUnused = mModel.findUnused();
+    }
+
+    public void emitConfig(Path destination) throws IOException {
+        File destinationFile = destination.toFile();
+        if (!destinationFile.exists()) {
+            destinationFile.getParentFile().mkdirs();
+            boolean success = destinationFile.createNewFile();
+            if (!success) {
+                throw new IOException("Could not create " + destination);
+            }
+        }
+        StringBuilder sb = new StringBuilder();
+        Collections.sort(mUnused);
+        for (Resource resource : mUnused) {
+            sb.append(resource.type + "/" + resource.name + "#remove\n");
+        }
+        Files.asCharSink(destinationFile, UTF_8).write(sb.toString());
+    }
+
+    private void dumpReferences() {
+        if (mDebugPrinter != null) {
+            mDebugPrinter.print(mModel.dumpReferences());
+        }
+    }
+
+    private void recordResources(Iterable<File> resources)
+            throws IOException, SAXException, ParserConfigurationException {
+        for (File resDir : resources) {
+            File[] resourceFolders = resDir.listFiles();
+            if (resourceFolders != null) {
+                for (File folder : resourceFolders) {
+                    ResourceFolderType folderType =
+                            ResourceFolderType.getFolderType(folder.getName());
+                    if (folderType != null) {
+                        recordResources(folderType, folder);
+                    }
+                }
+            }
+        }
+    }
+
+    private void recordResources(ResourceFolderType folderType, File folder)
+            throws ParserConfigurationException, SAXException, IOException {
+        File[] files = folder.listFiles();
+        if (files != null) {
+            for (File file : files) {
+                String path = file.getPath();
+                mModel.file = file;
+                try {
+                    boolean isXml = endsWithIgnoreCase(path, DOT_XML);
+                    if (isXml) {
+                        String xml = Files.toString(file, UTF_8);
+                        Document document = XmlUtils.parseDocument(xml, true);
+                        mModel.visitXmlDocument(file, folderType, document);
+                    } else {
+                        mModel.visitBinaryResource(folderType, file);
+                    }
+                } finally {
+                    mModel.file = null;
+                }
+            }
+        }
+    }
+
+    void recordMapping(File mapping) throws IOException {
+        if (mapping == null || !mapping.exists()) {
+            return;
+        }
+        final String arrowString = " -> ";
+        final String resourceString = ".R$";
+        Map<String, String> nameMap = null;
+        for (String line : Files.readLines(mapping, UTF_8)) {
+            if (line.startsWith(" ") || line.startsWith("\t")) {
+                if (nameMap != null) {
+                    // We're processing the members of a resource class: record names into the map
+                    int n = line.length();
+                    int i = 0;
+                    for (; i < n; i++) {
+                        if (!Character.isWhitespace(line.charAt(i))) {
+                            break;
+                        }
+                    }
+                    if (i < n && line.startsWith("int", i)) { // int or int[]
+                        int start = line.indexOf(' ', i + 3) + 1;
+                        int arrow = line.indexOf(arrowString);
+                        if (start > 0 && arrow != -1) {
+                            int end = line.indexOf(' ', start + 1);
+                            if (end != -1) {
+                                String oldName = line.substring(start, end);
+                                String newName =
+                                        line.substring(arrow + arrowString.length()).trim();
+                                if (!newName.equals(oldName)) {
+                                    nameMap.put(newName, oldName);
+                                }
+                            }
+                        }
+                    }
+                }
+                continue;
+            } else {
+                nameMap = null;
+            }
+            int index = line.indexOf(resourceString);
+            if (index == -1) {
+                // Record obfuscated names of a few known appcompat usages of
+                // Resources#getIdentifier that are unlikely to be used for general
+                // resource name reflection
+                if (line.startsWith("android.support.v7.widget.SuggestionsAdapter ")) {
+                    mSuggestionsAdapter =
+                            line.substring(line.indexOf(arrowString) + arrowString.length(),
+                                        line.indexOf(':') != -1 ? line.indexOf(':') : line.length())
+                                    .trim()
+                                    .replace('.', '/')
+                            + DOT_CLASS;
+                } else if (line.startsWith("android.support.v7.internal.widget.ResourcesWrapper ")
+                        || line.startsWith("android.support.v7.widget.ResourcesWrapper ")
+                        || (mResourcesWrapper == null // The wrapper class moved recently
+                                && line.startsWith(
+                                        "android.support.v7.widget.TintContextWrapper$TintResources "))) {
+                    mResourcesWrapper =
+                            line.substring(line.indexOf(arrowString) + arrowString.length(),
+                                        line.indexOf(':') != -1 ? line.indexOf(':') : line.length())
+                                    .trim()
+                                    .replace('.', '/')
+                            + DOT_CLASS;
+                }
+                continue;
+            }
+            int arrow = line.indexOf(arrowString, index + 3);
+            if (arrow == -1) {
+                continue;
+            }
+            String typeName = line.substring(index + resourceString.length(), arrow);
+            ResourceType type = ResourceType.fromClassName(typeName);
+            if (type == null) {
+                continue;
+            }
+            int end = line.indexOf(':', arrow + arrowString.length());
+            if (end == -1) {
+                end = line.length();
+            }
+            String target = line.substring(arrow + arrowString.length(), end).trim();
+            String ownerName = target.replace('.', '/');
+
+            nameMap = Maps.newHashMap();
+            Pair<ResourceType, Map<String, String>> pair = new Pair<>(type, nameMap);
+            mResourceObfuscation.put(ownerName, pair);
+            // For fast lookup in isResourceClass
+            mResourceObfuscation.put(ownerName + DOT_CLASS, pair);
+        }
+    }
+
+    private void recordManifestUsages(File manifest)
+            throws IOException, ParserConfigurationException, SAXException {
+        String xml = Files.toString(manifest, UTF_8);
+        Document document = XmlUtils.parseDocument(xml, true);
+        mModel.visitXmlDocument(manifest, null, document);
+    }
+
+    private void recordManifestUsages(Iterable<File> manifests)
+            throws IOException, ParserConfigurationException, SAXException {
+        for (File manifest : manifests) {
+            recordManifestUsages(manifest);
+        }
+    }
+
+    private void recordClassUsages(File file) throws IOException {
+        assert file.isFile();
+        if (file.getPath().endsWith(DOT_DEX)) {
+            byte[] bytes = Files.toByteArray(file);
+            recordClassUsages(file, file.getName(), bytes);
+        } else if (file.getPath().endsWith(DOT_JAR)) {
+            ZipInputStream zis = null;
+            try {
+                FileInputStream fis = new FileInputStream(file);
+                try {
+                    zis = new ZipInputStream(fis);
+                    ZipEntry entry = zis.getNextEntry();
+                    while (entry != null) {
+                        String name = entry.getName();
+                        if (name.endsWith(DOT_DEX)) {
+                            byte[] bytes = ByteStreams.toByteArray(zis);
+                            if (bytes != null) {
+                                recordClassUsages(file, name, bytes);
+                            }
+                        }
+
+                        entry = zis.getNextEntry();
+                    }
+                } finally {
+                    Closeables.close(fis, true);
+                }
+            } finally {
+                Closeables.close(zis, true);
+            }
+        }
+    }
+
+    private void recordClassUsages(File file, String name, byte[] bytes) {
+        assert name.endsWith(DOT_DEX);
+        ReferenceChecker callback = new ReferenceChecker() {
+            @Override
+            public boolean shouldProcess(String internalName) {
+                return !isResourceClass(internalName + DOT_CLASS);
+            }
+
+            @Override
+            public void referencedInt(int value) {
+                Shrinker.this.referencedInt("dex", value, file, name);
+            }
+
+            @Override
+            public void referencedString(String value) {
+                // do nothing.
+            }
+
+            @Override
+            public void referencedStaticField(String internalName, String fieldName) {
+                Resource resource = getResourceFromCode(internalName, fieldName);
+                if (resource != null) {
+                    ResourceUsageModel.markReachable(resource);
+                }
+            }
+
+            @Override
+            public void referencedMethod(
+                    String internalName, String methodName, String methodDescriptor) {
+                // Do nothing.
+            }
+        };
+        ProgramResource resource = ProgramResource.fromBytes(
+                new PathOrigin(file.toPath()), ProgramResource.Kind.DEX, bytes, null);
+        ProgramResourceProvider provider = () -> Arrays.asList(resource);
+        try {
+            Command command =
+                    (new ResourceShrinker.Builder()).addProgramResourceProvider(provider).build();
+            ResourceShrinker.run(command, callback);
+        } catch (CompilationFailedException e) {
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+        } catch (ExecutionException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /** Returns whether the given class file name points to an aapt-generated compiled R class. */
+    boolean isResourceClass(String name) {
+        if (mResourceObfuscation.containsKey(name)) {
+            return true;
+        }
+        int index = name.lastIndexOf('/');
+        if (index != -1 && name.startsWith("R$", index + 1) && name.endsWith(DOT_CLASS)) {
+            String typeName = name.substring(index + 3, name.length() - DOT_CLASS.length());
+            return ResourceType.fromClassName(typeName) != null;
+        }
+        return false;
+    }
+
+    Resource getResourceFromCode(String owner, String name) {
+        Pair<ResourceType, Map<String, String>> pair = mResourceObfuscation.get(owner);
+        if (pair != null) {
+            ResourceType type = pair.getFirst();
+            Map<String, String> nameMap = pair.getSecond();
+            String renamedField = nameMap.get(name);
+            if (renamedField != null) {
+                name = renamedField;
+            }
+            return mModel.getResource(type, name);
+        }
+        if (isValidResourceType(owner)) {
+            ResourceType type =
+                    ResourceType.fromClassName(owner.substring(owner.lastIndexOf('$') + 1));
+            if (type != null) {
+                return mModel.getResource(type, name);
+            }
+        }
+        return null;
+    }
+
+    private boolean isValidResourceType(String candidateString) {
+        return candidateString.contains("/")
+                && candidateString.substring(candidateString.lastIndexOf('/') + 1).contains("$");
+    }
+
+    private void gatherResourceValues(Iterable<File> rTxts) throws IOException {
+        for (File rTxt : rTxts) {
+            assert rTxt.isFile();
+            assert rTxt.getName().endsWith(FN_RESOURCE_TEXT);
+            addResourcesFromRTxtFile(rTxt);
+        }
+    }
+
+    private void addResourcesFromRTxtFile(File file) {
+        try {
+            SymbolTable st = readFromAapt(file, null);
+            for (Symbol symbol : st.getSymbols().values()) {
+                String symbolValue = symbol.getValue();
+                if (symbol.getResourceType() == ResourceType.STYLEABLE) {
+                    if (symbolValue.trim().startsWith("{")) {
+                        // Only add the styleable parent, styleable children are not yet supported.
+                        mModel.addResource(symbol.getResourceType(), symbol.getName(), null);
+                    }
+                } else {
+                    mModel.addResource(symbol.getResourceType(), symbol.getName(), symbolValue);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    ResourceUsageModel getModel() {
+        return mModel;
+    }
+
+    private void referencedInt(String context, int value, File file, String currentClass) {
+        Resource resource = mModel.getResource(value);
+        if (ResourceUsageModel.markReachable(resource) && mDebug) {
+            assert mDebugPrinter != null : "mDebug is true, but mDebugPrinter is null.";
+            mDebugPrinter.println("Marking " + resource + " reachable: referenced from " + context
+                    + " in " + file + ":" + currentClass);
+        }
+    }
+
+    private final ResourceShrinkerUsageModel mModel = new ResourceShrinkerUsageModel();
+
+    private class ResourceShrinkerUsageModel extends ResourceUsageModel {
+        public File file;
+
+        /**
+         * Whether we should ignore tools attribute resource references.
+         * <p>
+         * For example, for resource shrinking we want to ignore tools attributes,
+         * whereas for resource refactoring on the source code we do not.
+         *
+         * @return whether tools attributes should be ignored
+         */
+        @Override
+        protected boolean ignoreToolsAttributes() {
+            return true;
+        }
+
+        @Override
+        protected void onRootResourcesFound(List<Resource> roots) {
+            if (mDebugPrinter != null) {
+                mDebugPrinter.println(
+                        "\nThe root reachable resources are:\n" + Joiner.on(",\n   ").join(roots));
+            }
+        }
+
+        @Override
+        protected Resource declareResource(ResourceType type, String name, Node node) {
+            Resource resource = super.declareResource(type, name, node);
+            resource.addLocation(file);
+            return resource;
+        }
+
+        @Override
+        protected void referencedString(String string) {
+            // Do nothing
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        List<File> rTxtFiles = null; // R.txt files
+        List<File> classes = null; // Dex files, or jars containing dex files
+        List<File> manifests = null; // manifests
+        File mapping = null; // mapping
+        List<File> resources = null; // resources dirs
+        File log = null; // output log for debugging
+        Path configPath = null; // output config
+        for (int i = 0; i < args.length; i += 2) {
+            switch (args[i]) {
+                case "--rtxts":
+                    rTxtFiles = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--dex":
+                    classes = Arrays.stream(args[i + 1].split(":"))
+                                      .map(s -> new File(s))
+                                      .collect(Collectors.toList());
+                    break;
+                case "--manifests":
+                    manifests = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--mapping":
+                    mapping = new File(args[i + 1]);
+                    break;
+                case "--resourceDirs":
+                    resources = Arrays.stream(args[i + 1].split(":"))
+                                        .map(s -> new File(s))
+                                        .collect(Collectors.toList());
+                    break;
+                case "--log":
+                    log = new File(args[i + 1]);
+                    break;
+                case "--outputConfig":
+                    configPath = Paths.get(args[i + 1]);
+                    break;
+                default:
+                    throw new IllegalArgumentException(args[i] + " is not a valid arg.");
+            }
+        }
+        Shrinker shrinker = new Shrinker(rTxtFiles, classes, manifests, mapping, resources, log);
+        shrinker.analyze();
+        shrinker.close();
+        shrinker.emitConfig(configPath);
+    }
+}
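A usage note on main() above: arguments come in flag/value pairs, and
multi-valued flags are joined with ':' before being split by
args[i + 1].split(":"). A hedged sketch of driving the tool from Python via
its GN-generated wrapper script (every path below is a hypothetical
placeholder):

    import subprocess

    # Multi-valued inputs are colon-joined to match main()'s split(":").
    subprocess.check_call([
        'out/Default/bin/helper/resources_shrinker',  # hypothetical wrapper
        '--rtxts', ':'.join(['gen/foo/R.txt']),
        '--manifests', ':'.join(['gen/foo/AndroidManifest.xml']),
        '--resourceDirs', ':'.join(['gen/foo/res0', 'gen/foo/res1']),
        '--dex', 'obj/foo/foo.dex.jar',
        '--mapping', 'obj/foo/proguard.mapping',
        '--outputConfig', 'gen/foo/unused_resources.config',
    ])
    # emitConfig() then writes one 'type/name#remove' line per unused
    # resource to the --outputConfig path.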
diff --git a/src/build/android/gyp/resources_shrinker/shrinker.py b/src/build/android/gyp/resources_shrinker/shrinker.py
new file mode 100755
index 0000000..2800ce2
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/shrinker.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+from util import resource_utils
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--script',
+                      required=True,
+                      help='Path to the unused resources detector script.')
+  parser.add_argument(
+      '--dependencies-res-zips',
+      required=True,
+      help='Resources zip archives to investigate for unused resources.')
+  parser.add_argument('--dex',
+                      required=True,
+                      help='Path to dex file, or zip with dex files.')
+  parser.add_argument(
+      '--proguard-mapping',
+      required=True,
+      help='Path to proguard mapping file for the optimized dex.')
+  parser.add_argument('--r-text', required=True, help='Path to R.txt')
+  parser.add_argument('--android-manifest',
+                      required=True,
+                      help='Path to AndroidManifest')
+  parser.add_argument('--output-config',
+                      required=True,
+                      help='Path to output the aapt2 config to.')
+  args = build_utils.ExpandFileArgs(args)
+  options = parser.parse_args(args)
+  options.dependencies_res_zips = build_utils.ParseGnList(
+      options.dependencies_res_zips)
+
+  # In case of no resources, short-circuit early.
+  if not options.dependencies_res_zips:
+    build_utils.Touch(options.output_config)
+    return
+
+  with build_utils.TempDir() as temp_dir:
+    dep_subdirs = []
+    for dependency_res_zip in options.dependencies_res_zips:
+      dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir)
+
+    build_utils.CheckOutput([
+        options.script, '--rtxts', options.r_text, '--manifests',
+        options.android_manifest, '--resourceDirs', ':'.join(dep_subdirs),
+        '--dex', options.dex, '--mapping', options.proguard_mapping,
+        '--outputConfig', options.output_config
+    ])
+
+  if options.depfile:
+    depfile_deps = options.dependencies_res_zips + [
+        options.r_text,
+        options.android_manifest,
+        options.dex,
+        options.proguard_mapping,
+    ]
+    build_utils.WriteDepfile(options.depfile, options.output_config,
+                             depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
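Note that --dependencies-res-zips above carries a "GN list" value, which the
script normalizes through build_utils.ParseGnList (defined later in this
patch). An illustrative sketch of those conversions, using made-up values
that follow ParseGnList's own docstring:

    from util import build_utils

    # None and '' flatten to [], bare strings become one-element lists, and
    # serialized GN lists (and lists of them) are parsed and flattened.
    assert build_utils.ParseGnList(None) == []
    assert build_utils.ParseGnList('a.zip') == ['a.zip']
    assert build_utils.ParseGnList('["a.zip", "b.zip"]') == ['a.zip', 'b.zip']
    assert build_utils.ParseGnList(['["a", "b"]', 'c']) == ['a', 'b', 'c']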
diff --git a/src/build/android/gyp/resources_shrinker/shrinker.pydeps b/src/build/android/gyp/resources_shrinker/shrinker.pydeps
new file mode 100644
index 0000000..92c8905
--- /dev/null
+++ b/src/build/android/gyp/resources_shrinker/shrinker.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp/resources_shrinker --output build/android/gyp/resources_shrinker/shrinker.pydeps build/android/gyp/resources_shrinker/shrinker.py
+../../../../third_party/jinja2/__init__.py
+../../../../third_party/jinja2/_compat.py
+../../../../third_party/jinja2/asyncfilters.py
+../../../../third_party/jinja2/asyncsupport.py
+../../../../third_party/jinja2/bccache.py
+../../../../third_party/jinja2/compiler.py
+../../../../third_party/jinja2/defaults.py
+../../../../third_party/jinja2/environment.py
+../../../../third_party/jinja2/exceptions.py
+../../../../third_party/jinja2/filters.py
+../../../../third_party/jinja2/idtracking.py
+../../../../third_party/jinja2/lexer.py
+../../../../third_party/jinja2/loaders.py
+../../../../third_party/jinja2/nodes.py
+../../../../third_party/jinja2/optimizer.py
+../../../../third_party/jinja2/parser.py
+../../../../third_party/jinja2/runtime.py
+../../../../third_party/jinja2/tests.py
+../../../../third_party/jinja2/utils.py
+../../../../third_party/jinja2/visitor.py
+../../../../third_party/markupsafe/__init__.py
+../../../../third_party/markupsafe/_compat.py
+../../../../third_party/markupsafe/_native.py
+../../../gn_helpers.py
+../util/__init__.py
+../util/build_utils.py
+../util/resource_utils.py
+shrinker.py
diff --git a/src/build/android/gyp/test/BUILD.gn b/src/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000..301a220
--- /dev/null
+++ b/src/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,11 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+  sources = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+  deps = [ ":hello_world_java" ]
+  sources = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+  main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000..10860d8
--- /dev/null
+++ b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+    public static void main(String[] args) {
+        if (args.length > 0) {
+            System.exit(Integer.parseInt(args[0]));
+        }
+        HelloWorldPrinter.print();
+    }
+}
+
diff --git a/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000..b09673e
--- /dev/null
+++ b/src/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+    public static void print() {
+        System.out.println("Hello, world!");
+    }
+}
+
diff --git a/src/build/android/gyp/turbine.py b/src/build/android/gyp/turbine.py
new file mode 100755
index 0000000..208cc76
--- /dev/null
+++ b/src/build/android/gyp/turbine.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wraps the turbine jar and expands @FileArgs."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import time
+
+from util import build_utils
+
+
+def main(argv):
+  build_utils.InitLogging('TURBINE_DEBUG')
+  argv = build_utils.ExpandFileArgs(argv[1:])
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument(
+      '--turbine-jar-path', required=True, help='Path to the turbine jar file.')
+  parser.add_argument(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_argument(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_argument(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_argument('--classpath', action='append', help='Classpath to use.')
+  parser.add_argument(
+      '--processors',
+      action='append',
+      help='GN list of annotation processor main classes.')
+  parser.add_argument(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_argument(
+      '--processor-args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_argument('--jar-path', help='Jar output path.', required=True)
+  parser.add_argument(
+      '--generated-jar-path',
+      required=True,
+      help='Output path for generated source files.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  options, unknown_args = parser.parse_known_args(argv)
+
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.classpath = build_utils.ParseGnList(options.classpath)
+  options.processorpath = build_utils.ParseGnList(options.processorpath)
+  options.processors = build_utils.ParseGnList(options.processors)
+  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+
+  files = []
+  for arg in unknown_args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      files.extend(build_utils.ReadSourcesList(arg[1:]))
+
+  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+      '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
+  ]
+  javac_cmd = []
+
+  # Turbine reads lists from command line args by consuming args until one
+  # starts with a double dash (--), so the values of a list-valued flag must
+  # be passed together as one contiguous group.
+  if options.processors:
+    cmd += ['--processors']
+    cmd += options.processors
+
+  if options.java_version:
+    javac_cmd.extend([
+        '-source',
+        options.java_version,
+        '-target',
+        options.java_version,
+    ])
+  if options.java_version == '1.8':
+    # Android's boot jar doesn't contain all Java 8 classes.
+    options.bootclasspath.append(build_utils.RT_JAR_PATH)
+
+  if options.bootclasspath:
+    cmd += ['--bootclasspath']
+    for bootclasspath in options.bootclasspath:
+      cmd += bootclasspath.split(':')
+
+  if options.processorpath:
+    cmd += ['--processorpath']
+    cmd += options.processorpath
+
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_cmd.extend(['-A%s' % arg])
+
+  if options.classpath:
+    cmd += ['--classpath']
+    cmd += options.classpath
+
+  if options.java_srcjars:
+    cmd += ['--source_jars']
+    cmd += options.java_srcjars
+
+  if files:
+    # Use jar_path to ensure paths are relative (needed for goma).
+    files_rsp_path = options.jar_path + '.files_list.txt'
+    with open(files_rsp_path, 'w') as f:
+      f.write(' '.join(files))
+    # Pass source paths via a response file to avoid extremely long command
+    # lines that are tedious to debug.
+    cmd += ['--sources']
+    cmd += ['@' + files_rsp_path]
+
+  if javac_cmd:
+    cmd.append('--javacopts')
+    cmd += javac_cmd
+    cmd.append('--')  # Terminate javacopts
+
+  # Use AtomicOutput so that output timestamps are not updated when outputs
+  # are not changed.
+  with build_utils.AtomicOutput(options.jar_path) as output_jar, \
+      build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
+    cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
+    logging.debug('Command: %s', cmd)
+    start = time.time()
+    build_utils.CheckOutput(cmd,
+                            print_stdout=True,
+                            fail_on_output=options.warnings_as_errors)
+    end = time.time() - start
+    logging.info('Header compilation took %ss', end)
+
+  if options.depfile:
+    # GN already knows about the java files, so avoid listing individual java
+    # files in the depfile.
+    depfile_deps = (options.bootclasspath + options.classpath +
+                    options.processorpath + options.java_srcjars)
+    build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
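Because of the grouping rule noted in main() above, each list-valued turbine
flag is followed immediately by all of its values, and --javacopts is
explicitly terminated by a bare '--'. A rough sketch of the shape of the
assembled command (all paths are hypothetical placeholders):

    # Illustrative only: the shape of the command main() above assembles.
    cmd = [
        'third_party/jdk/current/bin/java', '-Xmx1G',
        '-classpath', 'turbine.jar', 'com.google.turbine.main.Main',
        '--classpath', 'a.jar', 'b.jar',  # values run until the next --flag
        '--source_jars', 'foo.srcjar',
        '--sources', '@obj/foo.jar.files_list.txt',  # response file
        '--javacopts', '-source', '1.8', '-target', '1.8',
        '--',  # terminates --javacopts
        '--output', 'obj/foo.turbine.jar',
        '--gensrc_output', 'gen/foo.generated.srcjar',
    ]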
diff --git a/src/build/android/gyp/turbine.pydeps b/src/build/android/gyp/turbine.pydeps
new file mode 100644
index 0000000..f0b2411
--- /dev/null
+++ b/src/build/android/gyp/turbine.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py
+../../gn_helpers.py
+turbine.py
+util/__init__.py
+util/build_utils.py
diff --git a/src/build/android/gyp/util/__init__.py b/src/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/gyp/util/build_utils.py b/src/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..d1d3a72
--- /dev/null
+++ b/src/build/android/gyp/util/build_utils.py
@@ -0,0 +1,724 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
+import atexit
+import collections
+import contextlib
+import filecmp
+import fnmatch
+import json
+import logging
+import os
+import pipes
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import time
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+# Use relative paths to improve the hermetic property of build scripts.
+DIR_SOURCE_ROOT = os.path.relpath(
+    os.environ.get(
+        'CHECKOUT_SOURCE_ROOT',
+        os.path.join(
+            os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+            os.pardir)))
+JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
+JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
+RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras',
+                           'java_8', 'jre', 'lib', 'rt.jar')
+
+try:
+  string_types = basestring
+except NameError:
+  string_types = (str, bytes)
+
+
+def JavaCmd(verify=True, xmx='1G'):
+  ret = [os.path.join(JAVA_HOME, 'bin', 'java')]
+  # Limit the heap to avoid Java failing to GC when it should, which can cause
+  # bots to OOM when many java commands are running at the same time.
+  # https://crbug.com/1098333
+  ret += ['-Xmx' + xmx]
+
+  # Disabling bytecode verification for local builds gives a ~2% speed-up.
+  if not verify:
+    ret += ['-noverify']
+
+  return ret
+
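+# Illustration (assumed invocation, not from this change):
+# JavaCmd(verify=False, xmx='2G') returns something like
+# ['../third_party/jdk/current/bin/java', '-Xmx2G', '-noverify'],
+# with the exact prefix depending on DIR_SOURCE_ROOT.
+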
+
+@contextlib.contextmanager
+def TempDir(**kwargs):
+  dirname = tempfile.mkdtemp(**kwargs)
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter='*'):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def ParseGnList(value):
+  """Converts a "GN-list" command-line parameter into a list.
+
+  Conversions handled:
+    * None -> []
+    * '' -> []
+    * 'asdf' -> ['asdf']
+    * '["a", "b"]' -> ['a', 'b']
+    * ['["a", "b"]', 'c'] -> ['a', 'b', 'c']  (flattened list)
+
+  The common use for this behavior is in the Android build where things can
+  take lists of @FileArg references that are expanded via ExpandFileArgs.
+  """
+  # Convert None to [].
+  if not value:
+    return []
+  # Convert a list of GN lists to a flattened list.
+  if isinstance(value, list):
+    ret = []
+    for arg in value:
+      ret.extend(ParseGnList(arg))
+    return ret
+  # Convert normal GN list.
+  if value.startswith('['):
+    return gn_helpers.GNValueParser(value).ParseList()
+  # Convert a single string value to a list.
+  return [value]
+
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def AtomicOutput(path, only_if_changed=True, mode='w+b'):
+  """Helper to prevent half-written outputs.
+
+  Args:
+    path: Path to the final output file, which will be written atomically.
+    only_if_changed: If True (the default), do not touch the filesystem
+      if the content has not changed.
+    mode: The mode to open the file in (str).
+  Returns:
+    A python context manager that yields a NamedTemporaryFile instance
+    that must be used by clients to write the data to. On exit, the
+    manager will try to replace the final output file with the
+    temporary one if necessary. The temporary file is always destroyed
+    on exit.
+  Example:
+    with build_utils.AtomicOutput(output_path) as tmp_file:
+      subprocess.check_call(['prog', '--output', tmp_file.name])
+  """
+  # Create in same directory to ensure same filesystem when moving.
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    MakeDirectory(dirname)
+  with tempfile.NamedTemporaryFile(
+      mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f:
+    try:
+      yield f
+
+      # file should be closed before comparison/move.
+      f.close()
+      if not (only_if_changed and os.path.exists(path) and
+              filecmp.cmp(f.name, path)):
+        shutil.move(f.name, path)
+    finally:
+      if os.path.exists(f.name):
+        os.unlink(f.name)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def FilterLines(output, filter_string):
+  """Output filter from build_utils.CheckOutput.
+
+  Args:
+    output: Executable output as from build_utils.CheckOutput.
+    filter_string: An RE string that will filter (remove) matching
+        lines from |output|.
+
+  Returns:
+    The filtered output, as a single string.
+  """
+  re_filter = re.compile(filter_string)
+  return '\n'.join(
+      line for line in output.split('\n') if not re_filter.search(line))
+
+
+def FilterReflectiveAccessJavaWarnings(output):
+  """Filters out warnings about illegal reflective access operation.
+
+  These warnings were introduced in Java 9, and generally mean that dependencies
+  need to be updated.
+  """
+  #  WARNING: An illegal reflective access operation has occurred
+  #  WARNING: Illegal reflective access by ...
+  #  WARNING: Please consider reporting this to the maintainers of ...
+  #  WARNING: Use --illegal-access=warn to enable warnings of further ...
+  #  WARNING: All illegal access operations will be denied in a future release
+  return FilterLines(
+      output, r'WARNING: ('
+      'An illegal reflective|'
+      'Illegal reflective access|'
+      'Please consider reporting this to|'
+      'Use --illegal-access=warn|'
+      'All illegal access operations)')
+
+
+# This can be used in most cases like subprocess.check_output(). Its output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, this raises a build_utils.CalledProcessError.
+def CheckOutput(args,
+                cwd=None,
+                env=None,
+                print_stdout=False,
+                print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_on_output=True,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+  stdout, stderr = child.communicate()
+
+  # For Python3 only:
+  if isinstance(stdout, bytes) and sys.version_info >= (3, ):
+    stdout = stdout.decode('utf-8')
+    stderr = stderr.decode('utf-8')
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func and fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  has_stdout = print_stdout and stdout
+  has_stderr = print_stderr and stderr
+  if fail_on_output and (has_stdout or has_stderr):
+    MSG = """\
+Command failed because it wrote to {}.
+You can often set treat_warnings_as_errors=false to not treat output as \
+failure (useful when developing locally)."""
+    if has_stdout and has_stderr:
+      stream_string = 'stdout and stderr'
+    elif has_stdout:
+      stream_string = 'stdout'
+    else:
+      stream_string = 'stderr'
+    raise CalledProcessError(cwd, args, MSG.format(stream_string))
+
+  return stdout
+
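+# Minimal usage sketch (the command choice here is an assumption, not taken
+# from any build rule in this change):
+#   version = CheckOutput(JavaCmd() + ['-version'],
+#                         stderr_filter=FilterReflectiveAccessJavaWarnings,
+#                         fail_on_output=False)
+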
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
+
+def _CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+  zi = zip_file.getinfo(name)
+
+  # The two high-order bytes of ZipInfo.external_attr represent
+  # UNIX permissions and file type bits.
+  return stat.S_ISLNK(zi.external_attr >> 16)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+               predicate=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  if not zipfile.is_zipfile(zip_path):
+    raise Exception('Invalid zip file: %s' % zip_path)
+
+  extracted = []
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        MakeDirectory(os.path.join(path, name))
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      if predicate and not predicate(name):
+        continue
+      _CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      if _IsSymlink(z, name):
+        dest = os.path.join(path, name)
+        MakeDirectory(os.path.dirname(dest))
+        os.symlink(z.read(name), dest)
+        extracted.append(dest)
+      else:
+        z.extract(name, path)
+        extracted.append(os.path.join(path, name))
+
+  return extracted
+
+
+def HermeticDateTime(timestamp=None):
+  """Returns a constant ZipInfo.date_time tuple.
+
+  Args:
+    timestamp: Unix timestamp to use for files in the archive.
+
+  Returns:
+    A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp.
+  """
+  if not timestamp:
+    return (2001, 1, 1, 0, 0, 0)
+  utc_time = time.gmtime(timestamp)
+  return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour,
+          utc_time.tm_min, utc_time.tm_sec)
+
+
+def HermeticZipInfo(*args, **kwargs):
+  """Creates a zipfile.ZipInfo with a constant timestamp and external_attr.
+
+  If a date_time value is not provided in the positional or keyword arguments,
+  the default value from HermeticDateTime is used.
+
+  Args:
+    See zipfile.ZipInfo.
+
+  Returns:
+    A zipfile.ZipInfo.
+  """
+  # The caller may have provided a date_time either as a positional parameter
+  # (args[1]) or as a keyword parameter. Use the default hermetic date_time if
+  # none was provided.
+  date_time = None
+  if len(args) >= 2:
+    date_time = args[1]
+  elif 'date_time' in kwargs:
+    date_time = kwargs['date_time']
+  if not date_time:
+    kwargs['date_time'] = HermeticDateTime()
+  ret = zipfile.ZipInfo(*args, **kwargs)
+  ret.external_attr = (0o644 << 16)
+  return ret
+
+
+def AddToZipHermetic(zip_file,
+                     zip_path,
+                     src_path=None,
+                     data=None,
+                     compress=None,
+                     date_time=None):
+  """Adds a file to the given ZipFile with a hard-coded modified time.
+
+  Args:
+    zip_file: ZipFile instance to add the file to.
+    zip_path: Destination path within the zip file (or ZipInfo instance).
+    src_path: Path of the source file. Mutually exclusive with |data|.
+    data: File data as a string.
+    compress: Whether to enable compression. Default is taken from ZipFile
+        constructor.
+    date_time: The last modification date and time for the archive member.
+  """
+  assert (src_path is None) != (data is None), (
+      '|src_path| and |data| are mutually exclusive.')
+  if isinstance(zip_path, zipfile.ZipInfo):
+    zipinfo = zip_path
+    zip_path = zipinfo.filename
+  else:
+    zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time)
+
+  _CheckZipPath(zip_path)
+
+  if src_path and os.path.islink(src_path):
+    zipinfo.filename = zip_path
+    zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
+    zip_file.writestr(zipinfo, os.readlink(src_path))
+    return
+
+  # zipfile.write() does
+  #     external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
+  # but we want the hermetic attributes from HermeticZipInfo, so manually set
+  # the few attr bits we care about.
+  if src_path:
+    st = os.stat(src_path)
+    for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+      if st.st_mode & mode:
+        zipinfo.external_attr |= mode << 16
+
+  if src_path:
+    with open(src_path, 'rb') as f:
+      data = f.read()
+
+  # zipfile will deflate even when it makes the file bigger. To avoid
+  # growing files, disable compression at an arbitrary cut off point.
+  if len(data) < 16:
+    compress = False
+
+  # None converts to ZIP_STORED, when passed explicitly rather than the
+  # default passed to the ZipFile constructor.
+  compress_type = zip_file.compression
+  if compress is not None:
+    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+  zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs,
+          output,
+          base_dir=None,
+          compress_fn=None,
+          zip_prefix_path=None,
+          timestamp=None):
+  """Creates a zip file from a list of files.
+
+  Args:
+    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+    output: Path, fileobj, or ZipFile instance to add files to.
+    base_dir: Prefix to strip from inputs.
+    compress_fn: Applied to each input to determine whether or not to compress.
+        By default, items will be |zipfile.ZIP_STORED|.
+    zip_prefix_path: Path prepended to file path in zip file.
+    timestamp: Unix timestamp to use for files in the archive.
+  """
+  if base_dir is None:
+    base_dir = '.'
+  input_tuples = []
+  for tup in inputs:
+    if isinstance(tup, string_types):
+      tup = (os.path.relpath(tup, base_dir), tup)
+      if tup[0].startswith('..'):
+        raise Exception('Invalid zip_path: ' + tup[0])
+    input_tuples.append(tup)
+
+  # Sort by zip path to ensure stable zip ordering.
+  input_tuples.sort(key=lambda tup: tup[0])
+
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  date_time = HermeticDateTime(timestamp)
+  try:
+    for zip_path, fs_path in input_tuples:
+      if zip_prefix_path:
+        zip_path = os.path.join(zip_prefix_path, zip_path)
+      compress = compress_fn(zip_path) if compress_fn else None
+      AddToZipHermetic(out_zip,
+                       zip_path,
+                       src_path=fs_path,
+                       compress=compress,
+                       date_time=date_time)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
+
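+# Illustrative call (paths and timestamp are hypothetical): archive one file
+# under an 'assets/' prefix with a fixed timestamp so the output is hermetic.
+#   DoZip([('a.txt', 'out/gen/a.txt')], 'out/a.zip',
+#         zip_prefix_path='assets', timestamp=946684800)
+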
+
+def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
+  """Creates a zip file from a directory."""
+  inputs = []
+  for root, _, files in os.walk(base_dir):
+    for f in files:
+      inputs.append(os.path.join(root, f))
+
+  if isinstance(output, zipfile.ZipFile):
+    DoZip(
+        inputs,
+        output,
+        base_dir,
+        compress_fn=compress_fn,
+        zip_prefix_path=zip_prefix_path)
+  else:
+    with AtomicOutput(output) as f:
+      DoZip(
+          inputs,
+          f,
+          base_dir,
+          compress_fn=compress_fn,
+          zip_prefix_path=zip_prefix_path)
+
+
+def MatchesGlob(path, filters):
+  """Returns whether the given path matches any of the given glob patterns."""
+  return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, input_zips, path_transform=None, compress=None):
+  """Combines all files from |input_zips| into |output|.
+
+  Args:
+    output: Path, fileobj, or ZipFile instance to add files to.
+    input_zips: Iterable of paths to zip files to merge.
+    path_transform: Called for each entry path. Returns a new path, or None to
+        skip the file.
+    compress: Overrides compression setting from origin zip entries.
+  """
+  path_transform = path_transform or (lambda p: p)
+  added_names = set()
+
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  try:
+    for in_file in input_zips:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for info in in_zip.infolist():
+          # Ignore directories.
+          if info.filename[-1] == '/':
+            continue
+          dst_name = path_transform(info.filename)
+          if not dst_name:
+            continue
+          already_added = dst_name in added_names
+          if not already_added:
+            if compress is not None:
+              compress_entry = compress
+            else:
+              compress_entry = info.compress_type != zipfile.ZIP_STORED
+            AddToZipHermetic(
+                out_zip,
+                dst_name,
+                data=in_zip.read(info),
+                compress=compress_entry)
+            added_names.add(dst_name)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph (crashes if cycles exist).
+
+  Args:
+    top: A list of the top level nodes
+    deps_func: A function that takes a node and returns a list of its direct
+        dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
+  """
+  # Find all deps depth-first, maintaining original order in the case of ties.
+  deps_map = collections.OrderedDict()
+  def discover(nodes):
+    for node in nodes:
+      if node in deps_map:
+        continue
+      deps = deps_func(node)
+      discover(deps)
+      deps_map[node] = deps
+
+  discover(top)
+  return list(deps_map)
+
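+# Ordering sketch with a made-up graph (build_utils_test.py below exercises
+# larger cases):
+#   deps = {'a': [], 'b': ['a'], 'c': ['a', 'b']}
+#   GetSortedTransitiveDependencies(['c'], deps.get)  ->  ['a', 'b', 'c']
+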
+
+def InitLogging(enabling_env):
+  logging.basicConfig(
+      level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
+      format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
+  script_name = os.path.basename(sys.argv[0])
+  logging.info('Started (%s)', script_name)
+
+  my_pid = os.getpid()
+
+  def log_exit():
+    # Do not log for fork'ed processes.
+    if os.getpid() == my_pid:
+      logging.info("Job's done (%s)", script_name)
+
+  atexit.register(log_exit)
+
+
+def AddDepfileOption(parser):
+  # TODO(agrieve): Get rid of this once we've moved to argparse.
+  if hasattr(parser, 'add_option'):
+    func = parser.add_option
+  else:
+    func = parser.add_argument
+  func('--depfile',
+       help='Path to depfile (refer to `gn help depfile`)')
+
+
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
+  assert depfile_path != first_gn_output  # http://crbug.com/646165
+  assert not isinstance(inputs, string_types)  # Easy mistake to make
+  inputs = inputs or []
+  MakeDirectory(os.path.dirname(depfile_path))
+  # Ninja does not support multiple outputs in depfiles.
+  with open(depfile_path, 'w') as depfile:
+    depfile.write(first_gn_output.replace(' ', '\\ '))
+    depfile.write(': ')
+    depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
+    depfile.write('\n')
+
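+# For reference (hypothetical paths), the call
+#   WriteDepfile('obj/foo.d', 'obj/foo.jar', ['a.jar', 'b.jar'])
+# writes the single Makefile-style line:
+#   obj/foo.jar: a.jar b.jar
+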
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json.
+  And then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
+  suffix the (intermediate) value will be interpreted as a single item list and
+  the single item will be returned or used for further traversal.
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    def get_key(key):
+      if key.endswith('[]'):
+        return key[:-2], True
+      return key, False
+
+    lookup_path = match.group(1).split(':')
+    file_path, _ = get_key(lookup_path[0])
+    if file_path not in file_jsons:
+      with open(file_path) as f:
+        file_jsons[file_path] = json.load(f)
+
+    expansion = file_jsons
+    for k in lookup_path:
+      k, flatten = get_key(k)
+      expansion = expansion[k]
+      if flatten:
+        if not isinstance(expansion, list) or not len(expansion) == 1:
+          raise Exception('Expected single item list but got %s' % expansion)
+        expansion = expansion[0]
+
+    # This should match ParseGnList. The output is either a GN-formatted list
+    # or a literal (with no quotes).
+    if isinstance(expansion, list):
+      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
+                     arg[match.end():])
+    else:
+      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]
+
+  return new_args
+
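+# Worked sketch (file name and keys are hypothetical): if deps.json contains
+# {"java": {"jars": ["a.jar"]}}, then
+#   ExpandFileArgs(['--jars=@FileArg(deps.json:java:jars[])'])
+# returns ['--jars=a.jar'].
+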
+
+def ReadSourcesList(sources_list_file_name):
+  """Reads a GN-written file containing list of file names and returns a list.
+
+  Note that this function should not be used to parse response files.
+  """
+  with open(sources_list_file_name) as f:
+    return [file_name.strip() for file_name in f]
diff --git a/src/build/android/gyp/util/build_utils_test.py b/src/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000..008ea11
--- /dev/null
+++ b/src/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+_DEPS = collections.OrderedDict()
+_DEPS['a'] = []
+_DEPS['b'] = []
+_DEPS['c'] = ['a']
+_DEPS['d'] = ['a']
+_DEPS['e'] = ['f']
+_DEPS['f'] = ['a', 'd']
+_DEPS['g'] = []
+_DEPS['h'] = ['d', 'b', 'f']
+_DEPS['i'] = ['f']
+
+
+class BuildUtilsTest(unittest.TestCase):
+  def testGetSortedTransitiveDependencies_all(self):
+    TOP = _DEPS.keys()
+    EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+  def testGetSortedTransitiveDependencies_leaves(self):
+    TOP = ['c', 'e', 'g', 'h', 'i']
+    EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+  def testGetSortedTransitiveDependencies_leavesReverse(self):
+    TOP = ['i', 'h', 'g', 'e', 'c']
+    EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
+    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+    self.assertEqual(EXPECTED, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/diff_utils.py b/src/build/android/gyp/util/diff_utils.py
new file mode 100644
index 0000000..530a688
--- /dev/null
+++ b/src/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,127 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import difflib
+from util import build_utils
+
+
+def _SkipOmitted(line):
+  """
+  Skip lines that are to be intentionally omitted from the expectations file.
+
+  This is required when the file to be compared against expectations contains
+  a line that changes from build to build because - for instance - it contains
+  version information.
+  """
+  if line.rstrip().endswith('# OMIT FROM EXPECTATIONS'):
+    return '# THIS LINE WAS OMITTED\n'
+  return line
+
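+# For example (the line itself is hypothetical), an input of
+#   'version_code = 123  # OMIT FROM EXPECTATIONS'
+# normalizes to '# THIS LINE WAS OMITTED' before the comparison runs.
+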
+
+def _GenerateDiffWithOnlyAdditions(expected_path, actual_data):
+  """Generate a diff that only contains additions"""
+  # Ignore blank lines when creating the diff to cut down on whitespace-only
+  # lines in the diff. Also remove trailing whitespace and add the newlines
+  # manually (ndiff expects newlines but we don't care about trailing
+  # whitespace).
+  with open(expected_path) as expected:
+    expected_lines = [l for l in expected.readlines() if l.strip()]
+  actual_lines = [
+      '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip()
+  ]
+
+  diff = difflib.ndiff(expected_lines, actual_lines)
+  filtered_diff = (l for l in diff if l.startswith('+'))
+  return ''.join(filtered_diff)
+
+
+def _DiffFileContents(expected_path, actual_data):
+  """Check file contents for equality and return the diff or None."""
+  # Strip all trailing whitespace; newlines are re-added when joining the diff.
+  with open(expected_path) as f_expected:
+    expected_lines = [l.rstrip() for l in f_expected.readlines()]
+  actual_lines = [
+      _SkipOmitted(line).rstrip() for line in actual_data.splitlines()
+  ]
+
+  if expected_lines == actual_lines:
+    return None
+
+  expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
+
+  diff = difflib.unified_diff(
+      expected_lines,
+      actual_lines,
+      fromfile=os.path.join('before', expected_path),
+      tofile=os.path.join('after', expected_path),
+      n=0,
+      lineterm='',
+  )
+
+  return '\n'.join(diff)
+
+
+def AddCommandLineFlags(parser):
+  group = parser.add_argument_group('Expectations')
+  group.add_argument(
+      '--expected-file',
+      help='Expected contents for the check. If --expected-file-base is set, '
+      'this is a diff of --actual-file and --expected-file-base.')
+  group.add_argument(
+      '--expected-file-base',
+      help='File to diff against before comparing to --expected-file.')
+  group.add_argument('--actual-file',
+                     help='Path to write actual file (for reference).')
+  group.add_argument('--failure-file',
+                     help='Write to this file if expectations fail.')
+  group.add_argument('--fail-on-expectations',
+                     action="store_true",
+                     help='Fail on expectation mismatches.')
+  group.add_argument('--only-verify-expectations',
+                     action='store_true',
+                     help='Verify the expectation and exit.')
+
+
+def CheckExpectations(actual_data, options, custom_msg=''):
+  if options.actual_file:
+    with build_utils.AtomicOutput(options.actual_file) as f:
+      f.write(actual_data.encode('utf8'))
+  if options.expected_file_base:
+    actual_data = _GenerateDiffWithOnlyAdditions(options.expected_file_base,
+                                                actual_data)
+  diff_text = _DiffFileContents(options.expected_file, actual_data)
+
+  if not diff_text:
+    fail_msg = ''
+  else:
+    fail_msg = """
+Expectations need updating:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expectations/README.md
+
+LogDog tip: Use "Raw log" or "Switch to lite mode" before copying:
+https://bugs.chromium.org/p/chromium/issues/detail?id=984616
+
+{}
+
+To update expectations, run:
+########### START ###########
+ patch -p1 <<'END_DIFF'
+{}
+END_DIFF
+############ END ############
+""".format(custom_msg, diff_text)
+
+    sys.stderr.write(fail_msg)
+
+  if fail_msg and options.fail_on_expectations:
+    # Don't write failure file when failing on expectations or else the target
+    # will not be re-run on subsequent ninja invocations.
+    sys.exit(1)
+
+  if options.failure_file:
+    with open(options.failure_file, 'w') as f:
+      f.write(fail_msg)
diff --git a/src/build/android/gyp/util/jar_info_utils.py b/src/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000..9759455
--- /dev/null
+++ b/src/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,59 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+# Utilities to read and write .jar.info files.
+#
+# A .jar.info file contains a simple mapping from fully-qualified Java class
+# names to the source file that actually defines it.
+#
+# For APKs, the .jar.info instead maps each class name to the .jar file that
+# contains its .class definition.
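+#
+# Each line has the form (the values below are illustrative):
+#   org.chromium.foo.Bar,../../foo/java/src/org/chromium/foo/Bar.java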
+
+
+def ReadAarSourceInfo(info_path):
+  """Returns the source= path from an .aar's source.info file."""
+  # The .info looks like: "source=path/to/.aar\n".
+  with open(info_path) as f:
+    return f.read().rstrip().split('=', 1)[1]
+
+
+def ParseJarInfoFile(info_path):
+  """Parse a given .jar.info file as a dictionary.
+
+  Args:
+    info_path: input .jar.info file path.
+  Returns:
+    A new dictionary mapping fully-qualified Java class names to file paths.
+  """
+  info_data = dict()
+  if os.path.exists(info_path):
+    with open(info_path, 'r') as info_file:
+      for line in info_file:
+        line = line.strip()
+        if line:
+          fully_qualified_name, path = line.split(',', 1)
+          info_data[fully_qualified_name] = path
+  return info_data
+
+
+def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
+  """Generate a .jar.info file from a given dictionary.
+
+  Args:
+    output_obj: output file object.
+    info_data: a mapping of fully qualified Java class names to filepaths.
+    source_file_map: an optional mapping from java source file paths to the
+      corresponding source .srcjar. This is needed because info_data may
+      contain paths of Java source files that were extracted from an .srcjar
+      into a temporary location.
+  """
+  for fully_qualified_name, path in sorted(info_data.items()):
+    if source_file_map and path in source_file_map:
+      path = source_file_map[path]
+      assert not path.startswith('/tmp'), (
+          'Java file path should not be in temp dir: {}'.format(path))
+    output_obj.write(('{},{}\n'.format(fully_qualified_name,
+                                       path)).encode('utf8'))
diff --git a/src/build/android/gyp/util/java_cpp_utils.py b/src/build/android/gyp/util/java_cpp_utils.py
new file mode 100644
index 0000000..5180400
--- /dev/null
+++ b/src/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,194 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
+def GetScriptName():
+  return os.path.basename(os.path.abspath(sys.argv[0]))
+
+
+def GetJavaFilePath(java_package, class_name):
+  package_path = java_package.replace('.', os.path.sep)
+  file_name = class_name + '.java'
+  return os.path.join(package_path, file_name)
+
+
+def KCamelToShouty(s):
+  """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.
+
+  kFooBar -> FOO_BAR
+  FooBar -> FOO_BAR
+  FooBAR9 -> FOO_BAR9
+  FooBARBaz -> FOO_BAR_BAZ
+  """
+  if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s):
+    return s
+  # Strip the leading k.
+  s = re.sub(r'^k', '', s)
+  # Treat "WebView" like one word.
+  s = re.sub(r'WebView', r'Webview', s)
+  # Add _ between title words and anything else.
+  s = re.sub(r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2', s)
+  # Add _ between lower -> upper transitions.
+  s = re.sub(r'([^A-Z_0-9])([A-Z])', r'\1_\2', s)
+  return s.upper()
+
+
+class JavaString(object):
+  def __init__(self, name, value, comments):
+    self.name = KCamelToShouty(name)
+    self.value = value
+    self.comments = '\n'.join('    ' + x for x in comments)
+
+  def Format(self):
+    return '%s\n    public static final String %s = %s;' % (
+        self.comments, self.name, self.value)
+
+
+def ParseTemplateFile(lines):
+  package_re = re.compile(r'^package (.*);')
+  class_re = re.compile(r'.*class (.*) {')
+  package = ''
+  class_name = ''
+  for line in lines:
+    package_line = package_re.match(line)
+    if package_line:
+      package = package_line.groups()[0]
+    class_line = class_re.match(line)
+    if class_line:
+      class_name = class_line.groups()[0]
+      break
+  return package, class_name
+
+
+# TODO(crbug.com/937282): Work will be needed if we want to annotate specific
+# constants in the file to be parsed.
+class CppConstantParser(object):
+  """Parses C++ constants, retaining their comments.
+
+  The Delegate subclass is responsible for matching and extracting the
+  constant's variable name and value, as well as generating an object to
+  represent the Java representation of this value.
+  """
+  SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')
+
+  class Delegate(object):
+    def ExtractConstantName(self, line):
+      """Extracts a constant's name from line or None if not a match."""
+      raise NotImplementedError()
+
+    def ExtractValue(self, line):
+      """Extracts a constant's value from line or None if not a match."""
+      raise NotImplementedError()
+
+    def CreateJavaConstant(self, name, value, comments):
+      """Creates an object representing the Java analog of a C++ constant.
+
+      CppConstantParser will not interact with the object created by this
+      method. Instead, it will store this value in a list and return a list of
+      all objects from the Parse() method. In this way, the caller may define
+      whatever class suits their need.
+
+      Args:
+        name: the constant's variable name, as extracted by
+          ExtractConstantName()
+        value: the constant's value, as extracted by ExtractValue()
+        comments: the code comments describing this constant
+      """
+      raise NotImplementedError()
+
+  def __init__(self, delegate, lines):
+    self._delegate = delegate
+    self._lines = lines
+    self._in_variable = False
+    self._in_comment = False
+    self._package = ''
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._constants = []
+
+  def _ExtractVariable(self, line):
+    # Defer to the delegate; the parsing regexes live in the Delegate
+    # implementation rather than in this class.
+    return self._delegate.ExtractConstantName(line)
+
+  def _ExtractValue(self, line):
+    return self._delegate.ExtractValue(line)
+
+  def _Reset(self):
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._in_variable = False
+    self._in_comment = False
+
+  def _AppendConstant(self):
+    self._constants.append(
+        self._delegate.CreateJavaConstant(self._current_name,
+                                          self._current_value,
+                                          self._current_comments))
+    self._Reset()
+
+  def _ParseValue(self, line):
+    current_value = self._delegate.ExtractValue(line)
+    if current_value is not None:
+      self._current_value = current_value
+      self._AppendConstant()
+    else:
+      self._Reset()
+
+  def _ParseComment(self, line):
+    comment_line = CppConstantParser.SINGLE_LINE_COMMENT_RE.match(line)
+    if comment_line:
+      self._current_comments.append(comment_line.groups()[0])
+      self._in_comment = True
+      self._in_variable = True
+      return True
+    else:
+      self._in_comment = False
+      return False
+
+  def _ParseVariable(self, line):
+    current_name = self._delegate.ExtractConstantName(line)
+    if current_name is not None:
+      self._current_name = current_name
+      current_value = self._delegate.ExtractValue(line)
+      if current_value is not None:
+        self._current_value = current_value
+        self._AppendConstant()
+      else:
+        self._in_variable = True
+      return True
+    else:
+      self._in_variable = False
+      return False
+
+  def _ParseLine(self, line):
+    if not self._in_variable:
+      if not self._ParseVariable(line):
+        self._ParseComment(line)
+      return
+
+    if self._in_comment:
+      if self._ParseComment(line):
+        return
+      if not self._ParseVariable(line):
+        self._Reset()
+      return
+
+    if self._in_variable:
+      self._ParseValue(line)
+
+  def Parse(self):
+    """Returns a list of objects representing C++ constants.
+
+    Each object in the list was created by Delegate.CreateJavaConstant().
+    """
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._constants
diff --git a/src/build/android/gyp/util/manifest_utils.py b/src/build/android/gyp/util/manifest_utils.py
new file mode 100644
index 0000000..a517708
--- /dev/null
+++ b/src/build/android/gyp/util/manifest_utils.py
@@ -0,0 +1,321 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for working with Android manifests."""
+
+import hashlib
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+from xml.etree import ElementTree
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution'
+EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml'))
+# When normalizing for expectation matching, wrap these tags when they are long
+# or else they become very hard to read.
+_WRAP_CANDIDATES = (
+    '<manifest',
+    '<application',
+    '<activity',
+    '<provider',
+    '<receiver',
+    '<service',
+)
+# Don't wrap lines shorter than this.
+_WRAP_LINE_LENGTH = 100
+
+_xml_namespace_initialized = False
+
+
+def _RegisterElementTreeNamespaces():
+  global _xml_namespace_initialized
+  if _xml_namespace_initialized:
+    return
+  _xml_namespace_initialized = True
+  ElementTree.register_namespace('android', ANDROID_NAMESPACE)
+  ElementTree.register_namespace('tools', TOOLS_NAMESPACE)
+  ElementTree.register_namespace('dist', DIST_NAMESPACE)
+
+
+def ParseManifest(path):
+  """Parses an AndroidManifest.xml using ElementTree.
+
+  Registers required namespaces, creates application node if missing, adds any
+  missing namespaces for 'android', 'tools' and 'dist'.
+
+  Returns tuple of:
+    doc: Root xml document.
+    manifest_node: the <manifest> node.
+    app_node: the <application> node.
+  """
+  _RegisterElementTreeNamespaces()
+  doc = ElementTree.parse(path)
+  # ElementTree.find does not work if the required tag is the root.
+  if doc.getroot().tag == 'manifest':
+    manifest_node = doc.getroot()
+  else:
+    manifest_node = doc.find('manifest')
+
+  app_node = doc.find('application')
+  if app_node is None:
+    app_node = ElementTree.SubElement(manifest_node, 'application')
+
+  return doc, manifest_node, app_node
+
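+# Usage sketch (the path is hypothetical; GetPackage and SaveManifest are
+# defined below):
+#   doc, manifest_node, app_node = ParseManifest('AndroidManifest.xml')
+#   print(GetPackage(manifest_node))
+#   SaveManifest(doc, 'out/AndroidManifest.xml')
+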
+
+def SaveManifest(doc, path):
+  with build_utils.AtomicOutput(path) as f:
+    f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))
+
+
+def GetPackage(manifest_node):
+  return manifest_node.get('package')
+
+
+def AssertUsesSdk(manifest_node,
+                  min_sdk_version=None,
+                  target_sdk_version=None,
+                  max_sdk_version=None,
+                  fail_if_not_exist=False):
+  """Asserts values of attributes of <uses-sdk> element.
+
+  Unless |fail_if_not_exist| is true, this only asserts when the passed value
+  is not None and the attribute exists. If |fail_if_not_exist| is true, it
+  fails when the passed value is not None but the attribute does not exist.
+  """
+  uses_sdk_node = manifest_node.find('./uses-sdk')
+  if uses_sdk_node is None:
+    return
+  for prefix, sdk_version in (('min', min_sdk_version), ('target',
+                                                         target_sdk_version),
+                              ('max', max_sdk_version)):
+    value = uses_sdk_node.get('{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix))
+    if fail_if_not_exist and not value and sdk_version:
+      assert False, (
+          '%sSdkVersion in Android manifest does not exist but we expect %s' %
+          (prefix, sdk_version))
+    if not value or not sdk_version:
+      continue
+    assert value == sdk_version, (
+        '%sSdkVersion in Android manifest is %s but we expect %s' %
+        (prefix, value, sdk_version))
+
+
+def AssertPackage(manifest_node, package):
+  """Asserts that manifest package has desired value.
+
+  Will only assert if both |package| is not None and the package is set in the
+  manifest.
+  """
+  package_value = GetPackage(manifest_node)
+  if package_value is None or package is None:
+    return
+  assert package_value == package, (
+      'Package in Android manifest is %s but we expect %s' % (package_value,
+                                                              package))
+
+
+def _SortAndStripElementTree(root):
+  # Sort alphabetically with two exceptions:
+  # 1) Put <application> node last (since it's giant).
+  # 2) Put android:name before other attributes.
+  def element_sort_key(node):
+    if node.tag == 'application':
+      return 'z'
+    ret = ElementTree.tostring(node)
+    # ElementTree.tostring inserts namespace attributes for any that are needed
+    # for the node or any of its descendants. Remove them so as to prevent a
+    # change to a child that adds/removes a namespace usage from changing sort
+    # order.
+    return re.sub(r' xmlns:.*?".*?"', '', ret.decode('utf8'))
+
+  name_attr = '{%s}name' % ANDROID_NAMESPACE
+
+  def attribute_sort_key(tup):
+    return ('', '') if tup[0] == name_attr else tup
+
+  def helper(node):
+    for child in node:
+      if child.text and child.text.isspace():
+        child.text = None
+      helper(child)
+
+    # Sort attributes (requires Python 3.8+).
+    node.attrib = dict(sorted(node.attrib.items(), key=attribute_sort_key))
+
+    # Sort nodes
+    node[:] = sorted(node, key=element_sort_key)
+
+  helper(root)
+
+
+def _SplitElement(line):
+  """Parses a one-line xml node into ('<tag', ['a="b"', ...]], '/>')."""
+
+  # Shlex splits nicely, but removes quotes. Need to put them back.
+  def restore_quotes(value):
+    return value.replace('=', '="', 1) + '"'
+
+  # Simplify restore_quotes by separating />.
+  assert line.endswith('>'), line
+  end_tag = '>'
+  if line.endswith('/>'):
+    end_tag = '/>'
+  line = line[:-len(end_tag)]
+
+  # Use shlex to avoid having to re-encode &quot;, etc.
+  parts = shlex.split(line)
+  start_tag = parts[0]
+  attrs = parts[1:]
+
+  return start_tag, [restore_quotes(x) for x in attrs], end_tag
+
+
+def _CreateNodeHash(lines):
+  """Computes a hash (md5) for the first XML node found in |lines|.
+
+  Args:
+    lines: List of strings containing pretty-printed XML.
+
+  Returns:
+    Truncated md5 hex digest (8 characters) of the node (including children).
+  """
+  target_indent = lines[0].find('<')
+  tag_closed = False
+  for i, l in enumerate(lines[1:]):
+    cur_indent = l.find('<')
+    if cur_indent != -1 and cur_indent <= target_indent:
+      tag_lines = lines[:i + 1]
+      break
+    elif not tag_closed and 'android:name="' in l:
+      # To reduce noise from node tags changing, use android:name as the
+      # basis of the hash, since names are usually unique.
+      tag_lines = [l]
+      break
+    tag_closed = tag_closed or '>' in l
+  else:
+    assert False, 'Did not find end of node:\n' + '\n'.join(lines)
+
+  # Insecure and truncated hash as it only needs to be unique vs. its neighbors.
+  return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
+
+
+def _IsSelfClosing(lines):
+  """Given pretty-printed xml, returns whether first node is self-closing."""
+  for l in lines:
+    idx = l.find('>')
+    if idx != -1:
+      return l[idx - 1] == '/'
+  assert False, 'Did not find end of tag:\n' + '\n'.join(lines)
+
+
+def _AddDiffTags(lines):
+  # When multiple identical tags appear sequentially, XML diffs can look like:
+  # +  </tag>
+  # +  <tag>
+  # rather than:
+  # +  <tag>
+  # +  </tag>
+  # To reduce confusion, add hashes to tags.
+  # This also ensures changed tags show up with outer <tag> elements rather than
+  # showing only changed attributes.
+  hash_stack = []
+  for i, l in enumerate(lines):
+    stripped = l.lstrip()
+    # Ignore non-indented tags and lines that are not the start/end of a node.
+    if l[0] != ' ' or stripped[0] != '<':
+      continue
+    # Ignore self-closing nodes that fit on one line.
+    if l[-2:] == '/>':
+      continue
+    # Ignore <application> since its diff tag changes with basically any change.
+    if stripped.lstrip('</').startswith('application'):
+      continue
+
+    # An opening tag (<foo>) starts a node; a closing tag (</foo>) ends one.
+    if stripped[1] != '/':
+      cur_hash = _CreateNodeHash(lines[i:])
+      if not _IsSelfClosing(lines[i:]):
+        hash_stack.append(cur_hash)
+    else:
+      cur_hash = hash_stack.pop()
+    lines[i] += '  # DIFF-ANCHOR: {}'.format(cur_hash)
+  assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
+
+
+def NormalizeManifest(manifest_contents):
+  _RegisterElementTreeNamespaces()
+  # This also strips comments and sorts node attributes alphabetically.
+  root = ElementTree.fromstring(manifest_contents)
+  package = GetPackage(root)
+
+  app_node = root.find('application')
+  if app_node is not None:
+    # android:debuggable is added when !is_official_build. Strip it out to
+    # avoid expectation diffs caused by builds that do not set
+    # is_official_build. The Play Store blocks uploads of apps with it set,
+    # so there is no risk of it slipping in.
+    debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE
+    if debuggable_name in app_node.attrib:
+      del app_node.attrib[debuggable_name]
+
+    # Trichrome's static library version number is updated daily. To avoid
+    # frequent manifest check failures, we remove the exact version number
+    # during normalization.
+    for node in app_node:
+      if (node.tag in ['uses-static-library', 'static-library']
+          and '{%s}version' % ANDROID_NAMESPACE in node.keys()
+          and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
+        node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')
+
+  # We also remove the exact package name (except the one at the root level)
+  # to avoid noise during manifest comparison.
+  def blur_package_name(node):
+    for key in node.keys():
+      node.set(key, node.get(key).replace(package, '$PACKAGE'))
+
+    for child in node:
+      blur_package_name(child)
+
+  # Only blur the package names of non-root nodes, since those occurrences
+  # generate a lot of diffs when doing manifest checks for upstream targets.
+  # Leaving the root package name unblurred keeps one copy visible in case
+  # the package name is changed by mistake.
+  for child in root:
+    blur_package_name(child)
+
+  _SortAndStripElementTree(root)
+
+  # Fix up whitespace/indentation.
+  dom = minidom.parseString(ElementTree.tostring(root))
+  out_lines = []
+  for l in dom.toprettyxml(indent='  ').splitlines():
+    if not l or l.isspace():
+      continue
+    if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES):
+      indent = ' ' * l.find('<')
+      start_tag, attrs, end_tag = _SplitElement(l)
+      out_lines.append('{}{}'.format(indent, start_tag))
+      for attribute in attrs:
+        out_lines.append('{}    {}'.format(indent, attribute))
+      out_lines[-1] += '>'
+      # Heuristic: Do not allow multi-line tags to be self-closing since these
+      # can generally be allowed to have nested elements. When diffing, it adds
+      # noise if the base file is self-closing and the non-base file is not
+      # self-closing.
+      if end_tag == '/>':
+        out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
+    else:
+      out_lines.append(l)
+
+  # Make output more diff-friendly.
+  _AddDiffTags(out_lines)
+
+  return '\n'.join(out_lines) + '\n'
diff --git a/src/build/android/gyp/util/manifest_utils_test.py b/src/build/android/gyp/util/manifest_utils_test.py
new file mode 100755
index 0000000..52bf458
--- /dev/null
+++ b/src/build/android/gyp/util/manifest_utils_test.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+from util import manifest_utils
+
+_TEST_MANIFEST = """\
+<?xml version="1.0" ?>
+<manifest package="test.pkg"
+    tools:ignore="MissingVersion"
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools">
+  <!-- Should be one line. -->
+  <uses-sdk android:minSdkVersion="24"
+      android:targetSdkVersion="30"/>
+  <!-- Should have attrs sorted-->
+  <uses-feature android:required="false" android:version="1"
+    android:name="android.hardware.vr.headtracking" />
+  <!-- Should not be wrapped since < 100 chars. -->
+  <application
+      android:name="testname">
+    <activity
+        {extra_activity_attr}
+        android:icon="@drawable/ic_devices_48dp"
+        android:label="label with spaces"
+        android:name="to be hashed"
+        android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+      <intent-filter>
+        {extra_intent_filter_elem}
+        <action android:name="android.intent.action.SEND"/>
+        <category android:name="android.intent.category.DEFAULT"/>
+        <data android:mimeType="text/plain"/>
+      </intent-filter>
+    </activity>
+    <!-- Should be made non-self-closing. -->
+    <receiver android:exported="false" android:name="\
+org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"/>
+  </application>
+</manifest>
+"""
+
+_TEST_MANIFEST_NORMALIZED = """\
+<?xml version="1.0" ?>
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    package="test.pkg"
+    tools:ignore="MissingVersion">
+  <uses-feature android:name="android.hardware.vr.headtracking" \
+android:required="false" android:version="1"/>
+  <uses-sdk android:minSdkVersion="24" android:targetSdkVersion="30"/>
+  <application android:name="testname">
+    <activity  # DIFF-ANCHOR: {activity_diff_anchor}
+        android:name="to be hashed"
+        {extra_activity_attr}android:icon="@drawable/ic_devices_48dp"
+        android:label="label with spaces"
+        android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+      <intent-filter>  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+        {extra_intent_filter_elem}\
+<action android:name="android.intent.action.SEND"/>
+        <category android:name="android.intent.category.DEFAULT"/>
+        <data android:mimeType="text/plain"/>
+      </intent-filter>  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+    </activity>  # DIFF-ANCHOR: {activity_diff_anchor}
+    <receiver  # DIFF-ANCHOR: ddab3320
+        android:name=\
+"org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"
+        android:exported="false">
+    </receiver>  # DIFF-ANCHOR: ddab3320
+  </application>
+</manifest>
+"""
+
+_ACTIVITY_DIFF_ANCHOR = '32b3a641'
+_INTENT_FILTER_DIFF_ANCHOR = '4ee601b7'
+
+
+def _CreateTestData(intent_filter_diff_anchor=_INTENT_FILTER_DIFF_ANCHOR,
+                    extra_activity_attr='',
+                    extra_intent_filter_elem=''):
+  if extra_activity_attr:
+    extra_activity_attr += '\n        '
+  if extra_intent_filter_elem:
+    extra_intent_filter_elem += '\n        '
+  test_manifest = _TEST_MANIFEST.format(
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  expected = _TEST_MANIFEST_NORMALIZED.format(
+      activity_diff_anchor=_ACTIVITY_DIFF_ANCHOR,
+      intent_filter_diff_anchor=intent_filter_diff_anchor,
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  return test_manifest, expected
+
+
+class ManifestUtilsTest(unittest.TestCase):
+  # Enable diff output.
+  maxDiff = None
+
+  def testNormalizeManifest_golden(self):
+    test_manifest, expected = _CreateTestData()
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameUsedForActivity(self):
+    test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    # Checks that the DIFF-ANCHOR does not change with the added attribute.
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameNotUsedForIntentFilter(self):
+    test_manifest, expected = _CreateTestData(
+        extra_intent_filter_elem='<a/>', intent_filter_diff_anchor='5f5c8a70')
+    actual = manifest_utils.NormalizeManifest(test_manifest)
+    # Checks that the DIFF-ANCHOR does change with the added element despite
+    # having a nested element with an android:name set.
+    self.assertMultiLineEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/md5_check.py b/src/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..87ee723
--- /dev/null
+++ b/src/build/android/gyp/util/md5_check.py
@@ -0,0 +1,471 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndWriteDepfileIfStale(on_stale_md5,
+                               options,
+                               record_path=None,
+                               input_paths=None,
+                               input_strings=None,
+                               output_paths=None,
+                               force=False,
+                               pass_changes=False,
+                               track_subpaths_allowlist=None,
+                               depfile_deps=None):
+  """Wraps CallAndRecordIfStale() and writes a depfile if applicable.
+
+  Depfiles are automatically added to output_paths when present in the |options|
+  argument. They are then created after |on_stale_md5| is called.
+
+  By default, only python dependencies are added to the depfile. If there are
+  other input paths that are not captured by GN deps, then they should be listed
+  in depfile_deps. It's important to write paths to the depfile that are already
+  captured by GN deps since GN args can cause GN deps to change, and such
+  changes are not immediately reflected in depfiles (http://crbug.com/589311).
+  """
+  if not output_paths:
+    raise Exception('At least one output_path must be specified.')
+  input_paths = list(input_paths or [])
+  input_strings = list(input_strings or [])
+  output_paths = list(output_paths or [])
+
+  input_paths += print_python_deps.ComputePythonDependencies()
+
+  CallAndRecordIfStale(
+      on_stale_md5,
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=pass_changes,
+      track_subpaths_allowlist=track_subpaths_allowlist)
+
+  # Write depfile even when inputs have not changed to ensure build correctness
+  # on bots that build with & without patch, and the patch changes the depfile
+  # location.
+  if hasattr(options, 'depfile') and options.depfile:
+    build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
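+
+# Example usage (a minimal sketch; _OnStaleMd5 and the option values below are
+# hypothetical):
+#   CallAndWriteDepfileIfStale(
+#       lambda: _OnStaleMd5(options),
+#       options,
+#       input_paths=[options.input_jar],
+#       input_strings=[options.extra_flag],
+#       output_paths=[options.output_jar])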
+
+
+def CallAndRecordIfStale(function,
+                         record_path=None,
+                         input_paths=None,
+                         input_strings=None,
+                         output_paths=None,
+                         force=False,
+                         pass_changes=False,
+                         track_subpaths_allowlist=None):
+  """Calls function if outputs are stale.
+
+  Outputs are considered stale if:
+  - any output_paths are missing, or
+  - the contents of any file within input_paths have changed, or
+  - the contents of input_strings have changed.
+
+  To debug which files are out-of-date, set the environment variable:
+      PRINT_BUILD_EXPLANATIONS=1
+
+  Args:
+    function: The function to call.
+    record_path: Path to record metadata.
+      Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+    input_strings: List of strings to record verbatim.
+    output_paths: List of output paths.
+    force: Whether to treat outputs as missing regardless of whether they
+      actually are.
+    pass_changes: Whether to pass a Changes instance to |function|.
+    track_subpaths_allowlist: Relevant only when pass_changes=True. List of .zip
+      files from |input_paths| to make subpath information available for.
+  """
+  assert record_path or output_paths
+  input_paths = input_paths or []
+  input_strings = input_strings or []
+  output_paths = output_paths or []
+  record_path = record_path or output_paths[0] + '.md5.stamp'
+
+  assert record_path.endswith('.stamp'), (
+      'record paths must end in \'.stamp\' so that they are easy to find '
+      'and delete')
+
+  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
+  new_metadata.AddStrings(input_strings)
+
+  zip_allowlist = set(track_subpaths_allowlist or [])
+  for path in input_paths:
+    # It's faster to md5 an entire zip file than it is to just locate & hash
+    # its central directory (which is what this used to do).
+    if path in zip_allowlist:
+      entries = _ExtractZipEntries(path)
+      new_metadata.AddZipFile(path, entries)
+    else:
+      new_metadata.AddFile(path, _ComputeTagForPath(path))
+
+  old_metadata = None
+  force = force or _FORCE_REBUILD
+  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  too_new = []
+  # When outputs are missing, don't bother gathering change information.
+  if not missing_outputs and os.path.exists(record_path):
+    record_mtime = os.path.getmtime(record_path)
+    # Outputs newer than the change information must have been modified outside
+    # of the build, and should be considered stale.
+    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+    if not too_new:
+      with open(record_path, 'r') as jsonfile:
+        try:
+          old_metadata = _Metadata.FromFile(jsonfile)
+        except:  # pylint: disable=bare-except
+          pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
+  if not changes.HasChanges():
+    return
+
+  if PRINT_EXPLANATIONS:
+    print('=' * 80)
+    print('Target is stale: %s' % record_path)
+    print(changes.DescribeDifference())
+    print('=' * 80)
+
+  args = (changes,) if pass_changes else ()
+  function(*args)
+
+  with open(record_path, 'w') as f:
+    new_metadata.ToFile(f)
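+
+# Example with pass_changes=True (a sketch; paths are hypothetical). The
+# record path defaults to output_paths[0] + '.md5.stamp':
+#
+#   def _OnStale(changes):
+#     for path in changes.IterModifiedPaths():
+#       print('Modified:', path)
+#
+#   CallAndRecordIfStale(_OnStale, input_paths=['in.txt'],
+#                        output_paths=['out.jar'], pass_changes=True)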
+
+
+class Changes(object):
+  """Provides and API for querying what changed between runs."""
+
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+               too_new):
+    self.old_metadata = old_metadata
+    self.new_metadata = new_metadata
+    self.force = force
+    self.missing_outputs = missing_outputs
+    self.too_new = too_new
+
+  def _GetOldTag(self, path, subpath=None):
+    return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+  def HasChanges(self):
+    """Returns whether any changes exist."""
+    return (self.HasStringChanges()
+            or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+  def HasStringChanges(self):
+    """Returns whether string metadata changed."""
+    return (self.force or not self.old_metadata
+            or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())
+
+  def AddedOrModifiedOnly(self):
+    """Returns whether the only changes were from added or modified (sub)files.
+
+    No missing outputs, no removed paths/subpaths.
+    """
+    if self.HasStringChanges():
+      return False
+    if any(self.IterRemovedPaths()):
+      return False
+    for path in self.IterModifiedPaths():
+      if any(self.IterRemovedSubpaths(path)):
+        return False
+    return True
+
+  def IterAllPaths(self):
+    """Generator for paths."""
+    return self.new_metadata.IterPaths()
+
+  def IterAllSubpaths(self, path):
+    """Generator for subpaths."""
+    return self.new_metadata.IterSubpaths(path)
+
+  def IterAddedPaths(self):
+    """Generator for paths that were added."""
+    for path in self.new_metadata.IterPaths():
+      if self._GetOldTag(path) is None:
+        yield path
+
+  def IterAddedSubpaths(self, path):
+    """Generator for paths that were added within the given zip file."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      if self._GetOldTag(path, subpath) is None:
+        yield subpath
+
+  def IterRemovedPaths(self):
+    """Generator for paths that were removed."""
+    if self.old_metadata:
+      for path in self.old_metadata.IterPaths():
+        if self.new_metadata.GetTag(path) is None:
+          yield path
+
+  def IterRemovedSubpaths(self, path):
+    """Generator for paths that were removed within the given zip file."""
+    if self.old_metadata:
+      for subpath in self.old_metadata.IterSubpaths(path):
+        if self.new_metadata.GetTag(path, subpath) is None:
+          yield subpath
+
+  def IterModifiedPaths(self):
+    """Generator for paths whose contents have changed."""
+    for path in self.new_metadata.IterPaths():
+      old_tag = self._GetOldTag(path)
+      new_tag = self.new_metadata.GetTag(path)
+      if old_tag is not None and old_tag != new_tag:
+        yield path
+
+  def IterModifiedSubpaths(self, path):
+    """Generator for paths within a zip file whose contents have changed."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      old_tag = self._GetOldTag(path, subpath)
+      new_tag = self.new_metadata.GetTag(path, subpath)
+      if old_tag is not None and old_tag != new_tag:
+        yield subpath
+
+  def IterChangedPaths(self):
+    """Generator for all changed paths (added/removed/modified)."""
+    return itertools.chain(self.IterRemovedPaths(),
+                           self.IterModifiedPaths(),
+                           self.IterAddedPaths())
+
+  def IterChangedSubpaths(self, path):
+    """Generator for paths within a zip that were added/removed/modified."""
+    return itertools.chain(self.IterRemovedSubpaths(path),
+                           self.IterModifiedSubpaths(path),
+                           self.IterAddedSubpaths(path))
+
+  def DescribeDifference(self):
+    """Returns a human-readable description of what changed."""
+    if self.force:
+      return 'force=True'
+    elif self.missing_outputs:
+      return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
+    elif self.too_new:
+      return 'Outputs newer than stamp file:\n  ' + '\n  '.join(self.too_new)
+    elif self.old_metadata is None:
+      return 'Previous stamp file not found.'
+
+    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
+      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
+                            self.new_metadata.GetStrings())
+      changed = [s for s in ndiff if not s.startswith(' ')]
+      return 'Input strings changed:\n  ' + '\n  '.join(changed)
+
+    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
+      return "There's no difference."
+
+    lines = []
+    lines.extend('Added: ' + p for p in self.IterAddedPaths())
+    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
+    for path in self.IterModifiedPaths():
+      lines.append('Modified: ' + path)
+      lines.extend('  -> Subpath added: ' + p
+                   for p in self.IterAddedSubpaths(path))
+      lines.extend('  -> Subpath removed: ' + p
+                   for p in self.IterRemovedSubpaths(path))
+      lines.extend('  -> Subpath modified: ' + p
+                   for p in self.IterModifiedSubpaths(path))
+    if lines:
+      return 'Input files changed:\n  ' + '\n  '.join(lines)
+    return 'I have no idea what changed (there is a bug).'
+
+
+class _Metadata(object):
+  """Data model for tracking change metadata.
+
+  Args:
+    track_entries: Enables per-file change tracking. Slower, but required for
+        Changes functionality.
+  """
+  # Schema:
+  # {
+  #   "files-md5": "VALUE",
+  #   "strings-md5": "VALUE",
+  #   "input-files": [
+  #     {
+  #       "path": "path.jar",
+  #       "tag": "{MD5 of entries}",
+  #       "entries": [
+  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
+  #       ]
+  #     }, {
+  #       "path": "path.txt",
+  #       "tag": "{MD5}",
+  #     }
+  #   ],
+  #   "input-strings": ["a", "b", ...],
+  # }
+  def __init__(self, track_entries=False):
+    self._track_entries = track_entries
+    self._files_md5 = None
+    self._strings_md5 = None
+    self._files = []
+    self._strings = []
+    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
+    self._file_map = None
+
+  @classmethod
+  def FromFile(cls, fileobj):
+    """Returns a _Metadata initialized from a file object."""
+    ret = cls()
+    obj = json.load(fileobj)
+    ret._files_md5 = obj['files-md5']
+    ret._strings_md5 = obj['strings-md5']
+    ret._files = obj.get('input-files', [])
+    ret._strings = obj.get('input-strings', [])
+    return ret
+
+  def ToFile(self, fileobj):
+    """Serializes metadata to the given file object."""
+    obj = {
+        'files-md5': self.FilesMd5(),
+        'strings-md5': self.StringsMd5(),
+    }
+    if self._track_entries:
+      obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
+      obj['input-strings'] = self._strings
+
+    json.dump(obj, fileobj, indent=2)
+
+  def _AssertNotQueried(self):
+    assert self._files_md5 is None
+    assert self._strings_md5 is None
+    assert self._file_map is None
+
+  def AddStrings(self, values):
+    self._AssertNotQueried()
+    self._strings.extend(str(v) for v in values)
+
+  def AddFile(self, path, tag):
+    """Adds metadata for a non-zip file.
+
+    Args:
+      path: Path to the file.
+      tag: A short string representative of the file contents.
+    """
+    self._AssertNotQueried()
+    self._files.append({
+        'path': path,
+        'tag': tag,
+    })
+
+  def AddZipFile(self, path, entries):
+    """Adds metadata for a zip file.
+
+    Args:
+      path: Path to the file.
+      entries: List of (subpath, tag) tuples for entries within the zip.
+    """
+    self._AssertNotQueried()
+    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
+                                            (e[1] for e in entries)))
+    self._files.append({
+        'path': path,
+        'tag': tag,
+        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
+    })
+
+  def GetStrings(self):
+    """Returns the list of input strings."""
+    return self._strings
+
+  def FilesMd5(self):
+    """Lazily computes and returns the aggregate md5 of input files."""
+    if self._files_md5 is None:
+      # Omit paths from md5 since temporary files have random names.
+      self._files_md5 = _ComputeInlineMd5(
+          self.GetTag(p) for p in sorted(self.IterPaths()))
+    return self._files_md5
+
+  def StringsMd5(self):
+    """Lazily computes and returns the aggregate md5 of input strings."""
+    if self._strings_md5 is None:
+      self._strings_md5 = _ComputeInlineMd5(self._strings)
+    return self._strings_md5
+
+  def _GetEntry(self, path, subpath=None):
+    """Returns the JSON entry for the given path / subpath."""
+    if self._file_map is None:
+      self._file_map = {}
+      for entry in self._files:
+        self._file_map[(entry['path'], None)] = entry
+        for subentry in entry.get('entries', ()):
+          self._file_map[(entry['path'], subentry['path'])] = subentry
+    return self._file_map.get((path, subpath))
+
+  def GetTag(self, path, subpath=None):
+    """Returns the tag for the given path / subpath."""
+    ret = self._GetEntry(path, subpath)
+    return ret and ret['tag']
+
+  def IterPaths(self):
+    """Returns a generator for all top-level paths."""
+    return (e['path'] for e in self._files)
+
+  def IterSubpaths(self, path):
+    """Returns a generator for all subpaths in the given zip.
+
+    If the given path is not a zip file or doesn't exist, returns an empty
+    iterable.
+    """
+    outer_entry = self._GetEntry(path)
+    if not outer_entry:
+      return ()
+    subentries = outer_entry.get('entries', [])
+    return (entry['path'] for entry in subentries)
+
+
+def _ComputeTagForPath(path):
+  stat = os.stat(path)
+  if stat.st_size > 1 * 1024 * 1024:
+    # Fall back to mtime for large files so that md5_check does not take too
+    # long to run.
+    return stat.st_mtime
+  md5 = hashlib.md5()
+  with open(path, 'rb') as f:
+    md5.update(f.read())
+  return md5.hexdigest()
+
+
+def _ComputeInlineMd5(iterable):
+  """Computes the md5 of the concatenated parameters."""
+  md5 = hashlib.md5()
+  for item in iterable:
+    md5.update(str(item).encode('ascii'))
+  return md5.hexdigest()
+
+
+def _ExtractZipEntries(path):
+  """Returns a list of (path, CRC32) of all files within |path|."""
+  entries = []
+  with zipfile.ZipFile(path) as zip_file:
+    for zip_info in zip_file.infolist():
+      # Skip directories and empty files.
+      if zip_info.CRC:
+        entries.append(
+            (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+  return entries
diff --git a/src/build/android/gyp/util/md5_check_test.py b/src/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000..e11bbd5
--- /dev/null
+++ b/src/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
+def _WriteZipFile(path, entries):
+  with zipfile.ZipFile(path, 'w') as zip_file:
+    for subpath, data in entries:
+      zip_file.writestr(subpath, data)
+
+
+class TestMd5Check(unittest.TestCase):
+  def setUp(self):
+    self.called = False
+    self.changes = None
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
+    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
+    file1_contents = b'input file 1'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    # Test with an empty zip file to start.
+    _WriteZipFile(input_file2.name, [])
+    input_files = [input_file1.name, input_file2.name]
+    zip_paths = [input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    def CheckCallAndRecord(should_call,
+                           message,
+                           force=False,
+                           outputs_specified=False,
+                           outputs_missing=False,
+                           expected_changes=None,
+                           added_or_modified_only=None,
+                           track_subentries=False,
+                           output_newer_than_record=False):
+      output_paths = None
+      if outputs_specified:
+        output_file1 = tempfile.NamedTemporaryFile()
+        if outputs_missing:
+          output_file1.close()  # Gets deleted on close().
+        output_paths = [output_file1.name]
+      if output_newer_than_record:
+        output_mtime = os.path.getmtime(output_file1.name)
+        os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
+      else:
+        # Touch the record file so it doesn't look like it's older than
+        # the output we've just created.
+        os.utime(record_path.name, None)
+
+      self.called = False
+      self.changes = None
+      if expected_changes or added_or_modified_only is not None:
+        def MarkCalled(changes):
+          self.called = True
+          self.changes = changes
+      else:
+        def MarkCalled():
+          self.called = True
+
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          output_paths=output_paths,
+          force=force,
+          pass_changes=(expected_changes or added_or_modified_only) is not None,
+          track_subpaths_allowlist=zip_paths if track_subentries else None)
+      self.assertEqual(should_call, self.called, message)
+      if expected_changes:
+        description = self.changes.DescribeDifference()
+        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
+                        'Expected %s to match %s' % (
+                        repr(description), repr(expected_changes)))
+      if should_call and added_or_modified_only is not None:
+        self.assertEqual(added_or_modified_only,
+                         self.changes.AddedOrModifiedOnly())
+
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
+                       expected_changes='Previous stamp file not found.',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    CheckCallAndRecord(False, 'should not call when nothing changed #2',
+                       outputs_specified=True, outputs_missing=False)
+    CheckCallAndRecord(True, 'should call when output missing',
+                       outputs_specified=True, outputs_missing=True,
+                       expected_changes='Outputs do not exist:*',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(True,
+                       'should call when output is newer than record',
+                       expected_changes='Outputs newer than stamp file:*',
+                       outputs_specified=True,
+                       outputs_missing=False,
+                       added_or_modified_only=False,
+                       output_newer_than_record=True)
+    CheckCallAndRecord(True, force=True, message='should call when forced',
+                       expected_changes='force=True',
+                       added_or_modified_only=False)
+
+    input_file1.write(b'some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call',
+                       expected_changes='*Modified: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call',
+                       expected_changes='*Removed: %s' % input_file1.name,
+                       added_or_modified_only=False)
+
+    input_files.append(input_file1.name)
+    CheckCallAndRecord(True, 'added input file should trigger call',
+                       expected_changes='*Added: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call',
+                       expected_changes='*Input strings changed*',
+                       added_or_modified_only=False)
+
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
+                       expected_changes='*Input strings changed*')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(
+        True,
+        'added input string should trigger call',
+        added_or_modified_only=False)
+
+    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
+    CheckCallAndRecord(
+        True,
+        'added subpath should trigger call',
+        expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
+                                                              'path/1.txt'),
+        added_or_modified_only=True,
+        track_subentries=True)
+    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
+    CheckCallAndRecord(
+        True,
+        'changed subpath should trigger call',
+        expected_changes='*Modified: %s*Subpath modified: %s' %
+        (input_file2.name, 'path/1.txt'),
+        added_or_modified_only=True,
+        track_subentries=True)
+
+    _WriteZipFile(input_file2.name, [])
+    CheckCallAndRecord(True, 'removed subpath should trigger call',
+                       expected_changes='*Modified: %s*Subpath removed: %s' % (
+                                        input_file2.name, 'path/1.txt'),
+                       added_or_modified_only=False)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/parallel.py b/src/build/android/gyp/util/parallel.py
new file mode 100644
index 0000000..c26875a
--- /dev/null
+++ b/src/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+  logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+  def __init__(self, value):
+    self._value = value
+
+  def get(self):
+    return self._value
+
+  def wait(self):
+    pass
+
+  def ready(self):
+    return True
+
+  def successful(self):
+    return True
+
+
+class _ExceptionWrapper(object):
+  """Used to marshal exception messages back to main process."""
+
+  def __init__(self, msg, exception_type=None):
+    self.msg = msg
+    self.exception_type = exception_type
+
+  def MaybeThrow(self):
+    if self.exception_type:
+      raise getattr(__builtins__,
+                    self.exception_type)('Originally caused by: ' + self.msg)
+
+
+class _FuncWrapper(object):
+  """Runs on the fork()'ed side to catch exceptions and spread *args."""
+
+  def __init__(self, func):
+    global _is_child_process
+    _is_child_process = True
+    self._func = func
+
+  def __call__(self, index, _=None):
+    try:
+      return self._func(*_fork_params[index], **_fork_kwargs)
+    except Exception as e:
+      # Only keep the exception type for built-in exception types; otherwise we
+      # risk further exceptions while marshalling.
+      exception_type = None
+      if hasattr(__builtins__, type(e).__name__):
+        exception_type = type(e).__name__
+      # multiprocessing is supposed to catch and return exceptions automatically
+      # but it doesn't seem to work properly :(.
+      return _ExceptionWrapper(traceback.format_exc(), exception_type)
+    except:  # pylint: disable=bare-except
+      return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+  """Allows for host-side logic to be run after child process has terminated.
+
+  * Unregisters associated pool _all_pools.
+  * Raises exception caught by _FuncWrapper.
+  """
+
+  def __init__(self, result, pool=None):
+    self._result = result
+    self._pool = pool
+
+  def get(self):
+    self.wait()
+    value = self._result.get()
+    _CheckForException(value)
+    return value
+
+  def wait(self):
+    self._result.wait()
+    if self._pool:
+      _all_pools.remove(self._pool)
+      self._pool = None
+
+  def ready(self):
+    return self._result.ready()
+
+  def successful(self):
+    return self._result.successful()
+
+
+def _TerminatePools():
+  """Calls .terminate() on all active process pools.
+
+  Not supposed to be necessary according to the docs, but seems to be required
+  when a child process throws an exception or Ctrl-C is hit.
+  """
+  global _silence_exceptions
+  _silence_exceptions = True
+  # Child processes cannot have pools, but atexit runs this function because
+  # it was registered before fork()ing.
+  if _is_child_process:
+    return
+
+  def close_pool(pool):
+    try:
+      pool.terminate()
+    except:  # pylint: disable=bare-except
+      pass
+
+  for i, pool in enumerate(_all_pools):
+    # Without calling terminate() on a separate thread, the call can block
+    # forever.
+    thread = threading.Thread(name='Pool-Terminate-{}'.format(i),
+                              target=close_pool,
+                              args=(pool, ))
+    thread.daemon = True
+    thread.start()
+
+
+def _CheckForException(value):
+  if isinstance(value, _ExceptionWrapper):
+    global _silence_exceptions
+    if not _silence_exceptions:
+      value.MaybeThrow()
+      _silence_exceptions = True
+      logging.error('Subprocess raised an exception:\n%s', value.msg)
+    sys.exit(1)
+
+
+def _MakeProcessPool(job_params, **job_kwargs):
+  global _all_pools
+  global _fork_params
+  global _fork_kwargs
+  assert _fork_params is None
+  assert _fork_kwargs is None
+  pool_size = min(len(job_params), multiprocessing.cpu_count())
+  _fork_params = job_params
+  _fork_kwargs = job_kwargs
+  ret = multiprocessing.Pool(pool_size)
+  _fork_params = None
+  _fork_kwargs = None
+  if _all_pools is None:
+    _all_pools = []
+    atexit.register(_TerminatePools)
+  _all_pools.append(ret)
+  return ret
+
+
+def ForkAndCall(func, args):
+  """Runs |func| in a fork'ed process.
+
+  Returns:
+    A Result object (call .get() to get the return value)
+  """
+  if DISABLE_ASYNC:
+    pool = None
+    result = _ImmediateResult(func(*args))
+  else:
+    pool = _MakeProcessPool([args])  # Omit |kwargs|.
+    result = pool.apply_async(_FuncWrapper(func), (0, ))
+    pool.close()
+  return _WrappedResult(result, pool=pool)
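+
+# Example (illustrative): compute sum([1, 2, 3]) in a forked process.
+#   result = ForkAndCall(sum, ([1, 2, 3],))
+#   # ... do other work on the main process ...
+#   assert result.get() == 6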
+
+
+def BulkForkAndCall(func, arg_tuples, **kwargs):
+  """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.
+
+  Args:
+    kwargs: Common keyword arguments to be passed to |func|.
+
+  Yields the return values in order.
+  """
+  arg_tuples = list(arg_tuples)
+  if not arg_tuples:
+    return
+
+  if DISABLE_ASYNC:
+    for args in arg_tuples:
+      yield func(*args, **kwargs)
+    return
+
+  pool = _MakeProcessPool(arg_tuples, **kwargs)
+  wrapped_func = _FuncWrapper(func)
+  try:
+    for result in pool.imap(wrapped_func, range(len(arg_tuples))):
+      _CheckForException(result)
+      yield result
+  finally:
+    pool.close()
+    pool.join()
+    _all_pools.remove(pool)
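+
+# Example (illustrative): square numbers across forked processes.
+#   for value in BulkForkAndCall(pow, [(2, 2), (3, 2), (4, 2)]):
+#     print(value)  # 4, 9, 16 (in order)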
diff --git a/src/build/android/gyp/util/protoresources.py b/src/build/android/gyp/util/protoresources.py
new file mode 100644
index 0000000..272574f
--- /dev/null
+++ b/src/build/android/gyp/util/protoresources.py
@@ -0,0 +1,308 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions that modify resources in protobuf format.
+
+Format reference:
+https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto
+"""
+
+import logging
+import os
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+from util import resource_utils
+
+sys.path[1:1] = [
+    # `Resources_pb2` module imports `descriptor`, which imports `six`.
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'),
+    # Make sure the pb2 files are able to import google.protobuf
+    os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
+                 'python'),
+]
+
+from proto import Resources_pb2
+
+# First bytes in an .arsc.flat file.
+# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0)
+_FLAT_ARSC_HEADER = b'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
+
+# The package ID hardcoded for shared libraries. See
+# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value
+# changes, make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java.
+SHARED_LIBRARY_HARDCODED_ID = 36
+
+
+def _ProcessZip(zip_path, process_func):
+  """Filters a .zip file via: new_bytes = process_func(filename, data)."""
+  has_changes = False
+  zip_entries = []
+  with zipfile.ZipFile(zip_path) as src_zip:
+    for info in src_zip.infolist():
+      data = src_zip.read(info)
+      new_data = process_func(info.filename, data)
+      if new_data is not data:
+        has_changes = True
+        data = new_data
+      zip_entries.append((info, data))
+
+  # Overwrite the original zip file.
+  if has_changes:
+    with zipfile.ZipFile(zip_path, 'w') as f:
+      for info, data in zip_entries:
+        f.writestr(info, data)
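+
+# Example (illustrative; the path is hypothetical): upper-case every .txt
+# entry in-place.
+#   _ProcessZip('some.zip', lambda name, data:
+#               data.upper() if name.endswith('.txt') else data)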
+
+
+def _ProcessProtoItem(item):
+  if not item.HasField('ref'):
+    return
+
+  # If this is a dynamic attribute (type ATTRIBUTE, package ID 0), hardcode
+  # the package to SHARED_LIBRARY_HARDCODED_ID.
+  if item.ref.type == Resources_pb2.Reference.ATTRIBUTE and not (item.ref.id
+                                                                 & 0xff000000):
+    item.ref.id |= (0x01000000 * SHARED_LIBRARY_HARDCODED_ID)
+    item.ref.ClearField('is_dynamic')
+
+
+def _ProcessProtoValue(value):
+  if value.HasField('item'):
+    _ProcessProtoItem(value.item)
+    return
+
+  compound_value = value.compound_value
+  if compound_value.HasField('style'):
+    for entry in compound_value.style.entry:
+      _ProcessProtoItem(entry.item)
+  elif compound_value.HasField('array'):
+    for element in compound_value.array.element:
+      _ProcessProtoItem(element.item)
+  elif compound_value.HasField('plural'):
+    for entry in compound_value.plural.entry:
+      _ProcessProtoItem(entry.item)
+
+
+def _ProcessProtoXmlNode(xml_node):
+  if not xml_node.HasField('element'):
+    return
+
+  for attribute in xml_node.element.attribute:
+    _ProcessProtoItem(attribute.compiled_item)
+
+  for child in xml_node.element.child:
+    _ProcessProtoXmlNode(child)
+
+
+def _SplitLocaleResourceType(_type, allowed_resource_names):
+  """Splits locale specific resources out of |_type| and returns them.
+
+  Any locale specific resources will be removed from |_type|, and a new
+  Resources_pb2.Type value will be returned which contains those resources.
+
+  Args:
+    _type: A Resources_pb2.Type value
+    allowed_resource_names: Names of locale resources that should be kept in the
+        main type.
+  """
+  locale_entries = []
+  for entry in _type.entry:
+    if entry.name in allowed_resource_names:
+      continue
+
+    # First collect all resources values with a locale set.
+    config_values_with_locale = []
+    for config_value in entry.config_value:
+      if config_value.config.locale:
+        config_values_with_locale.append(config_value)
+
+    if config_values_with_locale:
+      # Remove the locale resources from the original entry
+      for value in config_values_with_locale:
+        entry.config_value.remove(value)
+
+      # Add locale resources to a new Entry, and save for later.
+      locale_entry = Resources_pb2.Entry()
+      locale_entry.CopyFrom(entry)
+      del locale_entry.config_value[:]
+      locale_entry.config_value.extend(config_values_with_locale)
+      locale_entries.append(locale_entry)
+
+  if not locale_entries:
+    return None
+
+  # Copy the original type and replace the entries with |locale_entries|.
+  locale_type = Resources_pb2.Type()
+  locale_type.CopyFrom(_type)
+  del locale_type.entry[:]
+  locale_type.entry.extend(locale_entries)
+  return locale_type
+
+
+def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist):
+  translations_package = None
+  if is_bundle_module:
+    # A separate top level package will be added to the resources, which
+    # contains only locale specific resources. The package ID of the locale
+    # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes
+    # resources in locale splits to all get assigned
+    # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug
+    # in shared library bundles where each split APK gets a separate dynamic
+    # ID, and cannot be accessed by the main APK.
+    translations_package = Resources_pb2.Package()
+    translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID
+    translations_package.package_name = (table.package[0].package_name +
+                                         '_translations')
+
+    # These resources are allowed in the base resources, since they are needed
+    # by WebView.
+    allowed_resource_names = set()
+    if shared_resources_allowlist:
+      allowed_resource_names = set(
+          resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist))
+
+  for package in table.package:
+    for _type in package.type:
+      for entry in _type.entry:
+        for config_value in entry.config_value:
+          _ProcessProtoValue(config_value.value)
+
+      if translations_package is not None:
+        locale_type = _SplitLocaleResourceType(_type, allowed_resource_names)
+        if locale_type:
+          translations_package.type.add().CopyFrom(locale_type)
+
+  if translations_package is not None:
+    table.package.add().CopyFrom(translations_package)
+
+
+def HardcodeSharedLibraryDynamicAttributes(zip_path,
+                                           is_bundle_module,
+                                           shared_resources_allowlist=None):
+  """Hardcodes the package IDs of dynamic attributes and locale resources.
+
+  Hardcoding dynamic attribute package IDs is a workaround for b/147674078,
+  which affects Android versions pre-N. Hardcoding locale resource package IDs
+  is a workaround for b/155437035, which affects resources built with
+  --shared-lib on all Android versions.
+
+  Args:
+    zip_path: Path to proto APK file.
+    is_bundle_module: True for bundle modules.
+    shared_resources_allowlist: Set of resource names to not extract out of the
+        main package.
+  """
+
+  def process_func(filename, data):
+    if filename == 'resources.pb':
+      table = Resources_pb2.ResourceTable()
+      table.ParseFromString(data)
+      _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist)
+      data = table.SerializeToString()
+    elif filename.endswith('.xml') and not filename.startswith('res/raw'):
+      xml_node = Resources_pb2.XmlNode()
+      xml_node.ParseFromString(data)
+      _ProcessProtoXmlNode(xml_node)
+      data = xml_node.SerializeToString()
+    return data
+
+  _ProcessZip(zip_path, process_func)
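+
+# Example usage (illustrative; the .ap_ path is hypothetical):
+#   HardcodeSharedLibraryDynamicAttributes('out/base.proto.ap_',
+#                                          is_bundle_module=True)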
+
+
+class _ResourceStripper(object):
+  def __init__(self, partial_path, keep_predicate):
+    self.partial_path = partial_path
+    self.keep_predicate = keep_predicate
+    self._has_changes = False
+
+  @staticmethod
+  def _IterStyles(entry):
+    for config_value in entry.config_value:
+      value = config_value.value
+      if value.HasField('compound_value'):
+        compound_value = value.compound_value
+        if compound_value.HasField('style'):
+          yield compound_value.style
+
+  def _StripStyles(self, entry, type_and_name):
+    # Strip style entries that refer to attributes that have been stripped.
+    for style in self._IterStyles(entry):
+      entries = style.entry
+      new_entries = []
+      for entry in entries:
+        full_name = '{}/{}'.format(type_and_name, entry.key.name)
+        if not self.keep_predicate(full_name):
+          logging.debug('Stripped %s/%s', self.partial_path, full_name)
+        else:
+          new_entries.append(entry)
+
+      if len(new_entries) != len(entries):
+        self._has_changes = True
+        del entries[:]
+        entries.extend(new_entries)
+
+  def _StripEntries(self, entries, type_name):
+    new_entries = []
+    for entry in entries:
+      type_and_name = '{}/{}'.format(type_name, entry.name)
+      if not self.keep_predicate(type_and_name):
+        logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
+      else:
+        new_entries.append(entry)
+        self._StripStyles(entry, type_and_name)
+
+    if len(new_entries) != len(entries):
+      self._has_changes = True
+      del entries[:]
+      entries.extend(new_entries)
+
+  def StripTable(self, table):
+    self._has_changes = False
+    for package in table.package:
+      for _type in package.type:
+        self._StripEntries(_type.entry, _type.name)
+    return self._has_changes
+
+
+def _TableFromFlatBytes(data):
+  # https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp
+  size_idx = len(_FLAT_ARSC_HEADER)
+  proto_idx = size_idx + 8
+  if data[:size_idx] != _FLAT_ARSC_HEADER:
+    raise Exception('Error parsing .arsc.flat data: unexpected header.')
+  # Size is stored as uint64.
+  size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
+  table = Resources_pb2.ResourceTable()
+  proto_bytes = data[proto_idx:proto_idx + size]
+  table.ParseFromString(proto_bytes)
+  return table
+
+
+def _FlatBytesFromTable(table):
+  proto_bytes = table.SerializeToString()
+  size = struct.pack('<Q', len(proto_bytes))
+  overage = len(proto_bytes) % 4
+  padding = b'\0' * (4 - overage) if overage else b''
+  return b''.join((_FLAT_ARSC_HEADER, size, proto_bytes, padding))
+
+
+def StripUnwantedResources(partial_path, keep_predicate):
+  """Removes resources from .arsc.flat files inside of a .zip.
+
+  Args:
+    partial_path: Path to a .zip containing .arsc.flat entries
+    keep_predicate: Given "$partial_path/$res_type/$res_name", returns
+      whether to keep the resource.
+  """
+  stripper = _ResourceStripper(partial_path, keep_predicate)
+
+  def process_file(filename, data):
+    if filename.endswith('.arsc.flat'):
+      table = _TableFromFlatBytes(data)
+      if stripper.StripTable(table):
+        data = _FlatBytesFromTable(table)
+    return data
+
+  _ProcessZip(partial_path, process_file)
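+
+# Example usage (illustrative; the path and predicate are hypothetical):
+#   StripUnwantedResources(
+#       'out/partial.zip',
+#       keep_predicate=lambda name: not name.endswith('drawable/unused'))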
diff --git a/src/build/android/gyp/util/resource_utils.py b/src/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000..263b7c2
--- /dev/null
+++ b/src/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,1066 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template  # pylint: disable=F0401
+
+
+# A variation of these maps also exists in:
+# //base/android/java/src/org/chromium/base/LocaleUtils.java
+# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+    'es-419': 'es-rUS',
+    'sr-Latn': 'b+sr+Latn',
+    'fil': 'tl',
+    'he': 'iw',
+    'id': 'in',
+    'yi': 'ji',
+}
+_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
+    'tl': 'fil',
+    'iw': 'he',
+    'in': 'id',
+    'ji': 'yi',
+    'no': 'nb',  # 'no' is not a real language. http://crbug.com/920960
+}
+
+_ALL_RESOURCE_TYPES = {
+    'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable',
+    'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'menu',
+    'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable', 'transition',
+    'xml'
+}
+
+AAPT_IGNORE_PATTERN = ':'.join([
+    '*OWNERS',  # Allow OWNERS files within res/
+    'DIR_METADATA',  # Allow DIR_METADATA files within res/
+    '*.py',  # PRESUBMIT.py sometimes exist.
+    '*.pyc',
+    '*~',  # Some editors create these as temp files.
+    '.*',  # Never makes sense to include dot(files/dirs).
+    '*.d.stamp',  # Ignore stamp files
+    '*.backup',  # Some tools create temporary backup files.
+])
+
+MULTIPLE_RES_MAGIC_STRING = b'magic'
+
+
+def ToAndroidLocaleName(chromium_locale):
+  """Convert a Chromium locale name into a corresponding Android one."""
+  # Should be in sync with build/config/locales.gni.
+  # First handle the special cases, these are needed to deal with Android
+  # releases *before* 5.0/Lollipop.
+  android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
+  if android_locale:
+    return android_locale
+
+  # Format of Chromium locale name is '<lang>' or '<lang>-<region>'
+  # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
+  # and region is a capitalized locale region name.
+  lang, _, region = chromium_locale.partition('-')
+  if not region:
+    return lang
+
+  # Translate newer language tags into obsolete ones. Only necessary if
+  # region is not None (e.g. 'he-IL' -> 'iw-rIL').
+  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
+
+  # Using '<lang>-r<region>' is now acceptable as a locale name for all
+  # versions of Android.
+  return '%s-r%s' % (lang, region)
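+
+# Examples (per the mapping and rules above):
+#   ToAndroidLocaleName('en-US')  -> 'en-rUS'
+#   ToAndroidLocaleName('es-419') -> 'es-rUS'
+#   ToAndroidLocaleName('he')     -> 'iw'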
+
+
+# ISO 639 language code + optional ("-r" + capitalized region code).
+# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
+# are supported.
+_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
+
+# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
+# be prefixed with 'b+', and may include optional tags.
+#  e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
+_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+
+def ToChromiumLocaleName(android_locale):
+  """Convert an Android locale name into a Chromium one."""
+  lang = None
+  region = None
+  script = None
+  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
+  if m:
+    lang = m.group(1)
+    if m.group(2):
+      region = m.group(3)
+  elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
+    # Split an Android BCP-47 locale (e.g. b+sr+Latn+RS)
+    tags = android_locale.split('+')
+
+    # The Lang tag is always the first tag.
+    lang = tags[1]
+
+    # The optional region tag is 2ALPHA or 3DIGIT tag in pos 1 or 2.
+    # The optional script tag is 4ALPHA and always in pos 1.
+    optional_tags = iter(tags[2:])
+
+    next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) == 4:
+      script = next_tag
+      next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) < 4:
+      region = next_tag
+
+  if not lang:
+    return None
+
+  # Special case for es-rUS -> es-419
+  if lang == 'es' and region == 'US':
+    return 'es-419'
+
+  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+
+  if script:
+    lang = '%s-%s' % (lang, script)
+
+  if not region:
+    return lang
+
+  return '%s-%s' % (lang, region)
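+
+# Examples (per the rules above):
+#   ToChromiumLocaleName('en-rUS')       -> 'en-US'
+#   ToChromiumLocaleName('b+sr+Latn+RS') -> 'sr-Latn-RS'
+#   ToChromiumLocaleName('es-rUS')       -> 'es-419'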
+
+
+def IsAndroidLocaleQualifier(string):
+  """Returns true if |string| is a valid Android resource locale qualifier."""
+  return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
+          or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))
+
+
+def FindLocaleInStringResourceFilePath(file_path):
+  """Return Android locale name of a string resource file path.
+
+  Args:
+    file_path: A file path.
+  Returns:
+    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
+    the value of <locale> (an Android locale qualifier). Otherwise return None.
+  """
+  if not file_path.endswith('.xml'):
+    return None
+  prefix = 'values-'
+  dir_name = os.path.basename(os.path.dirname(file_path))
+  if not dir_name.startswith(prefix):
+    return None
+  qualifier = dir_name[len(prefix):]
+  return qualifier if IsAndroidLocaleQualifier(qualifier) else None
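+
+# Example (the path is hypothetical):
+#   FindLocaleInStringResourceFilePath('res/values-iw/strings.xml') -> 'iw'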
+
+
+def ToAndroidLocaleList(locale_list):
+  """Convert a list of Chromium locales into the corresponding Android list."""
+  return sorted(ToAndroidLocaleName(locale) for locale in locale_list)
+
+# Represents a line from a R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+    ('java_type', 'resource_type', 'name', 'value'))
+
+
+def _GenerateGlobs(pattern):
+  # This function processes the aapt ignore assets pattern into a list of globs
+  # to be used to exclude files using build_utils.MatchesGlob. It removes the
+  # '!', which is used by aapt to mean 'not chatty' so it does not output if the
+  # file is ignored (we don't output anyway, so it is not required). This
+  # function does not handle the <dir> and <file> prefixes used by aapt; they
+  # are assumed not to be included in the pattern string.
+  return pattern.replace('!', '').split(':')
+
+
+def DeduceResourceDirsFromFileList(resource_files):
+  """Return a list of resource directories from a list of resource files."""
+  # Directory list order is important; we cannot use a set or other data
+  # structures that change order. This is because resource files of the same
+  # name in multiple res/ directories elide one another (the last one passed
+  # is used). Thus the order must be maintained to prevent non-deterministic
+  # and possibly flaky builds.
+  resource_dirs = []
+  for resource_path in resource_files:
+    # Resources are always 1 directory deep under res/.
+    res_dir = os.path.dirname(os.path.dirname(resource_path))
+    if res_dir not in resource_dirs:
+      resource_dirs.append(res_dir)
+
+  # Check if any resource_dirs are children of other ones. This indicates that a
+  # file was listed that is not exactly 1 directory deep under res/.
+  # E.g.:
+  # sources = ["java/res/values/foo.xml", "java/res/README.md"]
+  # ^^ This will cause "java" to be detected as a resource directory.
+  for a, b in itertools.permutations(resource_dirs, 2):
+    if not os.path.relpath(a, b).startswith('..'):
+      bad_sources = (s for s in resource_files
+                     if os.path.dirname(os.path.dirname(s)) == b)
+      msg = """\
+Resource(s) found that are not in a proper directory structure:
+  {}
+All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE"."""
+      raise Exception(msg.format('\n  '.join(bad_sources)))
+
+  return resource_dirs
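+
+# Example (paths are hypothetical):
+#   DeduceResourceDirsFromFileList(['java/res/values/strings.xml',
+#                                   'java/res/drawable/icon.png'])
+#   -> ['java/res']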
+
+
+def IterResourceFilesInDirectories(directories,
+                                   ignore_pattern=AAPT_IGNORE_PATTERN):
+  globs = _GenerateGlobs(ignore_pattern)
+  for d in directories:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = f
+        parent_dir = os.path.relpath(root, d)
+        if parent_dir != '.':
+          archive_path = os.path.join(parent_dir, f)
+        path = os.path.join(root, f)
+        if build_utils.MatchesGlob(archive_path, globs):
+          continue
+        yield path, archive_path
+
+
+class ResourceInfoFile(object):
+  """Helper for building up .res.info files."""
+
+  def __init__(self):
+    # Dict of archive_path -> source_path for the current target.
+    self._entries = {}
+    # List of (old_archive_path, new_archive_path) tuples.
+    self._renames = []
+    # We don't currently support using both AddMapping and MergeInfoFile.
+    self._add_mapping_was_called = False
+
+  def AddMapping(self, archive_path, source_path):
+    """Adds a single |archive_path| -> |source_path| entry."""
+    self._add_mapping_was_called = True
+    # "values/" files do not end up in the apk except through resources.arsc.
+    if archive_path.startswith('values'):
+      return
+    source_path = os.path.normpath(source_path)
+    new_value = self._entries.setdefault(archive_path, source_path)
+    if new_value != source_path:
+      raise Exception('Duplicate AddMapping for "{}". old={} new={}'.format(
+          archive_path, new_value, source_path))
+
+  def RegisterRename(self, old_archive_path, new_archive_path):
+    """Records an archive_path rename.
+
+    |old_archive_path| does not need to currently exist in the mappings. Renames
+    are buffered and replayed only when Write() is called.
+    """
+    if not old_archive_path.startswith('values'):
+      self._renames.append((old_archive_path, new_archive_path))
+
+  def MergeInfoFile(self, info_file_path):
+    """Merges the mappings from |info_file_path| into this object.
+
+    Any existing entries are overridden.
+    """
+    assert not self._add_mapping_was_called
+    # Allows clobbering, which is used when overriding resources.
+    with open(info_file_path) as f:
+      self._entries.update(l.rstrip().split('\t') for l in f)
+
+  def _ApplyRenames(self):
+    applied_renames = set()
+    ret = self._entries
+    for rename_tup in self._renames:
+      # Duplicate entries happen for resource overrides.
+      # Use a "seen" set to ensure we still error out if multiple renames
+      # happen for the same old_archive_path with different new_archive_paths.
+      if rename_tup in applied_renames:
+        continue
+      applied_renames.add(rename_tup)
+      old_archive_path, new_archive_path = rename_tup
+      ret[new_archive_path] = ret[old_archive_path]
+      del ret[old_archive_path]
+
+    self._entries = None
+    self._renames = None
+    return ret
+
+  def Write(self, info_file_path):
+    """Applies renames and writes out the file.
+
+    No other methods may be called after this.
+    """
+    entries = self._ApplyRenames()
+    lines = []
+    for archive_path, source_path in entries.items():
+      lines.append('{}\t{}\n'.format(archive_path, source_path))
+    with open(info_file_path, 'w') as info_file:
+      info_file.writelines(sorted(lines))
+
+
+def _ParseTextSymbolsFile(path, fix_package_ids=False):
+  """Given an R.txt file, returns a list of _TextSymbolEntry.
+
+  Args:
+    path: Input file path.
+    fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file
+      will be fixed to 0x7f.
+  Returns:
+    A list of _TextSymbolEntry instances.
+  Raises:
+    Exception: An unexpected line was detected in the input.
+  """
+  ret = []
+  with open(path) as f:
+    for line in f:
+      m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+      if not m:
+        raise Exception('Unexpected line in R.txt: %s' % line)
+      java_type, resource_type, name, value = m.groups()
+      if fix_package_ids:
+        value = _FixPackageIds(value)
+      ret.append(_TextSymbolEntry(java_type, resource_type, name, value))
+  return ret
+
+
+def _FixPackageIds(resource_value):
+  # Resource IDs for resources belonging to regular APKs have their first byte
+  # as 0x7f (package id). However with webview, since it is not a regular apk
+  # but used as a shared library, aapt is passed the --shared-resources flag
+  # which changes some of the package ids to 0x00.  This function normalises
+  # these (0x00) package ids to 0x7f, which the generated code in R.java changes
+  # to the correct package id at runtime.  resource_value is a string with
+  # either a single value '0x12345678', or an array of values like
+  # '{ 0xfedcba98, 0x01234567, 0x56789abc }'.
+  return resource_value.replace('0x00', '0x7f')
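+
+# Examples (per the replacement above):
+#   _FixPackageIds('0x00010203')     -> '0x7f010203'
+#   _FixPackageIds('{ 0x00010203 }') -> '{ 0x7f010203 }'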
+
+
+def _GetRTxtResourceNames(r_txt_path):
+  """Parse an R.txt file and extract the set of resource names from it."""
+  return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)}
+
+
+def GetRTxtStringResourceNames(r_txt_path):
+  """Parse an R.txt file and the list of its string resource names."""
+  return sorted({
+      entry.name
+      for entry in _ParseTextSymbolsFile(r_txt_path)
+      if entry.resource_type == 'string'
+  })
+
+
+def GenerateStringResourcesAllowList(module_r_txt_path, allowlist_r_txt_path):
+  """Generate a allowlist of string resource IDs.
+
+  Args:
+    module_r_txt_path: Input base module R.txt path.
+    allowlist_r_txt_path: Input allowlist R.txt path.
+  Returns:
+    A dictionary mapping numerical resource IDs to the corresponding
+    string resource names. The ID values are taken from string resources in
+    |module_r_txt_path| that are also listed by name in |allowlist_r_txt_path|.
+  """
+  allowlisted_names = {
+      entry.name
+      for entry in _ParseTextSymbolsFile(allowlist_r_txt_path)
+      if entry.resource_type == 'string'
+  }
+  return {
+      int(entry.value, 0): entry.name
+      for entry in _ParseTextSymbolsFile(module_r_txt_path)
+      if entry.resource_type == 'string' and entry.name in allowlisted_names
+  }
+
+
+class RJavaBuildOptions:
+  """A class used to model the various ways to build an R.java file.
+
+  This is used to control which resource ID variables will be final or
+  non-final, and whether an onResourcesLoaded() method will be generated
+  to adjust the non-final ones, when the corresponding library is loaded
+  at runtime.
+
+  Note that by default, all resources are final, and there is no
+  method generated, which corresponds to calling ExportNoResources().
+  """
+  def __init__(self):
+    self.has_constant_ids = True
+    self.resources_allowlist = None
+    self.has_on_resources_loaded = False
+    self.export_const_styleable = False
+    self.final_package_id = None
+    self.fake_on_resources_loaded = False
+
+  def ExportNoResources(self):
+    """Make all resource IDs final, and don't generate a method."""
+    self.has_constant_ids = True
+    self.resources_allowlist = None
+    self.has_on_resources_loaded = False
+    self.export_const_styleable = False
+
+  def ExportAllResources(self):
+    """Make all resource IDs non-final in the R.java file."""
+    self.has_constant_ids = False
+    self.resources_allowlist = None
+
+  def ExportSomeResources(self, r_txt_file_path):
+    """Only select specific resource IDs to be non-final.
+
+    Args:
+      r_txt_file_path: The path to an R.txt file. All resources named
+        in it will be non-final in the generated R.java file; all others
+        will be final.
+    """
+    self.has_constant_ids = True
+    self.resources_allowlist = _GetRTxtResourceNames(r_txt_file_path)
+
+  def ExportAllStyleables(self):
+    """Make all styleable constants non-final, even non-resources ones.
+
+    Resources that are styleable but not of int[] type are not actually
+    resource IDs but constants. By default they are always final. Call this
+    method to make them non-final anyway in the final R.java file.
+    """
+    self.export_const_styleable = True
+
+  def GenerateOnResourcesLoaded(self, fake=False):
+    """Generate an onResourcesLoaded() method.
+
+    This Java method will be called by the framework when the corresponding
+    library (which includes the R.java source file) is loaded at runtime.
+    This corresponds to the --shared-resources or --app-as-shared-lib flags
+    of 'aapt package'.
+
+    If |fake|, then the method will be empty-bodied so it compiles faster.
+    This is useful for dummy R.java files that will eventually be replaced
+    by real ones.
+    """
+    self.has_on_resources_loaded = True
+    self.fake_on_resources_loaded = fake
+
+  def SetFinalPackageId(self, package_id):
+    """Sets a package ID to be used for resources marked final."""
+    self.final_package_id = package_id
+
+  def _MaybeRewriteRTxtPackageIds(self, r_txt_path):
+    """Rewrites package IDs in the R.txt file if necessary.
+
+    If SetFinalPackageId() was called, some of the resource IDs may have had
+    their package ID changed. This function rewrites the R.txt file to match
+    those changes.
+    """
+    if self.final_package_id is None:
+      return
+
+    entries = _ParseTextSymbolsFile(r_txt_path)
+    with open(r_txt_path, 'w') as f:
+      for entry in entries:
+        value = entry.value
+        if self._IsResourceFinal(entry):
+          value = re.sub(r'0x(?:00|7f)',
+                         '0x{:02x}'.format(self.final_package_id), value)
+        f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type,
+                                       entry.name, value))
+
+  def _IsResourceFinal(self, entry):
+    """Determines whether a resource should be final or not.
+
+    Args:
+      entry: A _TextSymbolEntry instance.
+    Returns:
+      True iff the corresponding entry should be final.
+    """
+    if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
+      # A styleable constant may be exported as non-final after all.
+      return not self.export_const_styleable
+    elif not self.has_constant_ids:
+      # Every resource is non-final
+      return False
+    elif not self.resources_allowlist:
+      # No allowlist means all IDs are final.
+      return True
+    else:
+      # Otherwise, only resources named in the allowlist are non-final.
+      return entry.name not in self.resources_allowlist
+
+
+def CreateRJavaFiles(srcjar_dir,
+                     package,
+                     main_r_txt_file,
+                     extra_res_packages,
+                     rjava_build_options,
+                     srcjar_out,
+                     custom_root_package_name=None,
+                     grandparent_custom_package_name=None,
+                     extra_main_r_text_files=None,
+                     ignore_mismatched_values=False):
+  """Create all R.java files for a set of packages and R.txt files.
+
+  Args:
+    srcjar_dir: The top-level output directory for the generated files.
+    package: Package name for R java source files which will inherit
+      from the root R java file.
+    main_r_txt_file: The main R.txt file containing the valid values
+      of _all_ resource IDs.
+    extra_res_packages: A list of extra package names.
+    rjava_build_options: An RJavaBuildOptions instance that controls how
+      exactly the R.java file is generated.
+    srcjar_out: Path of desired output srcjar.
+    custom_root_package_name: Custom package name for module root R.java file,
+      (eg. vr for gen.vr package).
+    grandparent_custom_package_name: Custom root package name for the root
+      R.java file to inherit from. DFM root R.java files will have "base"
+      as the grandparent_custom_package_name. The format of this package name
+      is identical to custom_root_package_name.
+      (eg. for vr grandparent_custom_package_name would be "base")
+    extra_main_r_text_files: R.txt files to be added to the root R.java file.
+    ignore_mismatched_values: If True, ignore cases where a resource appears
+      multiple times with different entry values (useful when all the values
+      are dummy anyway).
+  Raises:
+    Exception if a package name appears several times in |extra_res_packages|.
+  """
+  rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file)
+
+  packages = list(extra_res_packages)
+
+  if package and package not in packages:
+    # Sometimes, an apk target and a resources target share the same
+    # AndroidManifest.xml and thus |package| will already be in |packages|.
+    packages.append(package)
+
+  # Map of (resource_type, name) -> Entry.
+  # Contains the correct values for resources.
+  all_resources = {}
+  all_resources_by_type = collections.defaultdict(list)
+
+  main_r_text_files = [main_r_txt_file]
+  if extra_main_r_text_files:
+    main_r_text_files.extend(extra_main_r_text_files)
+  for r_txt_file in main_r_text_files:
+    for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True):
+      entry_key = (entry.resource_type, entry.name)
+      if entry_key in all_resources:
+        if not ignore_mismatched_values:
+          assert entry == all_resources[entry_key], (
+              'Input R.txt %s provided a duplicate resource with a different '
+              'entry value. Got %s, expected %s.' %
+              (r_txt_file, entry, all_resources[entry_key]))
+      else:
+        all_resources[entry_key] = entry
+        all_resources_by_type[entry.resource_type].append(entry)
+        assert entry.resource_type in _ALL_RESOURCE_TYPES, (
+            'Unknown resource type: %s, add to _ALL_RESOURCE_TYPES!' %
+            entry.resource_type)
+
+  if custom_root_package_name:
+    # Custom package name is available, thus use it for root_r_java_package.
+    root_r_java_package = GetCustomPackagePath(custom_root_package_name)
+  else:
+    # Create a unique name using srcjar_out. Underscores are added to ensure
+    # no reserved keywords are used for directory names.
+    root_r_java_package = re.sub(r'[^\w\.]', '', srcjar_out.replace('/', '._'))
+
+  root_r_java_dir = os.path.join(srcjar_dir, *root_r_java_package.split('.'))
+  build_utils.MakeDirectory(root_r_java_dir)
+  root_r_java_path = os.path.join(root_r_java_dir, 'R.java')
+  root_java_file_contents = _RenderRootRJavaSource(
+      root_r_java_package, all_resources_by_type, rjava_build_options,
+      grandparent_custom_package_name)
+  with open(root_r_java_path, 'w') as f:
+    f.write(root_java_file_contents)
+
+  for package in packages:
+    _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
+                           rjava_build_options)
+
+
+def _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
+                           rjava_build_options):
+  """Generates an R.java source file."""
+  package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+  build_utils.MakeDirectory(package_r_java_dir)
+  package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+  java_file_contents = _RenderRJavaSource(package, root_r_java_package,
+                                          rjava_build_options)
+  with open(package_r_java_path, 'w') as f:
+    f.write(java_file_contents)
+
+
+# Resource IDs inside resource arrays are sorted. Application resource IDs start
+# with 0x7f but system resource IDs start with 0x01 thus system resource ids are
+# always at the start of the array. This function finds the index of the first
+# non system resource id to be used for package ID rewriting (we should not
+# rewrite system resource ids).
+def _GetNonSystemIndex(entry):
+  """Get the index of the first application resource ID within a resource
+  array."""
+  res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
+  for i, res_id in enumerate(res_ids):
+    if res_id.startswith('0x7f'):
+      return i
+  return len(res_ids)
+
+
+def _RenderRJavaSource(package, root_r_java_package, rjava_build_options):
+  """Generates the contents of a R.java file."""
+  template = Template(
+      """/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static final class {{ resource_type }} extends
+            {{ root_package }}.R.{{ resource_type }} {}
+    {% endfor %}
+    {% if has_on_resources_loaded %}
+    public static void onResourcesLoaded(int packageId) {
+        {{ root_package }}.R.onResourcesLoaded(packageId);
+    }
+    {% endif %}
+}
+""",
+      trim_blocks=True,
+      lstrip_blocks=True)
+
+  return template.render(
+      package=package,
+      resource_types=sorted(_ALL_RESOURCE_TYPES),
+      root_package=root_r_java_package,
+      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded)
+
+
+def GetCustomPackagePath(package_name):
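+  # e.g. GetCustomPackagePath('vr') returns 'gen.vr_module'.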
+  return 'gen.' + package_name + '_module'
+
+
+def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
+                           grandparent_custom_package_name):
+  """Render an R.java source file. See _CreateRJaveSourceFile for args info."""
+  final_resources_by_type = collections.defaultdict(list)
+  non_final_resources_by_type = collections.defaultdict(list)
+  for res_type, resources in all_resources_by_type.items():
+    for entry in resources:
+      # Entries in styleable that are not int[] are not actually resource IDs
+      # but constants.
+      if rjava_build_options._IsResourceFinal(entry):
+        final_resources_by_type[res_type].append(entry)
+      else:
+        non_final_resources_by_type[res_type].append(entry)
+
+  # Keep these assignments all on one line to make diffing against regular
+  # aapt-generated files easier.
+  create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
+  create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
+                   ' packageIdTransform;')
+  for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
+                        '{{ e.resource_type }}.{{ e.name }}.length; ++i')
+
+  # Here we diverge from what aapt does. Because we have so many
+  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+  # Java imposes. For this reason we split onResourcesLoaded into different
+  # methods for each resource type.
+  extends_string = ''
+  dep_path = ''
+  if grandparent_custom_package_name:
+    extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
+    dep_path = GetCustomPackagePath(grandparent_custom_package_name)
+
+  template = Template("""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static class {{ resource_type }} """ + extends_string + """ {
+        {% for e in final_resources[resource_type] %}
+        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% endfor %}
+        {% for e in non_final_resources[resource_type] %}
+            {% if e.value != '0' %}
+        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+            {% else %}
+        public static {{ e.java_type }} {{ e.name }};
+            {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if has_on_resources_loaded %}
+      {% if fake_on_resources_loaded %}
+    public static void onResourcesLoaded(int packageId) {
+    }
+      {% else %}
+    private static boolean sResourcesDidLoad;
+    public static void onResourcesLoaded(int packageId) {
+        if (sResourcesDidLoad) {
+            return;
+        }
+        sResourcesDidLoad = true;
+        int packageIdTransform = (packageId ^ 0x7f) << 24;
+        {% for resource_type in resource_types %}
+        onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+        {% for e in non_final_resources[resource_type] %}
+        {% if e.java_type == 'int[]' %}
+        for(""" + for_loop_condition + """) {
+            """ + create_id_arr + """
+        }
+        {% endif %}
+        {% endfor %}
+        {% endfor %}
+    }
+    {% for res_type in resource_types %}
+    private static void onResourcesLoaded{{ res_type|title }} (
+            int packageIdTransform) {
+        {% for e in non_final_resources[res_type] %}
+        {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+        """ + create_id + """
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+      {% endif %}
+    {% endif %}
+}
+""",
+                      trim_blocks=True,
+                      lstrip_blocks=True)
+  return template.render(
+      package=package,
+      resource_types=sorted(_ALL_RESOURCE_TYPES),
+      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
+      fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded,
+      final_resources=final_resources_by_type,
+      non_final_resources=non_final_resources_by_type,
+      startIndex=_GetNonSystemIndex,
+      parent_path=dep_path)
+
+
+def ExtractBinaryManifestValues(aapt2_path, apk_path):
+  """Returns (version_code, version_name, package_name) for the given apk."""
+  output = subprocess.check_output([
+      aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
+  ]).decode('utf-8')
+  version_code = re.search(r'versionCode.*?=(\d*)', output).group(1)
+  version_name = re.search(r'versionName.*?="(.*?)"', output).group(1)
+  package_name = re.search(r'package.*?="(.*?)"', output).group(1)
+  return version_code, version_name, package_name
+
+
+def ExtractArscPackage(aapt2_path, apk_path):
+  """Returns (package_name, package_id) of resources.arsc from apk_path."""
+  proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE)
+  for line in proc.stdout:
+    line = line.decode('utf-8')
+    # Package name=org.chromium.webview_shell id=7f
+    if line.startswith('Package'):
+      proc.kill()
+      parts = line.split()
+      package_name = parts[1].split('=')[1]
+      package_id = parts[2][3:]
+      return package_name, int(package_id, 16)
+
+  # aapt2 currently crashes when dumping webview resources, but not until after
+  # it prints the "Package" line (b/130553900).
+  sys.stderr.write(proc.stderr.read().decode('utf-8'))
+  raise Exception('Failed to find arsc package name')
+
+
+def _RenameSubdirsWithPrefix(dir_path, prefix):
+  subdirs = [
+      d for d in os.listdir(dir_path)
+      if os.path.isdir(os.path.join(dir_path, d))
+  ]
+  renamed_subdirs = []
+  for d in subdirs:
+    old_path = os.path.join(dir_path, d)
+    new_path = os.path.join(dir_path, '{}_{}'.format(prefix, d))
+    renamed_subdirs.append(new_path)
+    os.rename(old_path, new_path)
+  return renamed_subdirs
+
+
+def _HasMultipleResDirs(zip_path):
+  """Checks for magic comment set by prepare_resources.py
+
+  Returns: True iff the zipfile has the magic comment that means it contains
+  multiple res/ dirs inside instead of just contents of a single res/ dir
+  (without a wrapping res/).
+  """
+  with zipfile.ZipFile(zip_path) as z:
+    return z.comment == MULTIPLE_RES_MAGIC_STRING
+
+
+def ExtractDeps(dep_zips, deps_dir):
+  """Extract a list of resource dependency zip files.
+
+  Args:
+    dep_zips: A list of zip file paths, each one will be extracted to
+      a subdirectory of |deps_dir|, named after the zip file's path (e.g.
+      '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
+    deps_dir: Top-level extraction directory.
+  Returns:
+    The list of all sub-directory paths, relative to |deps_dir|.
+  Raises:
+    Exception: If a sub-directory already exists with the same name before
+      extraction.
+  """
+  dep_subdirs = []
+  for z in dep_zips:
+    subdirname = z.replace(os.path.sep, '_')
+    subdir = os.path.join(deps_dir, subdirname)
+    if os.path.exists(subdir):
+      raise Exception('Resource zip name conflict: ' + subdirname)
+    build_utils.ExtractAll(z, path=subdir)
+    if _HasMultipleResDirs(z):
+      # basename of the directory is used to create a zip during resource
+      # compilation, include the path in the basename to help blame errors on
+      # the correct target. For example directory 0_res may be renamed
+      # chrome_android_chrome_app_java_resources_0_res pointing to the name and
+      # path of the android_resources target from whence it came.
+      subdir_subdirs = _RenameSubdirsWithPrefix(subdir, subdirname)
+      dep_subdirs.extend(subdir_subdirs)
+    else:
+      dep_subdirs.append(subdir)
+  return dep_subdirs
+
+
+class _ResourceBuildContext(object):
+  """A temporary directory for packaging and compiling Android resources.
+
+  Args:
+    temp_dir: Optional root build directory path. If None, a temporary
+      directory will be created, and removed in Close().
+  """
+
+  def __init__(self, temp_dir=None, keep_files=False):
+    """Initialized the context."""
+    # The top-level temporary directory.
+    if temp_dir:
+      self.temp_dir = temp_dir
+      os.makedirs(temp_dir)
+    else:
+      self.temp_dir = tempfile.mkdtemp()
+    self.remove_on_exit = not keep_files
+
+    # A location to store resources extracted from dependency zip files.
+    self.deps_dir = os.path.join(self.temp_dir, 'deps')
+    os.mkdir(self.deps_dir)
+    # A location to place aapt-generated files.
+    self.gen_dir = os.path.join(self.temp_dir, 'gen')
+    os.mkdir(self.gen_dir)
+    # A location to place generated R.java files.
+    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
+    os.mkdir(self.srcjar_dir)
+    # Temporary file locations.
+    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
+    self.srcjar_path = os.path.join(self.temp_dir, 'R.srcjar')
+    self.info_path = os.path.join(self.temp_dir, 'size.info')
+    self.stable_ids_path = os.path.join(self.temp_dir, 'in_ids.txt')
+    self.emit_ids_path = os.path.join(self.temp_dir, 'out_ids.txt')
+    self.proguard_path = os.path.join(self.temp_dir, 'keeps.flags')
+    self.proguard_main_dex_path = os.path.join(self.temp_dir, 'maindex.flags')
+    self.arsc_path = os.path.join(self.temp_dir, 'out.ap_')
+    self.proto_path = os.path.join(self.temp_dir, 'out.proto.ap_')
+    self.optimized_arsc_path = os.path.join(self.temp_dir, 'out.opt.ap_')
+    self.optimized_proto_path = os.path.join(self.temp_dir, 'out.opt.proto.ap_')
+
+  def Close(self):
+    """Close the context and destroy all temporary files."""
+    if self.remove_on_exit:
+      shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext(temp_dir=None, keep_files=False):
+  """Generator for a _ResourceBuildContext instance."""
+  context = None
+  try:
+    context = _ResourceBuildContext(temp_dir, keep_files)
+    yield context
+  finally:
+    if context:
+      context.Close()
+
+
+def ResourceArgsParser():
+  """Create an argparse.ArgumentParser instance with common argument groups.
+
+  Returns:
+    A tuple of (parser, in_group, out_group) corresponding to the parser
+    instance, and the input and output argument groups for it, respectively.
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+
+  input_opts = parser.add_argument_group('Input options')
+  output_opts = parser.add_argument_group('Output options')
+
+  build_utils.AddDepfileOption(output_opts)
+
+  input_opts.add_argument('--include-resources', required=True, action="append",
+                        help='Paths to arsc resource files used to link '
+                             'against. Can be specified multiple times.')
+
+  input_opts.add_argument('--dependencies-res-zips', required=True,
+                    help='Resources zip archives from dependents. Required to '
+                         'resolve @type/foo references into dependent '
+                         'libraries.')
+
+  input_opts.add_argument(
+      '--r-text-in',
+      help='Path to pre-existing R.txt. Its resource IDs override those found '
+           'in the aapt-generated R.txt when generating R.java.')
+
+  input_opts.add_argument(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for.')
+
+  return (parser, input_opts, output_opts)
+
+
+def HandleCommonOptions(options):
+  """Handle common command-line options after parsing.
+
+  Args:
+    options: the result of parse_args() on the parser returned by
+        ResourceArgsParser(). This function updates a few common fields.
+  """
+  options.include_resources = [build_utils.ParseGnList(r) for r in
+                               options.include_resources]
+  # Flatten list of include resources list to make it easier to use.
+  options.include_resources = [r for resources in options.include_resources
+                               for r in resources]
+
+  options.dependencies_res_zips = (
+      build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as default value since some scripts explicitly pass "".
+  if options.extra_res_packages:
+    options.extra_res_packages = (
+        build_utils.ParseGnList(options.extra_res_packages))
+  else:
+    options.extra_res_packages = []
+
+
+def ParseAndroidResourceStringsFromXml(xml_data):
+  """Parse and Android xml resource file and extract strings from it.
+
+  Args:
+    xml_data: XML file data.
+  Returns:
+    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
+    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
+    corresponding to namespaces declared in the <resources> element.
+  """
+  # NOTE: This uses regular expression matching because parsing with something
+  # like ElementTree makes it tedious to properly parse some of the structured
+  # text found in string resources, e.g.:
+  #      <string msgid="3300176832234831527" \
+  #         name="abc_shareactionprovider_share_with_application">\
+  #             "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
+  #      </string>
+  result = {}
+
+  # Find <resources> start tag and extract namespaces from it.
+  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
+  if not m:
+    raise Exception('<resources> start tag expected: ' + xml_data)
+  input_data = xml_data[m.end():]
+  resource_attrs = m.group(1)
+  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
+  namespaces = {}
+  while resource_attrs:
+    m = re_namespace.match(resource_attrs)
+    if not m:
+      break
+    namespaces[m.group(2)] = m.group(3)
+    resource_attrs = resource_attrs[m.end(1):]
+
+  # Find each string element now.
+  re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
+  re_string_element_end = re.compile('</string>')
+  while input_data:
+    m = re_string_element_start.search(input_data)
+    if not m:
+      break
+    name = m.group(2)
+    input_data = input_data[m.end():]
+    m2 = re_string_element_end.search(input_data)
+    if not m2:
+      raise Exception('Expected closing string tag: ' + input_data)
+    text = input_data[:m2.start()]
+    input_data = input_data[m2.end():]
+    if len(text) and text[0] == '"' and text[-1] == '"':
+      text = text[1:-1]
+    result[name] = text
+
+  return result, namespaces
+
+
+def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
+  """Generate an XML text corresponding to an Android resource strings map.
+
+  Args:
+    names_to_utf8_text: A dictionary mapping resource names to localized
+      text (encoded as UTF-8).
+    namespaces: A map of namespace prefix to URL.
+  Returns:
+    New non-Unicode string containing an XML data structure describing the
+    input as an Android resource .xml file.
+  """
+  result = '<?xml version="1.0" encoding="utf-8"?>\n'
+  result += '<resources'
+  if namespaces:
+    for prefix, url in sorted(namespaces.items()):
+      result += ' xmlns:%s="%s"' % (prefix, url)
+  result += '>\n'
+  if not names_to_utf8_text:
+    result += '<!-- this file intentionally empty -->\n'
+  else:
+    for name, utf8_text in sorted(names_to_utf8_text.items()):
+      result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
+  result += '</resources>\n'
+  return result.encode('utf8')
+
+
+def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
+  """Remove unwanted localized strings from an Android resource .xml file.
+
+  This function takes a |string_predicate| callable object that will
+  receive a resource string name, and should return True iff the
+  corresponding <string> element should be kept in the file.
+
+  Args:
+    xml_file_path: Android resource strings xml file path.
+    string_predicate: A predicate function which will receive the string name
+      and shall return True iff the corresponding <string> element is kept.
+  """
+  with open(xml_file_path) as f:
+    xml_data = f.read()
+  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)
+
+  string_deletion = False
+  for name in list(strings_map.keys()):
+    if not string_predicate(name):
+      del strings_map[name]
+      string_deletion = True
+
+  if string_deletion:
+    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
+    with open(xml_file_path, 'wb') as f:
+      f.write(new_xml_data)
diff --git a/src/build/android/gyp/util/resource_utils_test.py b/src/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000..62d5b43
--- /dev/null
+++ b/src/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils  # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<!-- this file intentionally empty -->
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+    'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+    'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+    'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+    'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_ALLOWLIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test allowlist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_ALLOWLIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string ThisStringDoesNotAppear 0x7f0fffff
+'''
+
+_TEST_R_TEXT_RESOURCES_IDS = {
+    0x7f0c0105: 'AllowedDomainsForAppsDesc',
+    0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
+}
+
+# Names of string resources in _TEST_R_TXT, should be sorted!
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
+    'AllowedDomainsForAppsDesc',
+    'AllowedDomainsForAppsTitle',
+    'AlternateErrorPagesEnabledDesc',
+    'AlternateErrorPagesEnabledTitle',
+    'AuthAndroidNegotiateAccountTypeDesc',
+])
+
+
+def _CreateTestFile(tmp_dir, file_name, file_data):
+  file_path = os.path.join(tmp_dir, file_name)
+  with open(file_path, 'wt') as f:
+    f.write(file_data)
+  return file_path
+
+
+
+class ResourceUtilsTest(unittest.TestCase):
+
+  def test_GetRTxtStringResourceNames(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      self.assertListEqual(
+          resource_utils.GetRTxtStringResourceNames(tmp_file),
+          _TEST_R_TXT_STRING_RESOURCE_NAMES)
+
+  def test_GenerateStringResourcesAllowList(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      tmp_allowlist_rtxt_file = _CreateTestFile(tmp_dir, "test_allowlist_R.txt",
+                                                _TEST_ALLOWLIST_R_TXT)
+      self.assertDictEqual(
+          resource_utils.GenerateStringResourcesAllowList(
+              tmp_module_rtxt_file, tmp_allowlist_rtxt_file),
+          _TEST_R_TEXT_RESOURCES_IDS)
+
+  def test_IsAndroidLocaleQualifier(self):
+    good_locales = [
+        'en',
+        'en-rUS',
+        'fil',
+        'fil-rPH',
+        'iw',
+        'iw-rIL',
+        'b+en',
+        'b+en+US',
+        'b+ja+Latn',
+        'b+ja+JP+Latn',
+        'b+cmn+Hant-TW',
+    ]
+    bad_locales = [
+        'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
+    ]
+    for locale in good_locales:
+      self.assertTrue(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a good locale!" % locale)
+
+    for locale in bad_locales:
+      self.assertFalse(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a bad locale!" % locale)
+
+  def test_ToAndroidLocaleName(self):
+    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
+        'en': 'en',
+        'en-US': 'en-rUS',
+        'en-FOO': 'en-rFOO',
+        'fil': 'tl',
+        'tl': 'tl',
+        'he': 'iw',
+        'he-IL': 'iw-rIL',
+        'id': 'in',
+        'id-BAR': 'in-rBAR',
+        'nb': 'nb',
+        'yi': 'ji'
+    }
+    for chromium_locale, android_locale in \
+        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.items():
+      result = resource_utils.ToAndroidLocaleName(chromium_locale)
+      self.assertEqual(result, android_locale)
+
+  def test_ToChromiumLocaleName(self):
+    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
+        'foo': 'foo',
+        'foo-rBAR': 'foo-BAR',
+        'b+lll': 'lll',
+        'b+ll+Extra': 'll',
+        'b+ll+RR': 'll-RR',
+        'b+lll+RR+Extra': 'lll-RR',
+        'b+ll+RRR+Extra': 'll-RRR',
+        'b+ll+Ssss': 'll-Ssss',
+        'b+ll+Ssss+Extra': 'll-Ssss',
+        'b+ll+Ssss+RR': 'll-Ssss-RR',
+        'b+ll+Ssss+RRR': 'll-Ssss-RRR',
+        'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR',
+        'b+ll+Whatever': 'll',
+        'en': 'en',
+        'en-rUS': 'en-US',
+        'en-US': None,
+        'en-FOO': None,
+        'en-rFOO': 'en-FOO',
+        'es-rES': 'es-ES',
+        'es-rUS': 'es-419',
+        'tl': 'fil',
+        'fil': 'fil',
+        'iw': 'he',
+        'iw-rIL': 'he-IL',
+        'b+iw+IL': 'he-IL',
+        'in': 'id',
+        'in-rBAR': 'id-BAR',
+        'id-rBAR': 'id-BAR',
+        'nb': 'nb',
+        'no': 'nb',  # http://crbug.com/920960
+    }
+    for android_locale, chromium_locale in \
+        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.items():
+      result = resource_utils.ToChromiumLocaleName(android_locale)
+      self.assertEqual(result, chromium_locale)
+
+  def test_FindLocaleInStringResourceFilePath(self):
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values/whatever.xml'))
+    self.assertEqual(
+        'foo',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/whatever.xml'))
+    self.assertEqual(
+        'foo-rBAR',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo-rBAR/whatever.xml'))
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/ignore-subdirs/whatever.xml'))
+
+  def test_ParseAndroidResourceStringsFromXml(self):
+    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
+        _TEST_XML_INPUT_1)
+    self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
+    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)
+
+  def test_GenerateAndroidResourceStringsXml(self):
+    # First, an empty strings map with no namespaces.
+    result = resource_utils.GenerateAndroidResourceStringsXml({})
+    self.assertEqual(result.decode('utf8'), _TEST_XML_OUTPUT_EMPTY)
+
+    result = resource_utils.GenerateAndroidResourceStringsXml(
+        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+    self.assertEqual(result.decode('utf8'), _TEST_XML_INPUT_1)
+
+  @staticmethod
+  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
+    values_dir = os.path.join(output_dir, 'values-' + locale)
+    build_utils.MakeDirectory(values_dir)
+    file_path = os.path.join(values_dir, 'strings.xml')
+    with open(file_path, 'wb') as f:
+      file_data = resource_utils.GenerateAndroidResourceStringsXml(
+          string_map, namespaces)
+      f.write(file_data)
+    return file_path
+
+  def _CheckTestResourceFile(self, file_path, expected_data):
+    with open(file_path) as f:
+      file_data = f.read()
+    self.assertEqual(file_data, expected_data)
+
+  def test_FilterAndroidResourceStringsXml(self):
+    with build_utils.TempDir() as tmp_path:
+      test_file = self._CreateTestResourceFile(
+          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+      resource_utils.FilterAndroidResourceStringsXml(
+          test_file, lambda x: x in _TEST_RESOURCES_ALLOWLIST_1)
+      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/gyp/util/resources_parser.py b/src/build/android/gyp/util/resources_parser.py
new file mode 100644
index 0000000..8d8d69c
--- /dev/null
+++ b/src/build/android/gyp/util/resources_parser.py
@@ -0,0 +1,142 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import re
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
+_TextSymbolEntry = collections.namedtuple(
+    'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))
+
+_DUMMY_RTXT_ID = '0x7f010001'
+_DUMMY_RTXT_INDEX = '1'
+
+
+def _ResourceNameToJavaSymbol(resource_name):
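+  # e.g. 'android:textColor' becomes 'android_textColor'.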
+  return re.sub(r'[\.:]', '_', resource_name)
+
+
+class RTxtGenerator(object):
+  def __init__(self,
+               res_dirs,
+               ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN):
+    self.res_dirs = res_dirs
+    self.ignore_pattern = ignore_pattern
+
+  def _ParseDeclareStyleable(self, node):
+    ret = set()
+    stylable_name = _ResourceNameToJavaSymbol(node.attrib['name'])
+    ret.add(
+        _TextSymbolEntry('int[]', 'styleable', stylable_name,
+                         '{{{}}}'.format(_DUMMY_RTXT_ID)))
+    for child in node:
+      if child.tag == 'eat-comment':
+        continue
+      if child.tag != 'attr':
+        # This parser expects everything inside <declare-styleable/> to be
+        # either an attr or an eat-comment. If new resource xml files are added
+        # that do not conform to this, this parser needs updating.
+        raise Exception('Unexpected tag {} inside <declare-styleable/>'.format(
+            child.tag))
+      entry_name = '{}_{}'.format(
+          stylable_name, _ResourceNameToJavaSymbol(child.attrib['name']))
+      ret.add(
+          _TextSymbolEntry('int', 'styleable', entry_name, _DUMMY_RTXT_INDEX))
+      if not child.attrib['name'].startswith('android:'):
+        resource_name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'attr', resource_name, _DUMMY_RTXT_ID))
+      for entry in child:
+        if entry.tag not in ('enum', 'flag'):
+          # This parser expects everything inside <attr/> to be either an
+          # <enum/> or a <flag/>. If new resource xml files are added that do
+          # not conform to this, this parser needs updating.
+          raise Exception('Unexpected tag {} inside <attr/>'.format(entry.tag))
+        resource_name = _ResourceNameToJavaSymbol(entry.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _ExtractNewIdsFromNode(self, node):
+    ret = set()
+    # Sometimes there are @+id/ in random attributes (not just in android:id)
+    # and apparently that is valid. See:
+    # https://developer.android.com/reference/android/widget/RelativeLayout.LayoutParams.html
+    for value in node.attrib.values():
+      if value.startswith('@+id/'):
+        resource_name = value[5:]
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    for child in node:
+      ret.update(self._ExtractNewIdsFromNode(child))
+    return ret
+
+  def _ExtractNewIdsFromXml(self, xml_path):
+    root = ElementTree.parse(xml_path).getroot()
+    return self._ExtractNewIdsFromNode(root)
+
+  def _ParseValuesXml(self, xml_path):
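+    # e.g. a <string name="foo">...</string> child produces
+    # _TextSymbolEntry('int', 'string', 'foo', _DUMMY_RTXT_ID).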
+    ret = set()
+    root = ElementTree.parse(xml_path).getroot()
+    assert root.tag == 'resources'
+    for child in root:
+      if child.tag == 'eat-comment':
+        # eat-comment is just a dummy documentation element.
+        continue
+      if child.tag == 'skip':
+        # skip is just a dummy element.
+        continue
+      if child.tag == 'declare-styleable':
+        ret.update(self._ParseDeclareStyleable(child))
+      else:
+        if child.tag == 'item':
+          resource_type = child.attrib['type']
+        elif child.tag in ('array', 'integer-array', 'string-array'):
+          resource_type = 'array'
+        else:
+          resource_type = child.tag
+        name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _CollectResourcesListFromDirectory(self, res_dir):
+    ret = set()
+    globs = resource_utils._GenerateGlobs(self.ignore_pattern)
+    for root, _, files in os.walk(res_dir):
+      resource_type = os.path.basename(root)
+      if '-' in resource_type:
+        resource_type = resource_type[:resource_type.index('-')]
+      for f in files:
+        if build_utils.MatchesGlob(f, globs):
+          continue
+        if resource_type == 'values':
+          ret.update(self._ParseValuesXml(os.path.join(root, f)))
+        else:
+          if '.' in f:
+            resource_name = f[:f.index('.')]
+          else:
+            resource_name = f
+          ret.add(
+              _TextSymbolEntry('int', resource_type, resource_name,
+                               _DUMMY_RTXT_ID))
+          # Types other than layouts can also contain new ids (e.g. menus and
+          # drawables). Just in case, look for new ids in all XML files.
+          if f.endswith('.xml'):
+            ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f)))
+    return ret
+
+  def _CollectResourcesListFromDirectories(self):
+    ret = set()
+    for res_dir in self.res_dirs:
+      ret.update(self._CollectResourcesListFromDirectory(res_dir))
+    return ret
+
+  def WriteRTxtFile(self, rtxt_path):
+    resources = self._CollectResourcesListFromDirectories()
+    with build_utils.AtomicOutput(rtxt_path, mode='w') as f:
+      for resource in resources:
+        line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
+            resource)
+        f.write(line)
diff --git a/src/build/android/gyp/util/server_utils.py b/src/build/android/gyp/util/server_utils.py
new file mode 100644
index 0000000..e050ef6
--- /dev/null
+++ b/src/build/android/gyp/util/server_utils.py
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import json
+import os
+import socket
+
+# Use a unix abstract domain socket:
+# https://man7.org/linux/man-pages/man7/unix.7.html#:~:text=abstract:
+SOCKET_ADDRESS = '\0chromium_build_server_socket'
+BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER'
+
+
+def MaybeRunCommand(name, argv, stamp_file):
+  """Returns True if the command was successfully sent to the build server."""
+
+  # When the build server runs a command, it sets this environment variable.
+  # This prevents infinite recursion where the script sends a request to the
+  # build server, then the build server runs the script, and then the script
+  # sends another request to the build server.
+  if BUILD_SERVER_ENV_VARIABLE in os.environ:
+    return False
+  with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
+    try:
+      sock.connect(SOCKET_ADDRESS)
+      sock.sendall(
+          json.dumps({
+              'name': name,
+              'cmd': argv,
+              'cwd': os.getcwd(),
+              'stamp_file': stamp_file,
+          }).encode('utf8'))
+    except socket.error as e:
+      # [Errno 111] Connection refused. Either the server has not been started
+      #             or the server is not currently accepting new connections.
+      if e.errno == 111:
+        return False
+      raise e
+  return True
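+
+
+# Illustrative caller pattern (target name and stamp path are hypothetical):
+#   if server_utils.MaybeRunCommand('compile_foo', sys.argv, 'foo.stamp'):
+#     sys.exit(0)  # The build server will run the command and touch the stamp.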
diff --git a/src/build/android/gyp/util/zipalign.py b/src/build/android/gyp/util/zipalign.py
new file mode 100644
index 0000000..c5c4ea8
--- /dev/null
+++ b/src/build/android/gyp/util/zipalign.py
@@ -0,0 +1,97 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+
+_FIXED_ZIP_HEADER_LEN = 30
+
+
+def _PatchedDecodeExtra(self):
+  # Try to decode the extra field.
+  extra = self.extra
+  unpack = struct.unpack
+  while len(extra) >= 4:
+    tp, ln = unpack('<HH', extra[:4])
+    if tp == 1:
+      if ln >= 24:
+        counts = unpack('<QQQ', extra[4:28])
+      elif ln == 16:
+        counts = unpack('<QQ', extra[4:20])
+      elif ln == 8:
+        counts = unpack('<Q', extra[4:12])
+      elif ln == 0:
+        counts = ()
+      else:
+        raise RuntimeError("Corrupt extra field %s" % (ln, ))
+
+      idx = 0
+
+      # ZIP64 extension (large files and/or large archives)
+      if self.file_size in (0xffffffffffffffff, 0xffffffff):
+        self.file_size = counts[idx]
+        idx += 1
+
+      if self.compress_size == 0xffffffff:
+        self.compress_size = counts[idx]
+        idx += 1
+
+      if self.header_offset == 0xffffffff:
+        self.header_offset = counts[idx]
+        idx += 1
+
+    extra = extra[ln + 4:]
+
+
+def ApplyZipFileZipAlignFix():
+  """Fix zipfile.ZipFile() to be able to open zipaligned .zip files.
+
+  Android's zip alignment uses not-quite-valid zip headers to perform alignment.
+  Python < 3.4 crashes when trying to load them.
+  https://bugs.python.org/issue14315
+  """
+  if sys.version_info < (3, 4):
+    zipfile.ZipInfo._decodeExtra = (  # pylint: disable=protected-access
+        _PatchedDecodeExtra)
+
+
+def _SetAlignment(zip_obj, zip_info, alignment):
+  """Sets a ZipInfo's extra field such that the file will be aligned.
+
+  Args:
+    zip_obj: The ZipFile object that is being written.
+    zip_info: The ZipInfo object about to be written.
+    alignment: The amount of alignment (e.g. 4, or 4*1024).
+  """
+  cur_offset = zip_obj.fp.tell()
+  header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename)
+  padding_needed = (alignment - (
+      (cur_offset + header_size) % alignment)) % alignment
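+  # e.g. with alignment == 4 and (cur_offset + header_size) == 30,
+  # padding_needed == (4 - 30 % 4) % 4 == 2.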
+
+  # Python writes |extra| to both the local file header and the central
+  # directory's file header. Android's zipalign tool writes only to the
+  # local file header, so there is more overhead in using python to align.
+  zip_info.extra = b'\0' * padding_needed
+
+
+def AddToZipHermetic(zip_file,
+                     zip_path,
+                     src_path=None,
+                     data=None,
+                     compress=None,
+                     alignment=None):
+  """Same as build_utils.AddToZipHermetic(), but with alignment.
+
+  Args:
+    alignment: If set, align the data of the entry to this many bytes.
+  """
+  zipinfo = build_utils.HermeticZipInfo(filename=zip_path)
+  if alignment:
+    _SetAlignment(zip_file, zipinfo, alignment)
+  build_utils.AddToZipHermetic(
+      zip_file, zipinfo, src_path=src_path, data=data, compress=compress)
diff --git a/src/build/android/gyp/validate_static_library_dex_references.py b/src/build/android/gyp/validate_static_library_dex_references.py
new file mode 100755
index 0000000..b14ca3c
--- /dev/null
+++ b/src/build/android/gyp/validate_static_library_dex_references.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.dex import dex_parser
+from util import build_utils
+
+_FLAGS_PATH = (
+    '//chrome/android/java/static_library_dex_reference_workarounds.flags')
+
+
+def _FindIllegalStaticLibraryReferences(static_lib_dex_files,
+                                        main_apk_dex_files):
+  main_apk_defined_types = set()
+  for dex_file in main_apk_dex_files:
+    for class_def_item in dex_file.class_def_item_list:
+      main_apk_defined_types.add(
+          dex_file.GetTypeString(class_def_item.class_idx))
+
+  static_lib_referenced_types = set()
+  for dex_file in static_lib_dex_files:
+    for type_item in dex_file.type_item_list:
+      static_lib_referenced_types.add(
+          dex_file.GetString(type_item.descriptor_idx))
+
+  return main_apk_defined_types.intersection(static_lib_referenced_types)
+
+
+def _DexFilesFromPath(path):
+  if zipfile.is_zipfile(path):
+    with zipfile.ZipFile(path) as z:
+      return [
+          dex_parser.DexFile(bytearray(z.read(name))) for name in z.namelist()
+          if re.match(r'.*classes[0-9]*\.dex$', name)
+      ]
+  else:
+    # Read the .dex as binary and return a list to match the zip branch above.
+    with open(path, 'rb') as f:
+      return [dex_parser.DexFile(bytearray(f.read()))]
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--depfile', required=True, help='Path to output depfile.')
+  parser.add_argument(
+      '--stamp', required=True, help='Path to file to touch upon success.')
+  parser.add_argument(
+      '--static-library-dex',
+      required=True,
+      help='classes.dex or classes.zip for the static library APK that was '
+      'proguarded with other dependent APKs')
+  parser.add_argument(
+      '--static-library-dependent-dex',
+      required=True,
+      action='append',
+      dest='static_library_dependent_dexes',
+      help='classes.dex or classes.zip for the APKs that use the static '
+      'library APK')
+  args = parser.parse_args(args)
+
+  static_library_dexfiles = _DexFilesFromPath(args.static_library_dex)
+  for path in args.static_library_dependent_dexes:
+    dependent_dexfiles = _DexFilesFromPath(path)
+    illegal_references = _FindIllegalStaticLibraryReferences(
+        static_library_dexfiles, dependent_dexfiles)
+
+    if illegal_references:
+      msg = 'Found illegal references from {} to {}\n'.format(
+          args.static_library_dex, path)
+      msg += 'Add a -keep rule to avoid this. '
+      msg += 'See {} for an example and why this is necessary.\n'.format(
+          _FLAGS_PATH)
+      msg += 'The illegal references are:\n'
+      msg += '\n'.join(illegal_references)
+      sys.stderr.write(msg)
+      sys.exit(1)
+
+  input_paths = [args.static_library_dex] + args.static_library_dependent_dexes
+  build_utils.Touch(args.stamp)
+  build_utils.WriteDepfile(args.depfile, args.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/validate_static_library_dex_references.pydeps b/src/build/android/gyp/validate_static_library_dex_references.pydeps
new file mode 100644
index 0000000..e57172d
--- /dev/null
+++ b/src/build/android/gyp/validate_static_library_dex_references.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/validate_static_library_dex_references.pydeps build/android/gyp/validate_static_library_dex_references.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+util/__init__.py
+util/build_utils.py
+validate_static_library_dex_references.py
diff --git a/src/build/android/gyp/write_build_config.py b/src/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..0600fdc
--- /dev/null
+++ b/src/build/android/gyp/write_build_config.py
@@ -0,0 +1,2087 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $OUTPUT_CHROMIUM_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of `.build_config` generated during
+the Android build of Chromium. For a higher-level explanation of these files,
+please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. The latter has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+    The following types are known by the internal GN build rules and the
+    build scripts altogether:
+
+    * [java_binary](#target_java_binary)
+    * [java_annotation_processor](#target_java_annotation_processor)
+    * [junit_binary](#target_junit_binary)
+    * [java_library](#target_java_library)
+    * [android_assets](#target_android_assets)
+    * [android_resources](#target_android_resources)
+    * [android_apk](#target_android_apk)
+    * [android_app_bundle_module](#target_android_app_bundle_module)
+    * [android_app_bundle](#target_android_app_bundle)
+    * [dist_jar](#target_dist_jar)
+    * [dist_aar](#target_dist_aar)
+    * [group](#target_group)
+
+    See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+    NOTE: Because the `.build_config` of a given target is always generated
+    after the `.build_config` of its dependencies, the `write_build_config.py`
+    script can use chains of `deps_configs` to compute transitive dependencies
+    for each target when needed.
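+
+    As an illustration only (all paths and names below are made up), the
+    required keys form a skeleton like:
+
+        {
+          "deps_info": {
+            "type": "java_library",
+            "path": "gen/foo/foo.build_config",
+            "name": "foo.build_config",
+            "deps_configs": ["gen/bar/bar.build_config"]
+          }
+        }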
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+    NOTE: For `android_resources` targets,
+    this is the package name for the corresponding R class. For `android_apk`
+    targets, this is the corresponding package name. This does *not* appear for
+    other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+* `deps_info['base_module_config']`:
+Only seen for the [`android_app_bundle`](#target_android_app_bundle) type.
+Path to the base module for the bundle.
+
+* `deps_info['is_base_module']`:
+Only seen for the
+[`android_app_bundle_module`](#target_android_app_bundle_module)
+type. Whether or not this module is the base module for some bundle.
+
+* `deps_info['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `deps_info['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `deps_info['dependency_r_txt_files']`:
+Exists only on dist_aar. It is the list of deps_info['r_text_path'] from
+transitive dependencies. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+
+* `deps_info['res_sources_path']`:
+Path to a file containing a list of resource source files used by the
+`android_resources` target. This replaces `deps_info['resource_dirs']`, which
+is no longer used.
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (but no `R.txt`, `R.java` or `R.class` files).
+
+    If `deps_info['resource_dirs']` is missing, this must point to a prebuilt
+    `.aar` archive containing resources. Otherwise, this will point to a
+    zip archive generated at build time, wrapping the content of
+    `deps_info['resource_dirs']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['resource_overlay']`:
+Optional. Whether the resources in `resources_zip` should override resources
+with the same name. Does not affect the behaviour of any `android_resources()`
+dependencies of this target. If a target with `resource_overlay=true` depends
+on another target with `resource_overlay=true`, the depending target overrides
+the other.
+
+* `deps_info['r_text_path']`:
+Path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text_path']`. This is
+*always* generated from the content of `deps_info['r_text_path']` by the
+`build/android/gyp/process_resources.py` script.
+
+* `deps_info['static_library_dependent_classpath_configs']`:
+Sub dictionary mapping .build_config paths to lists of jar files. For static
+library APKs, this defines which input jars belong to each
+static_library_dependent_target.
+
+* `deps_info['static_library_proguard_mapping_output_paths']`:
+Additional paths to copy the ProGuard mapping file to for static library
+APKs.
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. arbitrary
+files that will be placed under `//assets/` within the final APK.
+
+These use a `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final `//assets/` sub-directory. When this happens, this contains the list of
+all renamed output file paths.
+
+    NOTE: When not empty, the first items of `assets['sources']` must match
+    every item in this list. Extra sources correspond to non-renamed sources.
+
+    NOTE: This comes from the `asset_renaming_destinations` parameter for the
+    `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `BuildConfig.java` source file, listing all supported locales in
+the current build.
+
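+For illustration only, a hypothetical `deps_info['assets']` value (shown as a
+Python literal) for a target with one renamed locale pak and one regular file
+might be:
+
+    {
+      'sources': ['gen/strings/en-US.pak', 'data/extra.txt'],
+      'outputs': ['locales/en-US.pak'],
+      'disable_compression': True,
+      'treat_as_locale_paks': True,
+    }
+
+Here `gen/strings/en-US.pak` is renamed to `locales/en-US.pak`, while
+`data/extra.txt` keeps its basename, per the NOTE above.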
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe targets that wrap Java bytecode, either created
+by compiling sources or provided as a prebuilt jar.
+
+* `deps_info['public_deps_configs']`: List of paths to the `.build_config` files
+of *direct* dependencies of the current target which are exposed as part of the
+current target's public API. This should be a subset of
+`deps_info['deps_configs']`.
+
+* `deps_info['ignore_dependency_public_deps']`: If true, 'public_deps' will not
+be collected from the current target's direct deps.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['device_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (used by `java_binary`).
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar`
+tool on `deps_info['unprocessed_jar_path']` or the `turbine` tool on source
+files.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one path per line).
+
+* `deps_info['lint_android_manifest']`:
+Path to an AndroidManifest.xml file to use for this lint target.
+
+* `deps_info['lint_java_sources']`:
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies that are chromium code. Note: this is a list of files, where each
+file contains a list of Java source files. This is used for lint.
+
+* `deps_info['lint_aars']`:
+List of all aars from transitive java dependencies. This allows lint to collect
+their custom annotations.zip and run checks like @IntDef on their annotations.
+
+* `deps_info['lint_srcjars']`:
+List of all bundled srcjars of all transitive java library targets. Excludes
+non-chromium java libraries.
+
+* `deps_info['lint_resource_sources']`:
+List of all resource sources files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['lint_resource_zips']`:
+List of all resource zip files belonging to all transitive resource dependencies
+of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java-related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['device_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name, and it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK.
+
+* `deps_info['final_dex']['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']`, but as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, with the `lib` prefix and `.so` suffix
+removed (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['uncompress_shared_libraries']`
+A boolean indicating whether native libraries are stored uncompressed in the
+APK.
+
+* `native['loadable_modules']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `native['secondary_abi_loadable_modules']`
+Secondary-ABI version of `native['loadable_modules']`.
+
+* `native['library_always_compress']`
+A list of library files that we always compress.
+
+* `native['library_renames']`
+A list of library files whose names we prefix with "crazy.".
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
+* `locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed as android assets.
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to apks with proguard enabled that have an apk_under_test. This is
+the path to the apk_under_test's output proguard .mapping file.
+
+## <a name="target_android_app_bundle_module">Target type \
+`android_app_bundle_module`</a>:
+
+Corresponds to an Android app bundle module. Very similar to an APK and
+inherits the same fields, except that this does not generate an installable
+file (see `android_app_bundle`), and that the following fields are omitted:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+  `deps_info['incremental_install_json_path']`.
+
+* The top-level `dist_jar` field.
+
+Compared to `android_apk` targets, it adds the following new fields:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
+* `deps_info['module_pathmap_path']`:
+The path of the pathmap file generated when compiling the resources for the
+bundle module, if resource path shortening is enabled.
+
+* `deps_info['base_allowlist_rtxt_path']`:
+Optional path to an R.txt file used as an allowlist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## <a name="target_android_app_bundle">Target type `android_app_bundle`</a>
+
+This target type corresponds to an Android app bundle (`.aab` file), and is
+built from one or more `android_app_bundle_module` targets listed as
+dependencies.
+
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+  * `deps_info['supports_android']` (always True).
+  * `deps_info['requires_android']` (always True).
+  * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and is used to create a `.jar` file that can later be
+redistributed.
+
+This always has the following entries:
+
+  * `deps_info['proguard_enabled']` (False by default).
+  * `deps_info['proguard_configs']` (optional).
+  * `deps_info['supports_android']` (True by default).
+  * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['resource_packages']`
+For `java_library` targets, this is the list of package names for all resource
+dependencies for the current target. Order must match the one from
+`javac['srcjars']`. For other target types, this key does not exist.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
+
+## <a name="android_app_bundle">Target type `android_app_bundle`</a>:
+
+This type corresponds to an Android app bundle (`.aab` file).
+
+--------------- END_MARKDOWN ---------------------------------------------------
+"""
+
+from __future__ import print_function
+
+import collections
+import itertools
+import json
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+from util import resource_utils
+
+# TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
+if sys.version_info.major == 2:
+  zip_longest = itertools.izip_longest
+else:
+  zip_longest = itertools.zip_longest
+
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
+               'junit_binary', 'android_app_bundle')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+
+
+class OrderedSet(collections.OrderedDict):
+  # The |value| parameter is present to avoid a presubmit warning due to a
+  # different number of parameters than the overridden method.
+  @staticmethod
+  def fromkeys(iterable, value=None):
+    out = OrderedSet()
+    out.update(iterable)
+    return out
+
+  def add(self, key):
+    self[key] = True
+
+  def update(self, iterable):
+    for v in iterable:
+      self.add(v)
+
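+# Note: unlike a plain set(), OrderedSet preserves insertion order, which keeps
+# generated classpaths deterministic. A hypothetical example:
+#   s = OrderedSet.fromkeys(['b.jar', 'a.jar', 'b.jar'])
+#   s.add('c.jar')
+#   list(s) == ['b.jar', 'a.jar', 'c.jar']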
+
+def _ExtractMarkdownDocumentation(input_text):
+  """Extract Markdown documentation from a list of input strings lines.
+
+     This generates a list of strings extracted from |input_text|, by looking
+     for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+  in_markdown = False
+  result = []
+  for line in input_text.splitlines():
+    if in_markdown:
+      if '-- END_MARKDOWN --' in line:
+        in_markdown = False
+      else:
+        result.append(line)
+    else:
+      if '-- BEGIN_MARKDOWN --' in line:
+        in_markdown = True
+
+  return result
+
+
+class AndroidManifest(object):
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentationElements(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    return instrumentation_els
+
+  def CheckInstrumentationElements(self, expected_package):
+    instrs = self.GetInstrumentationElements()
+    if not instrs:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    for instr in instrs:
+      instrumented_package = instr.getAttributeNS(
+          'http://schemas.android.com/apk/res/android', 'targetPackage')
+      if instrumented_package != expected_package:
+        raise Exception(
+            'Wrong instrumented package. Expected %s, got %s'
+            % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if path not in dep_config_cache:
+    with open(path) as jsonfile:
+      dep_config_cache[path] = json.load(jsonfile)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None):
+  def GetDeps(path):
+    config = GetDepConfig(path)
+    if filter_func and not filter_func(config):
+      return []
+    return config['deps_configs']
+
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+def GetObjectByPath(obj, key_path):
+  """Given an object, return its nth child based on a key path.
+  """
+  return GetObjectByPath(obj[key_path[0]], key_path[1:]) if key_path else obj
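+# For example (illustration only):
+#   GetObjectByPath({'a': {'b': [1, 2]}}, ('a', 'b')) returns [1, 2].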
+
+
+def RemoveObjDups(obj, base, *key_path):
+  """Remove array items from an object[*kep_path] that are also
+     contained in the base[*kep_path] (duplicates).
+  """
+  base_target = set(GetObjectByPath(base, key_path))
+  target = GetObjectByPath(obj, key_path)
+  target[:] = [x for x in target if x not in base_target]
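+# For example (illustration only), with base = {'x': [1, 2]} and
+# obj = {'x': [2, 3]}, RemoveObjDups(obj, base, 'x') leaves obj == {'x': [3]}.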
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self._all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self._direct_deps_configs = [
+        GetDepConfig(p) for p in direct_deps_config_paths
+    ]
+    self._all_deps_configs = [
+        GetDepConfig(p) for p in self._all_deps_config_paths
+    ]
+    self._direct_deps_config_paths = direct_deps_config_paths
+
+  def All(self, wanted_type=None):
+    if wanted_type is None:
+      return self._all_deps_configs
+    return DepsOfType(wanted_type, self._all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self._direct_deps_configs
+    return DepsOfType(wanted_type, self._direct_deps_configs)
+
+  def DirectAndChildPublicDeps(self, wanted_type=None):
+    """Returns direct dependencies and dependencies exported via public_deps of
+       direct dependencies.
+    """
+    dep_paths = set(self._direct_deps_config_paths)
+    for direct_dep in self._direct_deps_configs:
+      dep_paths.update(direct_dep.get('public_deps_configs', []))
+    deps_list = [GetDepConfig(p) for p in dep_paths]
+    if wanted_type is None:
+      return deps_list
+    return DepsOfType(wanted_type, deps_list)
+
+  def AllConfigPaths(self):
+    return self._all_deps_config_paths
+
+  def GradlePrebuiltJarPaths(self):
+    ret = []
+
+    def helper(cur):
+      for config in cur.Direct('java_library'):
+        if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
+          if config['unprocessed_jar_path'] not in ret:
+            ret.append(config['unprocessed_jar_path'])
+
+    helper(self)
+    return ret
+
+  def GradleLibraryProjectDeps(self):
+    ret = []
+
+    def helper(cur):
+      for config in cur.Direct('java_library'):
+        if config['is_prebuilt']:
+          pass
+        elif config['gradle_treat_as_prebuilt']:
+          helper(Deps(config['deps_configs']))
+        elif config not in ret:
+          ret.append(config)
+
+    helper(self)
+    return ret
+
+
+def _MergeAssets(all_assets):
+  """Merges all assets from the given deps.
+
+  Returns:
+    A tuple of: (compressed, uncompressed, locale_paks)
+    |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is
+    the path of the asset to add, and zipPath is the location within the zip
+    (excluding assets/ prefix).
+    |locale_paks| is a set of all zipPaths that have been marked as
+    treat_as_locale_paks=true.
+  """
+  compressed = {}
+  uncompressed = {}
+  locale_paks = set()
+  for asset_dep in all_assets:
+    entry = asset_dep['assets']
+    disable_compression = entry.get('disable_compression')
+    treat_as_locale_paks = entry.get('treat_as_locale_paks')
+    dest_map = uncompressed if disable_compression else compressed
+    other_map = compressed if disable_compression else uncompressed
+    outputs = entry.get('outputs', [])
+    for src, dest in zip_longest(entry['sources'], outputs):
+      if not dest:
+        dest = os.path.basename(src)
+      # Merge so that each path shows up in only one of the lists, and that
+      # deps of the same target override previous ones.
+      other_map.pop(dest, 0)
+      dest_map[dest] = src
+      if treat_as_locale_paks:
+        locale_paks.add(dest)
+
+  def create_list(asset_map):
+    ret = ['%s:%s' % (src, dest) for dest, src in asset_map.items()]
+    # Sort to ensure deterministic ordering.
+    ret.sort()
+    return ret
+
+  return create_list(compressed), create_list(uncompressed), locale_paks
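+# For example (illustration only), a single dep whose ['assets'] entry is
+#   {'sources': ['p/foo.pak'], 'disable_compression': True,
+#    'treat_as_locale_paks': True}
+# yields ([], ['p/foo.pak:foo.pak'], {'foo.pak'}).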
+
+
+def _ResolveGroups(configs):
+  """Returns a list of configs with all groups inlined."""
+  ret = list(configs)
+  while True:
+    groups = DepsOfType('group', ret)
+    if not groups:
+      return ret
+    for config in groups:
+      index = ret.index(config)
+      expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+      ret[index:index + 1] = expanded_configs
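+# For example (illustration only): if config B is a group whose deps_configs
+# point to C and D, then _ResolveGroups([A, B]) returns [A, C, D], i.e. the
+# group is replaced in-place by its constituents.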
+
+
+def _DepsFromPaths(dep_paths,
+                   target_type,
+                   filter_root_targets=True,
+                   recursive_resource_deps=False):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  E.g. When a resource or asset depends on an apk target, the intent is to
+  include the .apk as a resource/asset, not to have the apk's classpath added.
+
+  This method is meant to be called to get the top nodes (i.e. closest to
+  current target) that we could then use to get a full transitive dependents
+  list (e.g. using Deps#All). So filtering single elements out of this list
+  filters out whole branches of dependencies. By resolving groups (i.e.
+  expanding them to their constituents), depending on a group is equivalent
+  to directly depending on each element of that group.
+  """
+  blocklist = []
+  allowlist = []
+
+  # Don't allow root targets to be considered as a dep.
+  if filter_root_targets:
+    blocklist.extend(_ROOT_TYPES)
+
+  # Don't allow java libraries to cross through assets/resources.
+  if target_type in _RESOURCE_TYPES:
+    allowlist.extend(_RESOURCE_TYPES)
+    # Pretend that this target directly depends on all of its transitive
+    # dependencies.
+    if recursive_resource_deps:
+      dep_paths = GetAllDepsConfigsInOrder(dep_paths)
+      # Exclude assets if recursive_resource_deps is set. The
+      # recursive_resource_deps arg is used to pull resources into the base
+      # module to workaround bugs accessing resources in isolated DFMs, but
+      # assets should be kept in the DFMs.
+      blocklist.append('android_assets')
+
+  return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist)
+
+
+def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  See _DepsFromPaths.
+
+  |blocklist| if passed, are the types of direct dependencies we do not care
+  about (i.e. tips of branches that we wish to prune).
+
+  |allowlist| if passed, are the only types of direct dependencies we care
+  about (i.e. we wish to prune all other branches that do not start from one of
+  these).
+  """
+  configs = [GetDepConfig(p) for p in dep_paths]
+  groups = DepsOfType('group', configs)
+  configs = _ResolveGroups(configs)
+  configs += groups
+  if blocklist:
+    configs = [c for c in configs if c['type'] not in blocklist]
+  if allowlist:
+    configs = [c for c in configs if c['type'] in allowlist]
+
+  return Deps([c['path'] for c in configs])
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+  ret = []
+  with open(runtime_deps_file) as f:
+    for line in f:
+      line = line.rstrip()
+      if not line.endswith('.so'):
+        continue
+      # Only unstripped .so files are listed in runtime deps.
+      # Convert to the stripped .so by going up one directory.
+      ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+  ret.reverse()
+  return ret
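+# For example (illustration only), a runtime-deps line
+# 'obj/lib.unstripped/libfoo.so' becomes 'obj/libfoo.so'; lines not ending in
+# '.so' are skipped, and the resulting list is reversed.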
+
+
+def _CreateJavaLibrariesList(library_paths):
+  """Returns a java literal array with the "base" library names:
+  e.g. libfoo.so -> foo
+  """
+  names = ['"%s"' % os.path.basename(s)[3:-3] for s in library_paths]
+  return ('{%s}' % ','.join(sorted(set(names))))
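+# For example (illustration only):
+#   _CreateJavaLibrariesList(['out/libchrome.so', 'out/libbase.so'])
+# returns '{"base","chrome"}' (sorted and de-duplicated).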
+
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+  """Returns a java literal array from a list of locale assets.
+
+  Args:
+    assets: A list of all APK asset paths in the form 'src:dst'
+    locale_paks: A list of asset paths that correspond to the locale pak
+      files of interest. Each |assets| entry will have its 'dst' part matched
+      against it to determine if they are part of the result.
+  Returns:
+    A string that is a Java source literal array listing the locale names
+    of the corresponding asset files, without directory or .pak suffix.
+    E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+  """
+  assets_paths = [a.split(':')[1] for a in assets]
+  locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+  return '{%s}' % ','.join('"%s"' % l for l in sorted(locales))
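+# For example (illustration only), assets = ['p/en-US.pak:locales/en-US.pak']
+# with locale_paks = {'locales/en-US.pak'} returns '{"en-US"}'.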
+
+
+def _AddJarMapping(jar_to_target, configs):
+  for config in configs:
+    jar = config.get('unprocessed_jar_path')
+    if jar:
+      jar_to_target[jar] = config['gn_target']
+    for jar in config.get('extra_classpath_jars', []):
+      jar_to_target[jar] = config['gn_target']
+
+
+def _CompareClasspathPriority(dep):
+  return 1 if dep.get('low_classpath_priority') else 0
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option('--gn-target', help='GN label for this target')
+  parser.add_option(
+      '--deps-configs',
+      help='GN-list of dependent build_config files.')
+  parser.add_option(
+      '--annotation-processor-configs',
+      help='GN-list of build_config files for annotation processors.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+  parser.add_option('--resource-dirs', action='append', default=[],
+                    help='GN-list of resource dirs')
+  parser.add_option(
+      '--res-sources-path',
+      help='Path to file containing a list of paths to resources.')
+  parser.add_option(
+      '--resource-overlay',
+      action='store_true',
+      help='Whether resources passed in via --resources-zip should override '
+      'resources with the same name')
+  parser.add_option(
+      '--recursive-resource-deps',
+      action='store_true',
+      help='Whether deps should be walked recursively to find resource deps.')
+
+  # android_assets options
+  parser.add_option('--asset-sources', help='List of asset sources.')
+  parser.add_option('--asset-renaming-sources',
+                    help='List of asset sources with custom destinations.')
+  parser.add_option('--asset-renaming-destinations',
+                    help='List of asset custom destinations.')
+  parser.add_option('--disable-asset-compression', action='store_true',
+                    help='Whether to disable asset compression.')
+  parser.add_option('--treat-as-locale-paks', action='store_true',
+      help='Consider the assets as locale paks in BuildConfig.java')
+
+  # java library options
+
+  parser.add_option('--public-deps-configs',
+                    help='GN list of config files of deps which are exposed as '
+                    'part of the target\'s public API.')
+  parser.add_option(
+      '--ignore-dependency-public-deps',
+      action='store_true',
+      help='If true, \'public_deps\' will not be collected from the current '
+      'target\'s direct deps.')
+  parser.add_option('--aar-path', help='Path to the containing .aar file.')
+  parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
+  parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
+  parser.add_option('--unprocessed-jar-path',
+      help='Path to the .jar to use for javac classpath purposes.')
+  parser.add_option(
+      '--interface-jar-path',
+      help='Path to the interface .jar to use for javac classpath purposes.')
+  parser.add_option('--is-prebuilt', action='store_true',
+                    help='Whether the jar was compiled or pre-compiled.')
+  parser.add_option('--java-sources-file', help='Path to .sources file')
+  parser.add_option('--bundled-srcjars',
+      help='GN-list of .srcjars that have been included in this java_library.')
+  parser.add_option('--supports-android', action='store_true',
+      help='Whether this library supports running on the Android platform.')
+  parser.add_option('--requires-android', action='store_true',
+      help='Whether this library requires running on the Android platform.')
+  parser.add_option('--bypass-platform-checks', action='store_true',
+      help='Bypass checks for support/require Android platform.')
+  parser.add_option('--extra-classpath-jars',
+      help='GN-list of .jar files to include on the classpath when compiling, '
+           'but not to include in the final binary.')
+  parser.add_option(
+      '--low-classpath-priority',
+      action='store_true',
+      help='Indicates that the library should be placed at the end of the '
+      'classpath.')
+  parser.add_option(
+      '--mergeable-android-manifests',
+      help='GN-list of AndroidManifest.xml to include in manifest merging.')
+  parser.add_option('--gradle-treat-as-prebuilt', action='store_true',
+      help='Whether this library should be treated as a prebuilt library by '
+           'generate_gradle.py.')
+  parser.add_option('--main-class',
+      help='Main class for java_binary or java_annotation_processor targets.')
+  parser.add_option('--java-resources-jar-path',
+                    help='Path to JAR that contains java resources. Everything '
+                    'from this JAR except meta-inf/ content and .class files '
+                    'will be added to the final APK.')
+  parser.add_option(
+      '--non-chromium-code',
+      action='store_true',
+      help='True if a java library is not chromium code, used for lint.')
+
+  # android library options
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # native library options
+  parser.add_option('--shared-libraries-runtime-deps',
+                    help='Path to file containing runtime deps for shared '
+                         'libraries.')
+  parser.add_option(
+      '--loadable-modules',
+      action='append',
+      help='GN-list of native libraries for primary '
+      'android-abi. Can be specified multiple times.',
+      default=[])
+  parser.add_option('--secondary-abi-shared-libraries-runtime-deps',
+                    help='Path to file containing runtime deps for secondary '
+                         'abi shared libraries.')
+  parser.add_option(
+      '--secondary-abi-loadable-modules',
+      action='append',
+      help='GN-list of native libraries for secondary '
+      'android-abi. Can be specified multiple times.',
+      default=[])
+  parser.add_option(
+      '--native-lib-placeholders',
+      action='append',
+      help='GN-list of native library placeholders to add.',
+      default=[])
+  parser.add_option(
+      '--secondary-native-lib-placeholders',
+      action='append',
+      help='GN-list of native library placeholders to add '
+      'for the secondary android-abi.',
+      default=[])
+  parser.add_option('--uncompress-shared-libraries', default=False,
+                    action='store_true',
+                    help='Whether to store native libraries uncompressed')
+  parser.add_option(
+      '--library-always-compress',
+      help='The list of library files that we always compress.')
+  parser.add_option(
+      '--library-renames',
+      default=[],
+      help='The list of library files whose names we prefix with "crazy.".')
+
+  # apk options
+  parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+  parser.add_option('--incremental-apk-path',
+                    help="Path to the target's incremental apk output.")
+  parser.add_option('--incremental-install-json-path',
+                    help="Path to the target's generated incremental install "
+                    "json.")
+  parser.add_option(
+      '--tested-apk-config',
+      help='Path to the build config of the tested apk (for an instrumentation '
+      'test apk).')
+  parser.add_option(
+      '--proguard-enabled',
+      action='store_true',
+      help='Whether proguard is enabled for this apk or bundle module.')
+  parser.add_option(
+      '--proguard-configs',
+      help='GN-list of proguard flag files to use in final apk.')
+  parser.add_option(
+      '--proguard-mapping-path', help='Path to mapping file from ProGuard.')
+
+  # apk options that are static library specific
+  parser.add_option(
+      '--static-library-dependent-configs',
+      help='GN list of .build_configs of targets that use this target as a '
+      'static library.')
+
+  # options shared between android_resources and apk targets
+  parser.add_option('--r-text-path', help='Path to target\'s R.txt file.')
+
+  parser.add_option('--fail',
+      help='GN-list of error message lines to fail with.')
+
+  parser.add_option('--final-dex-path',
+                    help='Path to final input classes.dex (or classes.zip) to '
+                    'use in final apk.')
+  parser.add_option('--res-size-info', help='Path to .ap_.info')
+  parser.add_option('--apk-proto-resources',
+                    help='Path to resources compiled in protocol buffer format '
+                         'for this apk.')
+  parser.add_option(
+      '--module-pathmap-path',
+      help='Path to pathmap file for resource paths in a bundle module.')
+  parser.add_option(
+      '--base-allowlist-rtxt-path',
+      help='Path to R.txt file for the base resources allowlist.')
+  parser.add_option(
+      '--is-base-module',
+      action='store_true',
+      help='Specifies that this module is a base module for some app bundle.')
+
+  parser.add_option('--generate-markdown-format-doc', action='store_true',
+                    help='Dump the Markdown .build_config format documentation '
+                    'then exit immediately.')
+
+  parser.add_option(
+      '--base-module-build-config',
+      help='Path to the base module\'s build config '
+      'if this is a feature module.')
+
+  parser.add_option(
+      '--module-build-configs',
+      help='For bundles, the paths of all non-async module .build_configs '
+      'for modules that are part of the bundle.')
+
+  parser.add_option('--version-name', help='Version name for this APK.')
+  parser.add_option('--version-code', help='Version code for this APK.')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  if options.generate_markdown_format_doc:
+    doc_lines = _ExtractMarkdownDocumentation(__doc__)
+    for line in doc_lines:
+      print(line)
+    return 0
+
+  if options.fail:
+    parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+
+  lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+  device_lib_options = ['device_jar_path', 'dex_path']
+  required_options_map = {
+      'android_apk': ['build_config'] + lib_options + device_lib_options,
+      'android_app_bundle_module':
+      ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
+      device_lib_options,
+      'android_assets': ['build_config'],
+      'android_resources': ['build_config', 'resources_zip'],
+      'dist_aar': ['build_config'],
+      'dist_jar': ['build_config'],
+      'group': ['build_config'],
+      'java_annotation_processor': ['build_config', 'main_class'],
+      'java_binary': ['build_config'],
+      'java_library': ['build_config', 'host_jar_path'] + lib_options,
+      'junit_binary': ['build_config'],
+      'system_java_library': ['build_config', 'unprocessed_jar_path'],
+      'android_app_bundle': ['build_config', 'module_build_configs'],
+  }
+  required_options = required_options_map.get(options.type)
+  if not required_options:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type != 'android_app_bundle_module':
+    if options.apk_proto_resources:
+      raise Exception('--apk-proto-resources can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.module_pathmap_path:
+      raise Exception('--module-pathmap-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.base_allowlist_rtxt_path:
+      raise Exception('--base-allowlist-rtxt-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.is_base_module:
+      raise Exception('--is-base-module can only be used with '
+                      '--type=android_app_bundle_module')
+
+  is_apk_or_module_target = options.type in ('android_apk',
+      'android_app_bundle_module')
+
+  if not is_apk_or_module_target:
+    if options.uncompress_shared_libraries:
+      raise Exception('--uncompress-shared-libraries can only be used '
+                      'with --type=android_apk or '
+                      '--type=android_app_bundle_module')
+    if options.library_always_compress:
+      raise Exception(
+          '--library-always-compress can only be used with --type=android_apk '
+          'or --type=android_app_bundle_module')
+    if options.library_renames:
+      raise Exception(
+          '--library-renames can only be used with --type=android_apk or '
+          '--type=android_app_bundle_module')
+
+  if options.device_jar_path and not options.dex_path:
+    raise Exception('java_library that supports Android requires a dex path.')
+  if any(getattr(options, x) for x in lib_options):
+    for attr in lib_options:
+      if not getattr(options, attr):
+        raise Exception('Expected %s to be set.' % attr)
+
+  if options.requires_android and not options.supports_android:
+    raise Exception(
+        '--supports-android is required when using --requires-android')
+
+  is_java_target = options.type in (
+      'java_binary', 'junit_binary', 'java_annotation_processor',
+      'java_library', 'android_apk', 'dist_aar', 'dist_jar',
+      'system_java_library', 'android_app_bundle_module')
+
+  is_static_library_dex_provider_target = (
+      options.static_library_dependent_configs and options.proguard_enabled)
+  if is_static_library_dex_provider_target:
+    if options.type != 'android_apk':
+      raise Exception(
+          '--static-library-dependent-configs only supports --type=android_apk')
+  options.static_library_dependent_configs = build_utils.ParseGnList(
+      options.static_library_dependent_configs)
+  static_library_dependent_configs_by_path = {
+      p: GetDepConfig(p)
+      for p in options.static_library_dependent_configs
+  }
+
+  deps_configs_paths = build_utils.ParseGnList(options.deps_configs)
+  deps = _DepsFromPaths(deps_configs_paths,
+                        options.type,
+                        recursive_resource_deps=options.recursive_resource_deps)
+  processor_deps = _DepsFromPaths(
+      build_utils.ParseGnList(options.annotation_processor_configs or ''),
+      options.type, filter_root_targets=False)
+
+  all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths() +
+                list(static_library_dependent_configs_by_path))
+
+  if options.recursive_resource_deps:
+    # Include java_library targets since changes to these targets can remove
+    # resource deps from the build, which would require rebuilding this target's
+    # build config file: crbug.com/1168655.
+    recursive_java_deps = _DepsFromPathsWithFilters(
+        GetAllDepsConfigsInOrder(deps_configs_paths),
+        allowlist=['java_library'])
+    all_inputs.extend(recursive_java_deps.AllConfigPaths())
+
+  direct_deps = deps.Direct()
+  system_library_deps = deps.Direct('system_java_library')
+  all_deps = deps.All()
+  all_library_deps = deps.All('java_library')
+  all_resources_deps = deps.All('android_resources')
+
+  if options.type == 'java_library':
+    java_library_deps = _DepsFromPathsWithFilters(
+        deps_configs_paths, allowlist=['android_resources'])
+    # for java libraries, we only care about resources that are directly
+    # reachable without going through another java_library.
+    all_resources_deps = java_library_deps.All('android_resources')
+  if options.type == 'android_resources' and options.recursive_resource_deps:
+    # android_resources targets that want recursive resource deps also need to
+    # collect package_names from all library deps. This ensures the R.java files
+    # for these libraries will get pulled in along with the resources.
+    android_resources_library_deps = _DepsFromPathsWithFilters(
+        deps_configs_paths, allowlist=['java_library']).All('java_library')
+  if is_apk_or_module_target:
+    # android_resources deps which had recursive_resource_deps set should not
+    # have the manifests from the recursively collected deps added to this
+    # module. This keeps the manifest declarations in the child DFMs, since they
+    # will have the Java implementations.
+    def ExcludeRecursiveResourcesDeps(config):
+      return not config.get('includes_recursive_resources', False)
+
+    extra_manifest_deps = [
+        GetDepConfig(p) for p in GetAllDepsConfigsInOrder(
+            deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps)
+    ]
+
+  base_module_build_config = None
+  if options.base_module_build_config:
+    with open(options.base_module_build_config, 'r') as f:
+      base_module_build_config = json.load(f)
+
+  # Initialize some common config.
+  # Any value that needs to be queryable by dependents must go within deps_info.
+  config = {
+      'deps_info': {
+          'name': os.path.basename(options.build_config),
+          'path': options.build_config,
+          'type': options.type,
+          'gn_target': options.gn_target,
+          'deps_configs': [d['path'] for d in direct_deps],
+          'chromium_code': not options.non_chromium_code,
+      },
+      # Info needed only by generate_gradle.py.
+      'gradle': {}
+  }
+  deps_info = config['deps_info']
+  gradle = config['gradle']
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_config = tested_apk_deps.Direct()[0]
+    gradle['apk_under_test'] = tested_apk_config['name']
+
+  if options.type == 'android_app_bundle_module':
+    deps_info['is_base_module'] = bool(options.is_base_module)
+
+  # Required for generating gradle files.
+  if options.type == 'java_library':
+    deps_info['is_prebuilt'] = bool(options.is_prebuilt)
+    deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+
+  if options.android_manifest:
+    deps_info['android_manifest'] = options.android_manifest
+
+  if options.bundled_srcjars:
+    deps_info['bundled_srcjars'] = build_utils.ParseGnList(
+        options.bundled_srcjars)
+
+  if options.java_sources_file:
+    deps_info['java_sources_file'] = options.java_sources_file
+
+  if is_java_target:
+    if options.bundled_srcjars:
+      gradle['bundled_srcjars'] = deps_info['bundled_srcjars']
+
+    gradle['dependent_android_projects'] = []
+    gradle['dependent_java_projects'] = []
+    gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
+
+    if options.main_class:
+      deps_info['main_class'] = options.main_class
+
+    for c in deps.GradleLibraryProjectDeps():
+      if c['requires_android']:
+        gradle['dependent_android_projects'].append(c['path'])
+      else:
+        gradle['dependent_java_projects'].append(c['path'])
+
+  if options.r_text_path:
+    deps_info['r_text_path'] = options.r_text_path
+
+  # TODO(tiborg): Remove creation of JNI info for type group and java_library
+  # once we can generate the JNI registration based on APK / module targets as
+  # opposed to groups and libraries.
+  if is_apk_or_module_target or options.type in (
+      'group', 'java_library', 'junit_binary'):
+    deps_info['jni'] = {}
+    all_java_sources = [c['java_sources_file'] for c in all_library_deps
+                        if 'java_sources_file' in c]
+    if options.java_sources_file:
+      all_java_sources.append(options.java_sources_file)
+
+    if options.apk_proto_resources:
+      deps_info['proto_resources_path'] = options.apk_proto_resources
+
+    deps_info['version_name'] = options.version_name
+    deps_info['version_code'] = options.version_code
+    if options.module_pathmap_path:
+      deps_info['module_pathmap_path'] = options.module_pathmap_path
+    else:
+      # Ensure there is an entry, even if it is empty, for modules
+      # that have not enabled resource path shortening. Otherwise
+      # build_utils.ExpandFileArgs fails.
+      deps_info['module_pathmap_path'] = ''
+
+    if options.base_allowlist_rtxt_path:
+      deps_info['base_allowlist_rtxt_path'] = options.base_allowlist_rtxt_path
+    else:
+      # Ensure there is an entry, even if it is empty, for modules
+      # that don't need such a allowlist.
+      deps_info['base_allowlist_rtxt_path'] = ''
+
+  if is_java_target:
+    deps_info['requires_android'] = bool(options.requires_android)
+    deps_info['supports_android'] = bool(options.supports_android)
+
+    if not options.bypass_platform_checks:
+      deps_require_android = (
+          [d['name'] for d in all_resources_deps] +
+          [d['name'] for d in all_library_deps if d['requires_android']])
+      deps_not_support_android = (
+          [d['name'] for d in all_library_deps if not d['supports_android']])
+
+      if deps_require_android and not options.requires_android:
+        raise Exception('Some deps require building for the Android platform: '
+            + str(deps_require_android))
+
+      if deps_not_support_android and options.supports_android:
+        raise Exception('Not all deps support the Android platform: '
+            + str(deps_not_support_android))
+
+  if is_apk_or_module_target or options.type == 'dist_jar':
+    all_dex_files = [c['dex_path'] for c in all_library_deps]
+
+  if is_java_target:
+    # Classpath values filled in below (after applying tested_apk_config).
+    config['javac'] = {}
+    if options.aar_path:
+      deps_info['aar_path'] = options.aar_path
+    if options.unprocessed_jar_path:
+      deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
+      deps_info['interface_jar_path'] = options.interface_jar_path
+    if options.public_deps_configs:
+      deps_info['public_deps_configs'] = build_utils.ParseGnList(
+          options.public_deps_configs)
+    if options.device_jar_path:
+      deps_info['device_jar_path'] = options.device_jar_path
+    if options.host_jar_path:
+      deps_info['host_jar_path'] = options.host_jar_path
+    if options.dex_path:
+      deps_info['dex_path'] = options.dex_path
+      if is_apk_or_module_target:
+        all_dex_files.append(options.dex_path)
+    if options.low_classpath_priority:
+      deps_info['low_classpath_priority'] = True
+    if options.type == 'android_apk':
+      deps_info['apk_path'] = options.apk_path
+      deps_info['incremental_apk_path'] = options.incremental_apk_path
+      deps_info['incremental_install_json_path'] = (
+          options.incremental_install_json_path)
+
+  if options.type == 'android_assets':
+    all_asset_sources = []
+    if options.asset_renaming_sources:
+      all_asset_sources.extend(
+          build_utils.ParseGnList(options.asset_renaming_sources))
+    if options.asset_sources:
+      all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+
+    deps_info['assets'] = {
+        'sources': all_asset_sources
+    }
+    if options.asset_renaming_destinations:
+      deps_info['assets']['outputs'] = (
+          build_utils.ParseGnList(options.asset_renaming_destinations))
+    if options.disable_asset_compression:
+      deps_info['assets']['disable_compression'] = True
+    if options.treat_as_locale_paks:
+      deps_info['assets']['treat_as_locale_paks'] = True
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.resource_overlay:
+      deps_info['resource_overlay'] = True
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.android_manifest:
+      manifest = AndroidManifest(options.android_manifest)
+      deps_info['package_name'] = manifest.GetPackageName()
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+    deps_info['res_sources_path'] = ''
+    if options.res_sources_path:
+      deps_info['res_sources_path'] = options.res_sources_path
+
+  if options.requires_android and options.type == 'java_library':
+    # Used to strip out R.class for android_prebuilt()s.
+    config['javac']['resource_packages'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c
+    ]
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+
+  if options.type in ('android_resources', 'android_apk', 'junit_binary',
+                      'dist_aar', 'android_app_bundle_module', 'java_library'):
+    dependency_zips = []
+    dependency_zip_overlays = []
+    for c in all_resources_deps:
+      if not c['resources_zip']:
+        continue
+
+      dependency_zips.append(c['resources_zip'])
+      if c.get('resource_overlay'):
+        dependency_zip_overlays.append(c['resources_zip'])
+
+    extra_package_names = []
+
+    if options.type != 'android_resources':
+      extra_package_names = [
+          c['package_name'] for c in all_resources_deps if 'package_name' in c
+      ]
+
+      # android_resources targets which specified recursive_resource_deps may
+      # have extra_package_names.
+      for resources_dep in all_resources_deps:
+        extra_package_names.extend(resources_dep['extra_package_names'])
+
+      # In final types (i.e. apks and modules) that create real R.java files,
+      # they must collect package names from java_libraries as well.
+      # https://crbug.com/1073476
+      if options.type != 'java_library':
+        extra_package_names.extend([
+            c['package_name'] for c in all_library_deps if 'package_name' in c
+        ])
+    elif options.recursive_resource_deps:
+      # Pull extra_package_names from library deps if recursive resource deps
+      # are required.
+      extra_package_names = [
+          c['package_name'] for c in android_resources_library_deps
+          if 'package_name' in c
+      ]
+      config['deps_info']['includes_recursive_resources'] = True
+
+    if options.type in ('dist_aar', 'java_library'):
+      r_text_files = [
+          c['r_text_path'] for c in all_resources_deps if 'r_text_path' in c
+      ]
+      deps_info['dependency_r_txt_files'] = r_text_files
+
+    # For feature modules, remove any resources that already exist in the base
+    # module.
+    if base_module_build_config:
+      dependency_zips = [
+          c for c in dependency_zips
+          if c not in base_module_build_config['deps_info']['dependency_zips']
+      ]
+      dependency_zip_overlays = [
+          c for c in dependency_zip_overlays if c not in
+          base_module_build_config['deps_info']['dependency_zip_overlays']
+      ]
+      extra_package_names = [
+          c for c in extra_package_names if c not in
+          base_module_build_config['deps_info']['extra_package_names']
+      ]
+
+    if options.type == 'android_apk' and options.tested_apk_config:
+      config['deps_info']['arsc_package_name'] = (
+          tested_apk_config['package_name'])
+      # We should not shadow the actual R.java files of the apk_under_test by
+      # creating new R.java files with the same package names in the tested apk.
+      extra_package_names = [
+          package for package in extra_package_names
+          if package not in tested_apk_config['extra_package_names']
+      ]
+    if options.res_size_info:
+      config['deps_info']['res_size_info'] = options.res_size_info
+
+    config['deps_info']['dependency_zips'] = dependency_zips
+    config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays
+    config['deps_info']['extra_package_names'] = extra_package_names
+
+  # These are .jars to add to javac classpath but not to runtime classpath.
+  extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+  if extra_classpath_jars:
+    deps_info['extra_classpath_jars'] = extra_classpath_jars
+
+  mergeable_android_manifests = build_utils.ParseGnList(
+      options.mergeable_android_manifests)
+  if mergeable_android_manifests:
+    deps_info['mergeable_android_manifests'] = mergeable_android_manifests
+
+  extra_proguard_classpath_jars = []
+  proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+  if proguard_configs:
+    # Make a copy of |proguard_configs| since it's mutated below.
+    deps_info['proguard_configs'] = list(proguard_configs)
+
+
+  if is_java_target:
+    if options.ignore_dependency_public_deps:
+      classpath_direct_deps = deps.Direct()
+      classpath_direct_library_deps = deps.Direct('java_library')
+    else:
+      classpath_direct_deps = deps.DirectAndChildPublicDeps()
+      classpath_direct_library_deps = deps.DirectAndChildPublicDeps(
+          'java_library')
+
+    # The classpath used to compile this target when annotation processors are
+    # present.
+    javac_classpath = set(c['unprocessed_jar_path']
+                          for c in classpath_direct_library_deps)
+    # The classpath used to compile this target when annotation processors are
+    # not present. These are also always used to know when a target needs to be
+    # rebuilt.
+    javac_interface_classpath = set(c['interface_jar_path']
+                                    for c in classpath_direct_library_deps)
+
+    # Preserve order of |all_library_deps|. Move low priority libraries to the
+    # end of the classpath.
+    all_library_deps_sorted_for_classpath = sorted(
+        all_library_deps[::-1], key=_CompareClasspathPriority)
+
+    # The classpath used for bytecode rewriting.
+    javac_full_classpath = OrderedSet.fromkeys(
+        c['unprocessed_jar_path']
+        for c in all_library_deps_sorted_for_classpath)
+    # The classpath used for error prone.
+    javac_full_interface_classpath = OrderedSet.fromkeys(
+        c['interface_jar_path'] for c in all_library_deps_sorted_for_classpath)
+
+    # Add the base module to the classpath to compile against its R.java file.
+    if base_module_build_config:
+      javac_full_classpath.add(
+          base_module_build_config['deps_info']['unprocessed_jar_path'])
+      javac_full_interface_classpath.add(
+          base_module_build_config['deps_info']['interface_jar_path'])
+      # Turbine now compiles headers against only the direct classpath, so the
+      # base module's interface jar must be on the direct interface classpath.
+      javac_interface_classpath.add(
+          base_module_build_config['deps_info']['interface_jar_path'])
+
+    for dep in classpath_direct_deps:
+      if 'extra_classpath_jars' in dep:
+        javac_classpath.update(dep['extra_classpath_jars'])
+        javac_interface_classpath.update(dep['extra_classpath_jars'])
+    for dep in all_deps:
+      if 'extra_classpath_jars' in dep:
+        javac_full_classpath.update(dep['extra_classpath_jars'])
+        javac_full_interface_classpath.update(dep['extra_classpath_jars'])
+
+    # TODO(agrieve): Might be less confusing to fold these into bootclasspath.
+    # Deps to add to the compile-time classpath (but not the runtime classpath).
+    # These are jars specified by input_jars_paths that almost never change.
+    # Just add them directly to all the classpaths.
+    if options.extra_classpath_jars:
+      javac_classpath.update(extra_classpath_jars)
+      javac_interface_classpath.update(extra_classpath_jars)
+      javac_full_classpath.update(extra_classpath_jars)
+      javac_full_interface_classpath.update(extra_classpath_jars)
+
+  if is_java_target or options.type == 'android_app_bundle':
+    # The classpath to use to run this target (or as an input to ProGuard).
+    device_classpath = []
+    if is_java_target and options.device_jar_path:
+      device_classpath.append(options.device_jar_path)
+    device_classpath.extend(
+        c.get('device_jar_path') for c in all_library_deps
+        if c.get('device_jar_path'))
+    if options.type == 'android_app_bundle':
+      for d in deps.Direct('android_app_bundle_module'):
+        device_classpath.extend(c for c in d.get('device_classpath', [])
+                                if c not in device_classpath)
+
+  if options.type in ('dist_jar', 'java_binary', 'junit_binary'):
+    # The classpath to use to run this target.
+    host_classpath = []
+    if options.host_jar_path:
+      host_classpath.append(options.host_jar_path)
+    host_classpath.extend(c['host_jar_path'] for c in all_library_deps)
+    deps_info['host_classpath'] = host_classpath
+
+  # Lint can be run on android_apk targets, so we collect lint artifacts for
+  # them. Lint can also be run on android_app_bundle targets, so we collect
+  # lint artifacts for each android_app_bundle_module target that a bundle
+  # includes. Different android_app_bundle targets may include different
+  # android_app_bundle_module targets, so each bundle needs to be able to
+  # de-duplicate these lint artifacts.
+  if options.type in ('android_app_bundle_module', 'android_apk'):
+    # Collect all sources and resources at the apk/bundle_module level.
+    lint_aars = set()
+    lint_srcjars = set()
+    lint_java_sources = set()
+    lint_resource_sources = set()
+    lint_resource_zips = set()
+
+    if options.java_sources_file:
+      lint_java_sources.add(options.java_sources_file)
+    if options.bundled_srcjars:
+      lint_srcjars.update(deps_info['bundled_srcjars'])
+    for c in all_library_deps:
+      if c['chromium_code'] and c['requires_android']:
+        if 'java_sources_file' in c:
+          lint_java_sources.add(c['java_sources_file'])
+        lint_srcjars.update(c['bundled_srcjars'])
+      if 'aar_path' in c:
+        lint_aars.add(c['aar_path'])
+
+    if options.res_sources_path:
+      lint_resource_sources.add(options.res_sources_path)
+    if options.resources_zip:
+      lint_resource_zips.add(options.resources_zip)
+    for c in all_resources_deps:
+      if c['chromium_code']:
+        # Prefer res_sources_path to resources_zips so that lint errors have
+        # real paths and to avoid needing to extract during lint.
+        if c['res_sources_path']:
+          lint_resource_sources.add(c['res_sources_path'])
+        else:
+          lint_resource_zips.add(c['resources_zip'])
+
+    deps_info['lint_aars'] = sorted(lint_aars)
+    deps_info['lint_srcjars'] = sorted(lint_srcjars)
+    deps_info['lint_java_sources'] = sorted(lint_java_sources)
+    deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+    deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+    deps_info['lint_extra_android_manifests'] = []
+
+    if options.type == 'android_apk':
+      assert options.android_manifest, 'Android APKs must define a manifest'
+      deps_info['lint_android_manifest'] = options.android_manifest
+
+  if options.type == 'android_app_bundle':
+    module_configs = [
+        GetDepConfig(c)
+        for c in build_utils.ParseGnList(options.module_build_configs)
+    ]
+    jni_all_source = set()
+    lint_aars = set()
+    lint_srcjars = set()
+    lint_java_sources = set()
+    lint_resource_sources = set()
+    lint_resource_zips = set()
+    lint_extra_android_manifests = set()
+    for c in module_configs:
+      if c['is_base_module']:
+        assert 'base_module_config' not in deps_info, (
+            'Must have exactly 1 base module!')
+        deps_info['base_module_config'] = c['path']
+        # Use the base module's android manifest for linting.
+        deps_info['lint_android_manifest'] = c['android_manifest']
+      else:
+        lint_extra_android_manifests.add(c['android_manifest'])
+      jni_all_source.update(c['jni']['all_source'])
+      lint_aars.update(c['lint_aars'])
+      lint_srcjars.update(c['lint_srcjars'])
+      lint_java_sources.update(c['lint_java_sources'])
+      lint_resource_sources.update(c['lint_resource_sources'])
+      lint_resource_zips.update(c['lint_resource_zips'])
+    deps_info['jni'] = {'all_source': sorted(jni_all_source)}
+    deps_info['lint_aars'] = sorted(lint_aars)
+    deps_info['lint_srcjars'] = sorted(lint_srcjars)
+    deps_info['lint_java_sources'] = sorted(lint_java_sources)
+    deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+    deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+    deps_info['lint_extra_android_manifests'] = sorted(
+        lint_extra_android_manifests)
+
+  # Map configs to classpath entries that should be included in their final dex.
+  classpath_entries_by_owning_config = collections.defaultdict(list)
+  extra_main_r_text_files = []
+  if is_static_library_dex_provider_target:
+    # Map classpath entries to configs that include them in their classpath.
+    configs_by_classpath_entry = collections.defaultdict(list)
+    static_lib_jar_paths = {}
+    for config_path, dep_config in (sorted(
+        static_library_dependent_configs_by_path.items())):
+      # For bundles, only the jar path and jni sources of the base module
+      # are relevant for proguard. Should be updated when bundle feature
+      # modules support JNI.
+      base_config = dep_config
+      if dep_config['type'] == 'android_app_bundle':
+        base_config = GetDepConfig(dep_config['base_module_config'])
+      extra_main_r_text_files.append(base_config['r_text_path'])
+      static_lib_jar_paths[config_path] = base_config['device_jar_path']
+      proguard_configs.extend(dep_config['proguard_all_configs'])
+      extra_proguard_classpath_jars.extend(
+          dep_config['proguard_classpath_jars'])
+      all_java_sources.extend(base_config['jni']['all_source'])
+
+      # The srcjars containing the generated R.java files are excluded for APK
+      # targets that use static libraries, so we add them here to ensure the
+      # union of resource IDs is available in the static library APK.
+      for package in base_config['extra_package_names']:
+        if package not in extra_package_names:
+          extra_package_names.append(package)
+      for cp_entry in dep_config['device_classpath']:
+        configs_by_classpath_entry[cp_entry].append(config_path)
+
+    for cp_entry in device_classpath:
+      configs_by_classpath_entry[cp_entry].append(options.build_config)
+
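+    # A classpath entry that appears in exactly one dependent config is dexed
+    # by that config; entries shared by several configs are dexed by this
+    # static library target itself (options.build_config).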
+    for cp_entry, candidate_configs in configs_by_classpath_entry.items():
+      config_path = (candidate_configs[0]
+                     if len(candidate_configs) == 1 else options.build_config)
+      classpath_entries_by_owning_config[config_path].append(cp_entry)
+      device_classpath.append(cp_entry)
+
+    device_classpath = sorted(set(device_classpath))
+
+  deps_info['static_library_proguard_mapping_output_paths'] = sorted([
+      d['proguard_mapping_path']
+      for d in static_library_dependent_configs_by_path.values()
+  ])
+  deps_info['static_library_dependent_classpath_configs'] = {
+      path: sorted(set(classpath))
+      for path, classpath in classpath_entries_by_owning_config.items()
+  }
+  deps_info['extra_main_r_text_files'] = sorted(extra_main_r_text_files)
+
+  if is_apk_or_module_target or options.type in ('group', 'java_library',
+                                                 'junit_binary'):
+    deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+
+  system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
+  system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
+  if system_library_deps:
+    config['android'] = {}
+    config['android']['sdk_interface_jars'] = system_interface_jars
+    config['android']['sdk_jars'] = system_jars
+
+  if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+                      'android_app_bundle_module', 'android_app_bundle'):
+    for c in all_deps:
+      proguard_configs.extend(c.get('proguard_configs', []))
+      extra_proguard_classpath_jars.extend(c.get('extra_classpath_jars', []))
+    if options.type == 'android_app_bundle':
+      for d in deps.Direct('android_app_bundle_module'):
+        proguard_configs.extend(d.get('proguard_configs', []))
+        extra_proguard_classpath_jars.extend(
+            c for c in d.get('proguard_classpath_jars', [])
+            if c not in extra_proguard_classpath_jars)
+
+    if options.type == 'android_app_bundle':
+      deps_proguard_enabled = []
+      deps_proguard_disabled = []
+      for d in deps.Direct('android_app_bundle_module'):
+        if not d['device_classpath']:
+          # Modules with no Java code are irrelevant to ProGuard, so skip them.
+          continue
+        if d['proguard_enabled']:
+          deps_proguard_enabled.append(d['name'])
+        else:
+          deps_proguard_disabled.append(d['name'])
+      if deps_proguard_enabled and deps_proguard_disabled:
+        raise Exception('Deps %s have proguard enabled while deps %s have '
+                        'proguard disabled' % (deps_proguard_enabled,
+                                               deps_proguard_disabled))
+    deps_info['proguard_enabled'] = bool(options.proguard_enabled)
+
+    if options.proguard_mapping_path:
+      deps_info['proguard_mapping_path'] = options.proguard_mapping_path
+
+  # The java code for an instrumentation test apk is assembled differently for
+  # ProGuard vs. non-ProGuard.
+  #
+  # Without ProGuard: Each library's jar is dexed separately and then combined
+  # into a single classes.dex. A test apk will include all dex files not already
+  # present in the apk-under-test. At runtime all test code lives in the test
+  # apk, and the program code lives in the apk-under-test.
+  #
+  # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+  # a single .jar, which is then dexed into a classes.dex. A test apk includes
+  # all jar files from the program and the tests because having them separate
+  # doesn't work with ProGuard's whole-program optimizations. Although the
+  # apk-under-test still has all of its code in its classes.dex, none of it is
+  # used at runtime because the copy of it within the test apk takes precedence.
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    if tested_apk_config['proguard_enabled']:
+      assert options.proguard_enabled, ('proguard must be enabled for '
+          'instrumentation apks if it\'s enabled for the tested apk.')
+      # Mutating lists, so no need to explicitly re-assign to dict.
+      proguard_configs.extend(
+          p for p in tested_apk_config['proguard_all_configs'])
+      extra_proguard_classpath_jars.extend(
+          p for p in tested_apk_config['proguard_classpath_jars'])
+      tested_apk_config = GetDepConfig(options.tested_apk_config)
+      deps_info['proguard_under_test_mapping'] = (
+          tested_apk_config['proguard_mapping_path'])
+    elif options.proguard_enabled:
+      # Not sure why you'd want to proguard the test apk when the under-test apk
+      # is not proguarded, but it's easy enough to support.
+      deps_info['proguard_under_test_mapping'] = ''
+
+    # Add all tested classes to the test's classpath to ensure that the test's
+    # java code is a superset of the tested apk's java code.
+    device_classpath_extended = list(device_classpath)
+    device_classpath_extended.extend(
+        p for p in tested_apk_config['device_classpath']
+        if p not in device_classpath)
+    # Include in the classpath classes that are added directly to the apk under
+    # test (those that are not a part of a java_library).
+    javac_classpath.add(tested_apk_config['unprocessed_jar_path'])
+    javac_interface_classpath.add(tested_apk_config['interface_jar_path'])
+    javac_full_classpath.add(tested_apk_config['unprocessed_jar_path'])
+    javac_full_interface_classpath.add(tested_apk_config['interface_jar_path'])
+    javac_full_classpath.update(tested_apk_config['javac_full_classpath'])
+    javac_full_interface_classpath.update(
+        tested_apk_config['javac_full_interface_classpath'])
+
+    # Exclude .jar files from the test apk that exist within the apk under test.
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps}
+    all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files]
+    tested_apk_jar_files = set(tested_apk_config['device_classpath'])
+    device_classpath = [
+        p for p in device_classpath if p not in tested_apk_jar_files
+    ]
+
+  if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+                      'android_app_bundle_module', 'android_app_bundle'):
+    deps_info['proguard_all_configs'] = sorted(set(proguard_configs))
+    deps_info['proguard_classpath_jars'] = sorted(
+        set(extra_proguard_classpath_jars))
+
+  # Dependencies for the final dex file of an apk.
+  if (is_apk_or_module_target or options.final_dex_path
+      or options.type == 'dist_jar'):
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    dex_config['path'] = options.final_dex_path
+  if is_apk_or_module_target or options.type == 'dist_jar':
+    dex_config['all_dex_files'] = all_dex_files
+
+  if is_java_target:
+    config['javac']['classpath'] = sorted(javac_classpath)
+    config['javac']['interface_classpath'] = sorted(javac_interface_classpath)
+    # Direct() will be of type 'java_annotation_processor', and so not included
+    # in All('java_library').
+    # Annotation processors run as part of the build, so they need
+    # host_jar_path.
+    config['javac']['processor_classpath'] = [
+        c['host_jar_path'] for c in processor_deps.Direct()
+        if c.get('host_jar_path')
+    ]
+    config['javac']['processor_classpath'] += [
+        c['host_jar_path'] for c in processor_deps.All('java_library')
+    ]
+    config['javac']['processor_classes'] = [
+        c['main_class'] for c in processor_deps.Direct()]
+    deps_info['javac_full_classpath'] = list(javac_full_classpath)
+    deps_info['javac_full_interface_classpath'] = list(
+        javac_full_interface_classpath)
+  elif options.type == 'android_app_bundle':
+    # Bundles require javac_full_classpath to create .aab.jar.info and
+    # javac_full_interface_classpath for lint.
+    javac_full_classpath = OrderedSet()
+    javac_full_interface_classpath = OrderedSet()
+    for d in deps.Direct('android_app_bundle_module'):
+      javac_full_classpath.update(d['javac_full_classpath'])
+      javac_full_interface_classpath.update(d['javac_full_interface_classpath'])
+      javac_full_classpath.add(d['unprocessed_jar_path'])
+      javac_full_interface_classpath.add(d['interface_jar_path'])
+    deps_info['javac_full_classpath'] = list(javac_full_classpath)
+    deps_info['javac_full_interface_classpath'] = list(
+        javac_full_interface_classpath)
+
+  if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
+                      'android_app_bundle'):
+    deps_info['device_classpath'] = device_classpath
+    if options.tested_apk_config:
+      deps_info['device_classpath_extended'] = device_classpath_extended
+
+  if options.type in ('android_apk', 'dist_jar'):
+    all_interface_jars = []
+    if options.interface_jar_path:
+      all_interface_jars.append(options.interface_jar_path)
+    all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
+
+    config['dist_jar'] = {
+      'all_interface_jars': all_interface_jars,
+    }
+
+  if is_apk_or_module_target:
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentationElements():
+      # This must then have instrumentation only for itself.
+      manifest.CheckInstrumentationElements(manifest.GetPackageName())
+
+    library_paths = []
+    java_libraries_list = None
+    if options.shared_libraries_runtime_deps:
+      library_paths = _ExtractSharedLibsFromRuntimeDeps(
+          options.shared_libraries_runtime_deps)
+      java_libraries_list = _CreateJavaLibrariesList(library_paths)
+      all_inputs.append(options.shared_libraries_runtime_deps)
+
+    secondary_abi_library_paths = []
+    if options.secondary_abi_shared_libraries_runtime_deps:
+      secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
+          options.secondary_abi_shared_libraries_runtime_deps)
+      all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
+
+    native_library_placeholder_paths = build_utils.ParseGnList(
+        options.native_lib_placeholders)
+
+    secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+        options.secondary_native_lib_placeholders)
+
+    loadable_modules = build_utils.ParseGnList(options.loadable_modules)
+    secondary_abi_loadable_modules = build_utils.ParseGnList(
+        options.secondary_abi_loadable_modules)
+
+    config['native'] = {
+        'libraries':
+        library_paths,
+        'native_library_placeholders':
+        native_library_placeholder_paths,
+        'secondary_abi_libraries':
+        secondary_abi_library_paths,
+        'secondary_native_library_placeholders':
+        secondary_native_library_placeholder_paths,
+        'java_libraries_list':
+        java_libraries_list,
+        'uncompress_shared_libraries':
+        options.uncompress_shared_libraries,
+        'library_always_compress':
+        options.library_always_compress,
+        'library_renames':
+        options.library_renames,
+        'loadable_modules':
+        loadable_modules,
+        'secondary_abi_loadable_modules':
+        secondary_abi_loadable_modules,
+    }
+    config['assets'], config['uncompressed_assets'], locale_paks = (
+        _MergeAssets(deps.All('android_assets')))
+
+    deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets(
+        config['uncompressed_assets'], locale_paks)
+
+    config['extra_android_manifests'] = []
+    for c in extra_manifest_deps:
+      config['extra_android_manifests'].extend(
+          c.get('mergeable_android_manifests', []))
+
+    # Collect java resources
+    java_resources_jars = [d['java_resources_jar'] for d in all_library_deps
+                          if 'java_resources_jar' in d]
+    if options.tested_apk_config:
+      tested_apk_resource_jars = [d['java_resources_jar']
+                                  for d in tested_apk_library_deps
+                                  if 'java_resources_jar' in d]
+      java_resources_jars = [jar for jar in java_resources_jars
+                             if jar not in tested_apk_resource_jars]
+    config['java_resources_jars'] = java_resources_jars
+
+  if options.java_resources_jar_path:
+    deps_info['java_resources_jar'] = options.java_resources_jar_path
+
+  # DYNAMIC FEATURE MODULES:
+  # Make sure that dependencies that exist on the base module
+  # are not duplicated on the feature module.
+  if base_module_build_config:
+    base = base_module_build_config
+    RemoveObjDups(config, base, 'deps_info', 'device_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
+    RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
+    RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
+    RemoveObjDups(config, base, 'extra_android_manifests')
+
+  if is_java_target:
+    jar_to_target = {}
+    _AddJarMapping(jar_to_target, [deps_info])
+    _AddJarMapping(jar_to_target, all_deps)
+    if base_module_build_config:
+      _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+    if options.tested_apk_config:
+      _AddJarMapping(jar_to_target, [tested_apk_config])
+      for jar, target in zip(tested_apk_config['javac_full_classpath'],
+                             tested_apk_config['javac_full_classpath_targets']):
+        jar_to_target[jar] = target
+
+    # Used by bytecode_processor to give a better error message when missing
+    # deps are found.
+    config['deps_info']['javac_full_classpath_targets'] = [
+        jar_to_target[x] for x in deps_info['javac_full_classpath']
+    ]
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, options.build_config,
+                             sorted(set(all_inputs)))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/gyp/write_build_config.pydeps b/src/build/android/gyp/write_build_config.pydeps
new file mode 100644
index 0000000..b1276bc
--- /dev/null
+++ b/src/build/android/gyp/write_build_config.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+write_build_config.py
diff --git a/src/build/android/gyp/write_native_libraries_java.py b/src/build/android/gyp/write_native_libraries_java.py
new file mode 100755
index 0000000..322b8b2
--- /dev/null
+++ b/src/build/android/gyp/write_native_libraries_java.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes list of native libraries to srcjar file."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+_NATIVE_LIBRARIES_TEMPLATE = """\
+// This file is autogenerated by
+//     build/android/gyp/write_native_libraries_java.py
+// Please do not change its content.
+
+package org.chromium.build;
+
+public class NativeLibraries {{
+    public static final int CPU_FAMILY_UNKNOWN = 0;
+    public static final int CPU_FAMILY_ARM = 1;
+    public static final int CPU_FAMILY_MIPS = 2;
+    public static final int CPU_FAMILY_X86 = 3;
+
+    // Set to true to enable the use of the Chromium Linker.
+    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
+    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
+    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
+
+    // This is the list of native libraries to be loaded (in the correct order)
+    // by LibraryLoader.java.
+    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
+
+    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
+}}
+"""
+
+
+def _FormatLibraryName(library_name):
+  filename = os.path.split(library_name)[1]
+  assert filename.startswith('lib')
+  assert filename.endswith('.so')
+  # Remove lib prefix and .so suffix.
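+  # e.g. (illustrative) 'out/Release/libbase.so' -> '"base"'.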
+  return '"%s"' % filename[3:-3]
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--final', action='store_true', help='Use final fields.')
+  parser.add_argument(
+      '--enable-chromium-linker',
+      action='store_true',
+      help='Enable Chromium linker.')
+  parser.add_argument(
+      '--load-library-from-apk',
+      action='store_true',
+      help='Load libraries from the APK without uncompressing them.')
+  parser.add_argument(
+      '--use-modern-linker', action='store_true', help='To use ModernLinker.')
+  parser.add_argument(
+      '--native-libraries-list', help='File with list of native libraries.')
+  parser.add_argument(
+      '--cpu-family',
+      choices={
+          'CPU_FAMILY_ARM', 'CPU_FAMILY_X86', 'CPU_FAMILY_MIPS',
+          'CPU_FAMILY_UNKNOWN'
+      },
+      required=True,
+      default='CPU_FAMILY_UNKNOWN',
+      help='CPU family.')
+  parser.add_argument(
+      '--main-component-library',
+      help='If used, the list of native libraries will only contain this '
+      'library. Dependencies are found in the library\'s "NEEDED" section.')
+
+  parser.add_argument(
+      '--output', required=True, help='Path to the generated srcjar file.')
+
+  options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+  assert (options.enable_chromium_linker or not options.load_library_from_apk)
+
+  native_libraries_list = []
+  if options.main_component_library:
+    native_libraries_list.append(
+        _FormatLibraryName(options.main_component_library))
+  elif options.native_libraries_list:
+    with open(options.native_libraries_list) as f:
+      for path in f:
+        path = path.strip()
+        native_libraries_list.append(_FormatLibraryName(path))
+
+  def bool_str(value):
+    if value:
+      return ' = true'
+    elif options.final:
+      return ' = false'
+    return ''
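+  # e.g. bool_str(True) -> ' = true'; with --final, bool_str(False) ->
+  # ' = false'; otherwise '' so the Java field defaults to false.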
+
+  format_dict = {
+      'MAYBE_FINAL': 'final ' if options.final else '',
+      'USE_LINKER': bool_str(options.enable_chromium_linker),
+      'USE_LIBRARY_IN_ZIP_FILE': bool_str(options.load_library_from_apk),
+      'USE_MODERN_LINKER': bool_str(options.use_modern_linker),
+      'LIBRARIES': ','.join(native_libraries_list),
+      'CPU_FAMILY': options.cpu_family,
+  }
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f.name, 'w') as srcjar_file:
+      build_utils.AddToZipHermetic(
+          zip_file=srcjar_file,
+          zip_path='org/chromium/build/NativeLibraries.java',
+          data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))
+
+  if options.depfile:
+    assert options.native_libraries_list
+    build_utils.WriteDepfile(options.depfile,
+                             options.output,
+                             inputs=[options.native_libraries_list])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/gyp/write_native_libraries_java.pydeps b/src/build/android/gyp/write_native_libraries_java.pydeps
new file mode 100644
index 0000000..f5176ef
--- /dev/null
+++ b/src/build/android/gyp/write_native_libraries_java.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+write_native_libraries_java.py
diff --git a/src/build/android/gyp/zip.py b/src/build/android/gyp/zip.py
new file mode 100755
index 0000000..6b40540
--- /dev/null
+++ b/src/build/android/gyp/zip.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Archives a set of files."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  # Note: |args| must not be passed to the constructor (its first positional
+  # parameter is prog); the args are parsed via parse_args() below.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--input-files', help='GN-list of files to zip.')
+  parser.add_argument(
+      '--input-files-base-dir',
+      help='Paths in the archive will be relative to this directory')
+  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
+  parser.add_argument(
+      '--input-zips-excluded-globs',
+      help='GN-list of globs for paths to exclude.')
+  parser.add_argument('--output', required=True, help='Path to output archive.')
+  compress_group = parser.add_mutually_exclusive_group()
+  compress_group.add_argument(
+      '--compress', action='store_true', help='Compress entries')
+  compress_group.add_argument(
+      '--no-compress',
+      action='store_false',
+      dest='compress',
+      help='Do not compress entries')
+  build_utils.AddDepfileOption(parser)
+  options = parser.parse_args(args)
+
+  with build_utils.AtomicOutput(options.output) as f:
+    with zipfile.ZipFile(f.name, 'w') as out_zip:
+      depfile_deps = None
+      if options.input_files:
+        files = build_utils.ParseGnList(options.input_files)
+        build_utils.DoZip(
+            files,
+            out_zip,
+            base_dir=options.input_files_base_dir,
+            compress_fn=lambda _: options.compress)
+
+      if options.input_zips:
+        files = build_utils.ParseGnList(options.input_zips)
+        depfile_deps = files
+        path_transform = None
+        if options.input_zips_excluded_globs:
+          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
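+          # path_transform returning None drops the entry from the merged
+          # zip, which is how excluded globs are filtered out.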
+          path_transform = (
+              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
+        build_utils.MergeZips(
+            out_zip,
+            files,
+            path_transform=path_transform,
+            compress=options.compress)
+
+  # Depfile used only by dist_jar().
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile,
+                             options.output,
+                             inputs=depfile_deps)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/gyp/zip.pydeps b/src/build/android/gyp/zip.pydeps
new file mode 100644
index 0000000..36affd1
--- /dev/null
+++ b/src/build/android/gyp/zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+zip.py
diff --git a/src/build/android/host_heartbeat.py b/src/build/android/host_heartbeat.py
new file mode 100755
index 0000000..4e11c5c
--- /dev/null
+++ b/src/build/android/host_heartbeat.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+import devil_chromium
+from devil.android import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+  devil_chromium.Initialize()
+
+  while True:
+    try:
+      devices = device_utils.DeviceUtils.HealthyDevices(denylist=None)
+      for d in devices:
+        d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+                          check_return=True)
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/incremental_install/BUILD.gn b/src/build/android/incremental_install/BUILD.gn
new file mode 100644
index 0000000..8d26e96
--- /dev/null
+++ b/src/build/android/incremental_install/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("bootstrap_java") {
+  sources = [
+    "java/org/chromium/incrementalinstall/BootstrapApplication.java",
+    "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
+    "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
+    "java/org/chromium/incrementalinstall/LockFile.java",
+    "java/org/chromium/incrementalinstall/Reflect.java",
+    "java/org/chromium/incrementalinstall/SecondInstrumentation.java",
+  ]
+  jacoco_never_instrument = true
+  no_build_hooks = true
+}
+
+dist_dex("apk_dex") {
+  output = "$target_out_dir/apk.dex"
+  deps = [ ":bootstrap_java" ]
+}
diff --git a/src/build/android/incremental_install/README.md b/src/build/android/incremental_install/README.md
new file mode 100644
index 0000000..9a27b8c
--- /dev/null
+++ b/src/build/android/incremental_install/README.md
@@ -0,0 +1,83 @@
+# Incremental Install
+
+Incremental Install is a way of building & deploying an APK that tries to
+minimize the time it takes to make a change and see that change running on
+device. It works best with `is_component_build=true`, and does *not* require a
+rooted device.
+
+## Building
+
+Add the gn arg:
+
+    incremental_install = true
+
+This causes all apks to be built as incremental except for denylisted ones.
+
+## Running
+
+It is not enough to `adb install` incremental apks. You must use the generated
+wrapper script:
+
+    out/Debug/bin/your_apk run
+    out/Debug/bin/run_chrome_public_test_apk  # Automatically sets --fast-local-dev
+
+# How it Works
+
+## Overview
+
+The basic idea is to sideload .dex and .so files to `/data/local/tmp` rather
+than bundling them in the .apk. Then, when making a change, only the changed
+.dex / .so needs to be pushed to the device.
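+
+For reference, the sideloaded files live under a per-package directory on the
+device (layout as used by `installer.py`):
+
+    /data/local/tmp/incremental-app-<package>/
+        dex/    # Merged .dex shards pushed by the installer.
+        lib/    # Sideloaded native libraries (.so).
+        install.lock, firstrun.lock   # Synchronize app start-up with installs.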
+
+Faster Builds:
+
+ * No `final_dex` step (where all .dex files are merged into one)
+ * No need to rebuild .apk for code-only changes (but required for resources)
+ * Apks sign faster because they are smaller.
+
+Faster Installs:
+
+ * The .apk is smaller, and so faster to verify.
+ * No need to run `adb install` for code-only changes.
+ * Only changed .so / .dex files are pushed. MD5s of existing on-device files
+   are cached on the host computer.
+
+Slower Initial Runs:
+
+ * The first time you run an incremental .apk, the `DexOpt` step needs to run
+   on all .dex files. This step is normally done during `adb install`, but is
+   done on start-up for incremental apks.
+   * DexOpt results are cached, so subsequent runs are faster.
+   * The slowdown varies significantly based on the Android version. Android O+
+     has almost no visible slow-down.
+
+Caveats:
+
+ * Isolated processes (on L+) are incompatible with incremental install. As a
+   work-around, isolated processes are disabled when building incremental apks.
+ * Android resources, assets, and `loadable_modules` are not sideloaded (they
+   remain in the apk), so builds & installs that modify any of these are not as
+   fast as those that modify only .java / .cc.
+ * Since files are sideloaded to `/data/local/tmp`, you need to use the wrapper
+   scripts to uninstall them fully. E.g.:
+   ```shell
+   out/Default/bin/chrome_public_apk uninstall
+   ```
+
+## The Code
+
+All incremental apks have the same classes.dex, which is built from:
+
+    //build/android/incremental_install:bootstrap_java
+
+They also have a transformed `AndroidManifest.xml`, which overrides the main
+application class and any instrumentation classes so that they instead point to
+`BootstrapApplication`. This is built by:
+
+    //build/android/incremental_install/generate_android_manifest.py
+
+Wrapper scripts and install logic are contained in:
+
+    //build/android/incremental_install/create_install_script.py
+    //build/android/incremental_install/installer.py
+
+Finally, GN logic for incremental apks is sprinkled throughout.
diff --git a/src/build/android/incremental_install/__init__.py b/src/build/android/incremental_install/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/src/build/android/incremental_install/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/incremental_install/generate_android_manifest.py b/src/build/android/incremental_install/generate_android_manifest.py
new file mode 100755
index 0000000..e069dab
--- /dev/null
+++ b/src/build/android/incremental_install/generate_android_manifest.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an incremental APK.
+
+Given the manifest file for the real APK, generates an AndroidManifest.xml with
+the application class changed to IncrementalApplication.
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp'))
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+
+_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
+_META_DATA_APP_NAME = 'incremental-install-real-app'
+_DEFAULT_APPLICATION_CLASS = 'android.app.Application'
+_META_DATA_INSTRUMENTATION_NAMES = [
+    'incremental-install-real-instrumentation-0',
+    'incremental-install-real-instrumentation-1',
+]
+_INCREMENTAL_INSTRUMENTATION_CLASSES = [
+    'android.app.Instrumentation',
+    'org.chromium.incrementalinstall.SecondInstrumentation',
+]
+
+
+def _AddNamespace(name):
+  """Adds the android namespace prefix to the given identifier."""
+  return '{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, name)
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--src-manifest', required=True, help='The main manifest of the app')
+  parser.add_argument('--disable-isolated-processes',
+                      help='Changes all android:isolatedProcess to false. '
+                           'This is required on Android M+',
+                      action='store_true')
+  parser.add_argument(
+      '--out-apk', required=True, help='Path to output .ap_ file')
+  parser.add_argument(
+      '--in-apk', required=True, help='Path to non-incremental .ap_ file')
+  parser.add_argument(
+      '--aapt2-path', required=True, help='Path to the Android aapt tool')
+  parser.add_argument(
+      '--android-sdk-jars', help='GN List of resource apks to include.')
+
+  ret = parser.parse_args(build_utils.ExpandFileArgs(args))
+  ret.android_sdk_jars = build_utils.ParseGnList(ret.android_sdk_jars)
+  return ret
+
+
+def _CreateMetaData(parent, name, value):
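+  # Appends <meta-data android:name="{name}" android:value="{value}"/> (with
+  # the attributes in the android: namespace) to |parent|.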
+  meta_data_node = ElementTree.SubElement(parent, 'meta-data')
+  meta_data_node.set(_AddNamespace('name'), name)
+  meta_data_node.set(_AddNamespace('value'), value)
+
+
+def _ProcessManifest(path, arsc_package_name, disable_isolated_processes):
+  doc, manifest_node, app_node = manifest_utils.ParseManifest(path)
+
+  # Ensure the manifest package matches that of the apk's arsc package
+  # so that resource references resolve correctly. The actual manifest
+  # package name is set via --rename-manifest-package.
+  manifest_node.set('package', arsc_package_name)
+
+  # Pylint for some reason thinks app_node is an int.
+  # pylint: disable=no-member
+  real_app_class = app_node.get(_AddNamespace('name'),
+                                _DEFAULT_APPLICATION_CLASS)
+  app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME)
+  # pylint: enable=no-member
+  _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class)
+
+  # Seems to be a bug in ElementTree, as doc.find() doesn't work here.
+  instrumentation_nodes = doc.findall('instrumentation')
+  assert len(instrumentation_nodes) <= 2, (
+      'Need to update incremental install to support >2 <instrumentation> tags')
+  for i, instrumentation_node in enumerate(instrumentation_nodes):
+    real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
+    instrumentation_node.set(_AddNamespace('name'),
+                             _INCREMENTAL_INSTRUMENTATION_CLASSES[i])
+    _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i],
+                    real_instrumentation_class)
+
+  ret = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
+  # Disable check for page-aligned native libraries.
+  ret = ret.replace(b'extractNativeLibs="false"', b'extractNativeLibs="true"')
+  if disable_isolated_processes:
+    ret = ret.replace(b'isolatedProcess="true"', b'isolatedProcess="false"')
+  return ret
+
+
+def main(raw_args):
+  options = _ParseArgs(raw_args)
+
+  arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path,
+                                                      options.in_apk)
+  # Extract version from the compiled manifest since it might have been set
+  # via aapt, and not exist in the manifest's text form.
+  version_code, version_name, manifest_package = (
+      resource_utils.ExtractBinaryManifestValues(options.aapt2_path,
+                                                 options.in_apk))
+
+  new_manifest_data = _ProcessManifest(options.src_manifest, arsc_package,
+                                       options.disable_isolated_processes)
+  with tempfile.NamedTemporaryFile() as tmp_manifest, \
+      tempfile.NamedTemporaryFile() as tmp_apk:
+    tmp_manifest.write(new_manifest_data)
+    tmp_manifest.flush()
+    cmd = [
+        options.aapt2_path, 'link', '-o', tmp_apk.name, '--manifest',
+        tmp_manifest.name, '-I', options.in_apk, '--replace-version',
+        '--version-code', version_code, '--version-name', version_name,
+        '--rename-manifest-package', manifest_package, '--debug-mode'
+    ]
+    for j in options.android_sdk_jars:
+      cmd += ['-I', j]
+    subprocess.check_call(cmd)
+    with zipfile.ZipFile(options.out_apk, 'w') as z:
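+      # Take only AndroidManifest.xml from the freshly linked apk, then take
+      # everything except AndroidManifest.xml from the original apk.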
+      path_transform = lambda p: None if p != 'AndroidManifest.xml' else p
+      build_utils.MergeZips(z, [tmp_apk.name], path_transform=path_transform)
+      path_transform = lambda p: None if p == 'AndroidManifest.xml' else p
+      build_utils.MergeZips(z, [options.in_apk], path_transform=path_transform)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/incremental_install/generate_android_manifest.pydeps b/src/build/android/incremental_install/generate_android_manifest.pydeps
new file mode 100644
index 0000000..568ea1e
--- /dev/null
+++ b/src/build/android/incremental_install/generate_android_manifest.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/manifest_utils.py
+../gyp/util/resource_utils.py
+generate_android_manifest.py
diff --git a/src/build/android/incremental_install/installer.py b/src/build/android/incremental_install/installer.py
new file mode 100755
index 0000000..9625822
--- /dev/null
+++ b/src/build/android/incremental_install/installer.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install *_incremental.apk targets as well as their dependent files."""
+
+import argparse
+import collections
+import functools
+import glob
+import json
+import logging
+import os
+import posixpath
+import shutil
+import sys
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_utils
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.utils import time_profile
+
+prev_sys_path = list(sys.path)
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+import dex
+from util import build_utils
+sys.path = prev_sys_path
+
+
+_R8_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib',
+                        'r8.jar')
+
+
+def _DeviceCachePath(device):
+  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+  return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def _Execute(concurrently, *funcs):
+  """Calls all functions in |funcs| concurrently or in sequence."""
+  timer = time_profile.TimeProfile()
+  if concurrently:
+    reraiser_thread.RunAsync(funcs)
+  else:
+    for f in funcs:
+      f()
+  timer.Stop(log=False)
+  return timer
+
+
+def _GetDeviceIncrementalDir(package):
+  """Returns the device path to put incremental files for the given package."""
+  return '/data/local/tmp/incremental-app-%s' % package
+
+
+def _IsStale(src_paths, dest):
+  """Returns if |dest| is older than any of |src_paths|, or missing."""
+  if not os.path.exists(dest):
+    return True
+  dest_time = os.path.getmtime(dest)
+  for path in src_paths:
+    if os.path.getmtime(path) > dest_time:
+      return True
+  return False
+
+
+def _AllocateDexShards(dex_files):
+  """Divides input dex files into buckets."""
+  # Goals:
+  # * Make shards small enough that they are fast to merge.
+  # * Minimize the number of shards so they load quickly on device.
+  # * Partition files into shards such that a change in one file results in only
+  #   one shard having to be re-created.
+  shards = collections.defaultdict(list)
+  # As of Oct 2019, 10 shards results in a min/max size of 582K/2.6M.
+  NUM_CORE_SHARDS = 10
+  # As of Oct 2019, 17 dex files are larger than 1M.
+  SHARD_THRESHOLD = 2**20
+  for src_path in dex_files:
+    if os.path.getsize(src_path) >= SHARD_THRESHOLD:
+      # Use the path as the name rather than an incrementing number to ensure
+      # that it shards to the same name every time.
+      name = os.path.relpath(src_path, constants.GetOutDirectory()).replace(
+          os.sep, '.')
+      shards[name].append(src_path)
+    else:
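+      # Small dex files are bucketed by a stable hash of their path, so a
+      # given file always lands in the same shard.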
+      name = 'shard{}.dex.jar'.format(hash(src_path) % NUM_CORE_SHARDS)
+      shards[name].append(src_path)
+  logging.info('Sharding %d dex files into %d buckets', len(dex_files),
+               len(shards))
+  return shards
+
+
+def _CreateDexFiles(shards, dex_staging_dir, min_api, use_concurrency):
+  """Creates dex files within |dex_staging_dir| defined by |shards|."""
+  tasks = []
+  for name, src_paths in shards.iteritems():
+    dest_path = os.path.join(dex_staging_dir, name)
+    if _IsStale(src_paths, dest_path):
+      tasks.append(
+          functools.partial(dex.MergeDexForIncrementalInstall, _R8_PATH,
+                            src_paths, dest_path, min_api))
+
+  # TODO(agrieve): It would be more performant to write a custom d8.jar
+  #     wrapper in java that would process these in bulk, rather than spinning
+  #     up a new process for each one.
+  _Execute(use_concurrency, *tasks)
+
+  # Remove any stale shards.
+  for name in os.listdir(dex_staging_dir):
+    if name not in shards:
+      os.unlink(os.path.join(dex_staging_dir, name))
+
+
+def Uninstall(device, package, enable_device_cache=False):
+  """Uninstalls and removes all incremental files for the given package."""
+  main_timer = time_profile.TimeProfile()
+  device.Uninstall(package)
+  if enable_device_cache:
+    # Uninstall is rare, so just wipe the cache in this case.
+    cache_path = _DeviceCachePath(device)
+    if os.path.exists(cache_path):
+      os.unlink(cache_path)
+  device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
+                         check_return=True)
+  logging.info('Uninstall took %s seconds.', main_timer.GetDelta())
+
+
+def Install(device, install_json, apk=None, enable_device_cache=False,
+            use_concurrency=True, permissions=()):
+  """Installs the given incremental apk and all required supporting files.
+
+  Args:
+    device: A DeviceUtils instance (to install to).
+    install_json: Path to .json file or already parsed .json object.
+    apk: An existing ApkHelper instance for the apk (optional).
+    enable_device_cache: Whether to enable on-device caching of checksums.
+    use_concurrency: Whether to speed things up using multiple threads.
+    permissions: A list of the permissions to grant, or None to grant all
+                 non-denylisted permissions in the manifest.
+  """
+  if isinstance(install_json, basestring):
+    with open(install_json) as f:
+      install_dict = json.load(f)
+  else:
+    install_dict = install_json
+
+  main_timer = time_profile.TimeProfile()
+  install_timer = time_profile.TimeProfile()
+  push_native_timer = time_profile.TimeProfile()
+  merge_dex_timer = time_profile.TimeProfile()
+  push_dex_timer = time_profile.TimeProfile()
+
+  def fix_path(p):
+    return os.path.normpath(os.path.join(constants.GetOutDirectory(), p))
+
+  if not apk:
+    apk = apk_helper.ToHelper(fix_path(install_dict['apk_path']))
+  split_globs = [fix_path(p) for p in install_dict['split_globs']]
+  native_libs = [fix_path(p) for p in install_dict['native_libs']]
+  dex_files = [fix_path(p) for p in install_dict['dex_files']]
+  show_proguard_warning = install_dict.get('show_proguard_warning')
+
+  apk_package = apk.GetPackageName()
+  device_incremental_dir = _GetDeviceIncrementalDir(apk_package)
+  dex_staging_dir = os.path.join(constants.GetOutDirectory(),
+                                 'incremental-install',
+                                 install_dict['apk_path'])
+  device_dex_dir = posixpath.join(device_incremental_dir, 'dex')
+
+  # Install .apk(s) if any of them have changed.
+  def do_install():
+    install_timer.Start()
+    if split_globs:
+      splits = []
+      for split_glob in split_globs:
+        splits.extend(glob.glob(split_glob))
+      device.InstallSplitApk(
+          apk,
+          splits,
+          allow_downgrade=True,
+          reinstall=True,
+          allow_cached_props=True,
+          permissions=permissions)
+    else:
+      device.Install(
+          apk, allow_downgrade=True, reinstall=True, permissions=permissions)
+    install_timer.Stop(log=False)
+
+  # Push .so and .dex files to the device (if they have changed).
+  def do_push_files():
+
+    def do_push_native():
+      push_native_timer.Start()
+      if native_libs:
+        with build_utils.TempDir() as temp_dir:
+          device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
+          for path in native_libs:
+            # Note: Can't use symlinks as they don't work when
+            # "adb push parent_dir" is used (like we do here).
+            shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
+          device.PushChangedFiles([(temp_dir, device_lib_dir)],
+                                  delete_device_stale=True)
+      push_native_timer.Stop(log=False)
+
+    def do_merge_dex():
+      merge_dex_timer.Start()
+      shards = _AllocateDexShards(dex_files)
+      build_utils.MakeDirectory(dex_staging_dir)
+      _CreateDexFiles(shards, dex_staging_dir, apk.GetMinSdkVersion(),
+                      use_concurrency)
+      merge_dex_timer.Stop(log=False)
+
+    def do_push_dex():
+      push_dex_timer.Start()
+      device.PushChangedFiles([(dex_staging_dir, device_dex_dir)],
+                              delete_device_stale=True)
+      push_dex_timer.Stop(log=False)
+
+    _Execute(use_concurrency, do_push_native, do_merge_dex)
+    do_push_dex()
+
+  def check_device_configured():
+    target_sdk_version = int(apk.GetTargetSdkVersion())
+    # Beta Q builds apply allowlist to targetSdk=28 as well.
+    if target_sdk_version >= 28 and device.build_version_sdk >= 28:
+      # In P, there are two settings:
+      #  * hidden_api_policy_p_apps
+      #  * hidden_api_policy_pre_p_apps
+      # In Q, there is just one:
+      #  * hidden_api_policy
+      if device.build_version_sdk == 28:
+        setting_name = 'hidden_api_policy_p_apps'
+      else:
+        setting_name = 'hidden_api_policy'
+      apis_allowed = ''.join(
+          device.RunShellCommand(['settings', 'get', 'global', setting_name],
+                                 check_return=True))
+      if apis_allowed.strip() not in ('0', '1'):
+        msg = """\
+Cannot use incremental installs on Android P+ without first enabling access to
+non-SDK interfaces (https://developer.android.com/preview/non-sdk-q).
+
+To enable access:
+   adb -s {0} shell settings put global {1} 0
+To restore back to default:
+   adb -s {0} shell settings delete global {1}"""
+        raise Exception(msg.format(device.serial, setting_name))
+
+  cache_path = _DeviceCachePath(device)
+  def restore_cache():
+    if not enable_device_cache:
+      return
+    if os.path.exists(cache_path):
+      logging.info('Using device cache: %s', cache_path)
+      with open(cache_path) as f:
+        device.LoadCacheData(f.read())
+      # Delete the cached file so that any exceptions cause it to be cleared.
+      os.unlink(cache_path)
+    else:
+      logging.info('No device cache present: %s', cache_path)
+
+  def save_cache():
+    if not enable_device_cache:
+      return
+    with open(cache_path, 'w') as f:
+      f.write(device.DumpCacheData())
+      logging.info('Wrote device cache: %s', cache_path)
+
+  # Create 2 lock files:
+  # * install.lock tells the app to pause on start-up (until we release it).
+  # * firstrun.lock is used by the app to pause all secondary processes until
+  #   the primary process finishes loading the .dex / .so files.
+  def create_lock_files():
+    # Creates or zeros out lock files.
+    cmd = ('D="%s";'
+           'mkdir -p $D &&'
+           'echo -n >$D/install.lock 2>$D/firstrun.lock')
+    device.RunShellCommand(
+        cmd % device_incremental_dir, shell=True, check_return=True)
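+  # e.g., for package "org.x" the command above expands to:
+  #   D="/data/local/tmp/incremental-app-org.x";mkdir -p $D &&echo -n >$D/install.lock 2>$D/firstrun.lock
+  # (the path prefix matches MANAGED_DIR_PREFIX in BootstrapApplication.java).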
+
+  # The firstrun.lock is released by the app itself.
+  def release_installer_lock():
+    device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
+                           check_return=True, shell=True)
+
+  # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
+  # been designed for multi-threading. Enabling only because this is a
+  # developer-only tool.
+  setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache,
+                         check_device_configured)
+
+  _Execute(use_concurrency, do_install, do_push_files)
+
+  finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)
+
+  logging.info(
+      'Install of %s took %s seconds (setup=%s, install=%s, lib_push=%s, '
+      'dex_merge=%s, dex_push=%s, finalize=%s)', os.path.basename(apk.path),
+      main_timer.GetDelta(), setup_timer.GetDelta(), install_timer.GetDelta(),
+      push_native_timer.GetDelta(), merge_dex_timer.GetDelta(),
+      push_dex_timer.GetDelta(), finalize_timer.GetDelta())
+  if show_proguard_warning:
+    logging.warning('Target had proguard enabled, but incremental install uses '
+                    'non-proguarded .dex files. Performance characteristics '
+                    'may differ.')
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('json_path',
+                      help='The path to the generated incremental apk .json.')
+  parser.add_argument('-d', '--device', dest='device',
+                      help='Target device for apk to install on.')
+  parser.add_argument('--uninstall',
+                      action='store_true',
+                      default=False,
+                      help='Remove the app and all side-loaded files.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--no-threading',
+                      action='store_false',
+                      default=True,
+                      dest='threading',
+                      help='Do not install and push concurrently')
+  parser.add_argument('--no-cache',
+                      action='store_false',
+                      default=True,
+                      dest='cache',
+                      help='Do not use cached information about what files are '
+                           'currently on the target device.')
+  parser.add_argument('-v',
+                      '--verbose',
+                      dest='verbose_count',
+                      default=0,
+                      action='count',
+                      help='Verbose level (multiple times for more)')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+  # Retries are annoying when commands fail for legitimate reasons. Might want
+  # to enable them if this is ever used on bots though.
+  device = device_utils.DeviceUtils.HealthyDevices(
+      device_arg=args.device,
+      default_retries=0,
+      enable_device_files_cache=True)[0]
+
+  if args.uninstall:
+    with open(args.json_path) as f:
+      install_dict = json.load(f)
+    apk = apk_helper.ToHelper(install_dict['apk_path'])
+    Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
+  else:
+    Install(device, args.json_path, enable_device_cache=args.cache,
+            use_concurrency=args.threading)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
new file mode 100644
index 0000000..f7003f2
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
@@ -0,0 +1,297 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Application that replaces itself with another Application (as defined in
+ * an AndroidManifest.xml meta-data tag). It loads the other application only
+ * after side-loading its .so and .dex files from /data/local/tmp.
+ *
+ * This class is highly dependent on the private implementation details of
+ * Android's ActivityThread.java. However, it has been tested to work with
+ * JellyBean through Marshmallow.
+ */
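+// A hypothetical manifest entry naming the real Application:
+//   <meta-data android:name="incremental-install-real-app"
+//       android:value="org.chromium.MyRealApplication" />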
+public final class BootstrapApplication extends Application {
+    private static final String TAG = "incrementalinstall";
+    private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
+    private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
+            "incremental-install-real-instrumentation-0";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
+            "incremental-install-real-instrumentation-1";
+
+    private ClassLoaderPatcher mClassLoaderPatcher;
+    private Application mRealApplication;
+    private Instrumentation mOrigInstrumentation;
+    private Instrumentation mRealInstrumentation;
+    private Object mStashedProviderList;
+    private Object mActivityThread;
+    public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.
+
+    @Override
+    protected void attachBaseContext(Context context) {
+        super.attachBaseContext(context);
+        try {
+            mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
+                    "currentActivityThread");
+            mClassLoaderPatcher = new ClassLoaderPatcher(context);
+
+            mOrigInstrumentation =
+                    (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
+            Context instContext = mOrigInstrumentation.getContext();
+            if (instContext == null) {
+                instContext = context;
+            }
+
+            // When running with an instrumentation that lives in a different package from the
+            // application, we must load the dex files and native libraries from both packages.
+            // This logic likely won't work when the instrumentation is incremental, but the app is
+            // non-incremental. This configuration isn't used right now though.
+            String appPackageName = getPackageName();
+            String instPackageName = instContext.getPackageName();
+            boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
+            Log.i(TAG, "App PackageName: " + appPackageName);
+            if (instPackageNameDiffers) {
+                Log.i(TAG, "Inst PackageName: " + instPackageName);
+            }
+
+            File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
+            File appLibDir = new File(appIncrementalRootDir, "lib");
+            File appDexDir = new File(appIncrementalRootDir, "dex");
+            File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
+            File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
+            File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
+            File instLibDir = new File(instIncrementalRootDir, "lib");
+            File instDexDir = new File(instIncrementalRootDir, "dex");
+            File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
+            File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");
+
+            boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
+                    || (instPackageNameDiffers
+                               && LockFile.installerLockExists(instFirstRunLockFile));
+            if (isFirstRun) {
+                if (mClassLoaderPatcher.mIsPrimaryProcess) {
+                    // Wait for incremental_install.py to finish.
+                    LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
+                    LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
+                } else {
+                    // Wait for the browser process to create the optimized dex files
+                    // and copy the library files.
+                    LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
+                    LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
+                }
+            }
+
+            mClassLoaderPatcher.importNativeLibs(instLibDir);
+            sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir, instPackageName);
+            if (instPackageNameDiffers) {
+                mClassLoaderPatcher.importNativeLibs(appLibDir);
+                mClassLoaderPatcher.loadDexFiles(appDexDir, appPackageName);
+            }
+
+            if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
+                LockFile.clearInstallerLock(appFirstRunLockFile);
+                if (instPackageNameDiffers) {
+                    LockFile.clearInstallerLock(instFirstRunLockFile);
+                }
+            }
+
+            // mInstrumentationAppDir is one of a set of fields that is initialized only when
+            // instrumentation is active.
+            if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
+                String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0;
+                if (mOrigInstrumentation instanceof SecondInstrumentation) {
+                    metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1;
+                }
+                mRealInstrumentation =
+                        initInstrumentation(getClassNameFromMetadata(metaDataName, instContext));
+            } else {
+                Log.i(TAG, "No instrumentation active.");
+            }
+
+            // Even when instrumentation is not enabled, ActivityThread uses a default
+            // Instrumentation instance internally. We hook it here in order to hook into the
+            // call to Instrumentation.onCreate().
+            BootstrapInstrumentation bootstrapInstrumentation = new BootstrapInstrumentation(this);
+            populateInstrumentationFields(bootstrapInstrumentation);
+            Reflect.setField(mActivityThread, "mInstrumentation", bootstrapInstrumentation);
+
+            // attachBaseContext() is called from ActivityThread#handleBindApplication() and
+            // Application#mApplication is changed right after we return. Thus, we cannot swap
+            // the Application instances until onCreate() is called.
+            String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
+            Log.i(TAG, "Instantiating " + realApplicationName);
+            Instrumentation anyInstrumentation =
+                    mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
+            mRealApplication = anyInstrumentation.newApplication(
+                    getClassLoader(), realApplicationName, context);
+
+            // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
+            // all ContentProviders. The ContentProviders break without the correct Application
+            // class being installed, so temporarily pretend there are no providers, and then
+            // instantiate them explicitly within onCreate().
+            disableContentProviders();
+            Log.i(TAG, "Waiting for Instrumentation.onCreate");
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Returns the fully-qualified class name for the given key, stored in a
+     * &lt;meta&gt; tag within the manifest.
+     */
+    private static String getClassNameFromMetadata(String key, Context context)
+            throws NameNotFoundException {
+        String pkgName = context.getPackageName();
+        ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
+                PackageManager.GET_META_DATA);
+        String value = appInfo.metaData.getString(key);
+        if (value != null && !value.contains(".")) {
+            value = pkgName + "." + value;
+        }
+        return value;
+    }
+
+    /**
+     * Instantiates and initializes the real Instrumentation class (assigned to mRealInstrumentation).
+     */
+    private Instrumentation initInstrumentation(String realInstrumentationName)
+            throws ReflectiveOperationException {
+        if (realInstrumentationName == null) {
+            // This is the case when an incremental app is used as a target for an instrumentation
+            // test. In this case, ActivityThread can instantiate the proper class just fine since
+            // it exists within the test apk (as opposed to the incremental apk-under-test).
+            Log.i(TAG, "Running with external instrumentation");
+            return null;
+        }
+        // For unit tests, the instrumentation class is replaced in the manifest by a build step
+        // because ActivityThread tries to instantiate it before we get a chance to load the
+        // incremental dex files.
+        Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
+        Instrumentation ret =
+                (Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));
+        populateInstrumentationFields(ret);
+        return ret;
+    }
+
+    /**
+     * Sets important fields on a newly created Instrumentation object by copying them from the
+     * original Instrumentation instance.
+     */
+    private void populateInstrumentationFields(Instrumentation target)
+            throws ReflectiveOperationException {
+        // Initialize the fields that are set by Instrumentation.init().
+        String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
+                "mThread", "mUiAutomationConnection", "mWatcher"};
+        for (String fieldName : initFields) {
+            Reflect.setField(target, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
+        }
+    }
+
+    /**
+     * Called by BootstrapInstrumentation from Instrumentation.onCreate().
+     * This happens regardless of whether or not instrumentation is enabled.
+     */
+    void onInstrumentationCreate(Bundle arguments) {
+        Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
+        try {
+            swapApplicationReferences();
+            enableContentProviders();
+            if (mRealInstrumentation != null) {
+                Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
+                mRealInstrumentation.onCreate(arguments);
+            }
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        try {
+            Log.i(TAG, "Application.onCreate() called.");
+            mRealApplication.onCreate();
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Nulls out ActivityThread.mBoundApplication.providers.
+     */
+    private void disableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        mStashedProviderList = Reflect.getField(data, "providers");
+        Reflect.setField(data, "providers", null);
+    }
+
+    /**
+     * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
+     * ActivityThread#installContentProviders().
+     */
+    private void enableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        Reflect.setField(data, "providers", mStashedProviderList);
+        if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
+            Log.i(TAG, "Instantiating content providers");
+            Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
+                    mStashedProviderList);
+        }
+        mStashedProviderList = null;
+    }
+
+    /**
+     * Changes all fields within framework classes that have stored a reference to this
+     * BootstrapApplication to instead store references to mRealApplication.
+     */
+    @SuppressWarnings("unchecked")
+    private void swapApplicationReferences() throws ReflectiveOperationException {
+        if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
+            Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
+        }
+
+        List<Application> allApplications =
+                (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
+        for (int i = 0; i < allApplications.size(); i++) {
+            if (allApplications.get(i) == this) {
+                allApplications.set(i, mRealApplication);
+            }
+        }
+
+        // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
+        // if not replaced.
+        Context contextImpl = mRealApplication.getBaseContext();
+        Reflect.setField(contextImpl, "mOuterContext", mRealApplication);
+
+        for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
+            Map<String, WeakReference<?>> packageMap =
+                    (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
+            for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
+                Object loadedApk = entry.getValue().get();
+                if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
+                    Reflect.setField(loadedApk, "mApplication", mRealApplication);
+                }
+            }
+        }
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 0000000..f197406
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+    private final BootstrapApplication mApp;
+
+    BootstrapInstrumentation(BootstrapApplication app) {
+        mApp = app;
+    }
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        super.onCreate(arguments);
+        mApp.onInstrumentationCreate(arguments);
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 0000000..b6d7522
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,312 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Process;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with JellyBean MR2 through Marshmallow.
+ */
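+// Typical usage (mirrors BootstrapApplication):
+//   ClassLoaderPatcher patcher = new ClassLoaderPatcher(context);
+//   patcher.importNativeLibs(libDir);
+//   DexFile[] dexFiles = patcher.loadDexFiles(dexDir, packageName);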
+final class ClassLoaderPatcher {
+    private static final String TAG = "incrementalinstall";
+    private final File mAppFilesSubDir;
+    private final ClassLoader mClassLoader;
+    private final Object mLibcoreOs;
+    private final int mProcessUid;
+    final boolean mIsPrimaryProcess;
+
+    ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+        mAppFilesSubDir =
+                new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+        mClassLoader = context.getClassLoader();
+        mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+        mProcessUid = Process.myUid();
+        mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+        Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+    }
+
+    /**
+     * Loads all dex files within |dexDir| into the app's ClassLoader.
+     */
+    @SuppressLint({
+            "SetWorldReadable",
+            "SetWorldWritable",
+    })
+    DexFile[] loadDexFiles(File dexDir, String packageName)
+            throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Installing dex files from: " + dexDir);
+
+        File optimizedDir = null;
+        boolean isAtLeastOreo = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
+
+        if (isAtLeastOreo) {
+            // In O, optimizedDirectory is ignored, and the files are always put in an "oat"
+            // directory that is a sibling to the dex files themselves. SELinux policies
+            // prevent using odex files from /data/local/tmp, so we must first copy them
+            // into the app's data directory in order to get the odex files to live there.
+            // Use a package-name subdirectory to prevent name collisions when apk-under-test is
+            // used.
+            File newDexDir = new File(mAppFilesSubDir, packageName + "-dexes");
+            if (mIsPrimaryProcess) {
+                safeCopyAllFiles(dexDir, newDexDir);
+            }
+            dexDir = newDexDir;
+        } else {
+            // The optimized dex files will be owned by this process' user.
+            // Store them within the app's data dir rather than on /data/local/tmp
+            // so that they are still deleted (by the OS) when we uninstall
+            // (even on a non-rooted device).
+            File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes");
+            File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes");
+
+            if (mIsPrimaryProcess) {
+                ensureAppFilesSubDirExists();
+                // Allows isolated processes to access the same files.
+                incrementalDexesDir.mkdir();
+                incrementalDexesDir.setReadable(true, false);
+                incrementalDexesDir.setExecutable(true, false);
+                // Create a directory for isolated processes to create directories in.
+                isolatedDexesDir.mkdir();
+                isolatedDexesDir.setWritable(true, false);
+                isolatedDexesDir.setExecutable(true, false);
+
+                optimizedDir = incrementalDexesDir;
+            } else {
+                // There is a UID check of the directory in dalvik.system.DexFile():
+                // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101
+                // Rather than have each isolated process run DexOpt though, we use
+                // symlinks within the directory to point at the browser process'
+                // optimized dex files.
+                optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid);
+                optimizedDir.mkdir();
+                // Always wipe it out and re-create for simplicity.
+                Log.i(TAG, "Creating dex file symlinks for isolated process");
+                for (File f : optimizedDir.listFiles()) {
+                    f.delete();
+                }
+                for (File f : incrementalDexesDir.listFiles()) {
+                    String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName();
+                    File from = new File(optimizedDir, f.getName());
+                    createSymlink(to, from);
+                }
+            }
+            Log.i(TAG, "Code cache dir: " + optimizedDir);
+        }
+
+        // Ignore "oat" directory.
+        // Also ignore files that sometimes show up (e.g. .jar.arm.flock).
+        File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar"));
+        if (dexFilesArr == null) {
+            throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
+        }
+
+        Log.i(TAG, "Loading " + dexFilesArr.length + " dex files");
+
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements");
+        dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements);
+        Reflect.setField(dexPathList, "dexElements", dexElements);
+
+        // Return the list of new DexFile instances for the .jars in dexPathList.
+        DexFile[] ret = new DexFile[dexFilesArr.length];
+        int startIndex = dexElements.length - dexFilesArr.length;
+        for (int i = 0; i < ret.length; ++i) {
+            ret[i] = (DexFile) Reflect.getField(dexElements[startIndex + i], "dexFile");
+        }
+        return ret;
+    }
+
+    /**
+     * Sets up all libraries within |libDir| to be loadable by System.loadLibrary().
+     */
+    @SuppressLint("SetWorldReadable")
+    void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Importing native libraries from: " + libDir);
+        if (!libDir.exists()) {
+            Log.i(TAG, "No native libs exist.");
+            return;
+        }
+        // The library copying is not necessary on older devices, but we do it anyway to
+        // simplify things (it's fast compared to dexing).
+        // https://code.google.com/p/android/issues/detail?id=79480
+        File localLibsDir = new File(mAppFilesSubDir, "lib");
+        safeCopyAllFiles(libDir, localLibsDir);
+        addNativeLibrarySearchPath(localLibsDir);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException {
+        File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock");
+        if (mIsPrimaryProcess) {
+            ensureAppFilesSubDirExists();
+            LockFile lock = LockFile.acquireRuntimeLock(lockFile);
+            if (lock == null) {
+                LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+            } else {
+                try {
+                    dstDir.mkdir();
+                    dstDir.setReadable(true, false);
+                    dstDir.setExecutable(true, false);
+                    copyChangedFiles(srcDir, dstDir);
+                } finally {
+                    lock.release();
+                }
+            }
+        } else {
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                // TODO: Work around this issue by using APK splits to install each dex / lib.
+                throw new RuntimeException("Incremental install does not work on Android M+ "
+                        + "with isolated processes. Build system should have removed this. "
+                        + "Please file a bug.");
+            }
+            // Other processes: Waits for primary process to finish copying.
+            LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private void addNativeLibrarySearchPath(File nativeLibDir) throws ReflectiveOperationException {
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+        File[] newDirs = new File[] { nativeLibDir };
+        // Switched from an array to an ArrayList in Lollipop.
+        if (currentDirs instanceof List) {
+            List<File> dirsAsList = (List<File>) currentDirs;
+            dirsAsList.add(0, nativeLibDir);
+        } else {
+            File[] dirsAsArray = (File[]) currentDirs;
+            Reflect.setField(dexPathList, "nativeLibraryDirectories",
+                    Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+        }
+
+        Object[] nativeLibraryPathElements;
+        try {
+            nativeLibraryPathElements =
+                    (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+        } catch (NoSuchFieldException e) {
+            // This field doesn't exist pre-M.
+            return;
+        }
+        Object[] additionalElements = makeNativePathElements(newDirs);
+        Reflect.setField(dexPathList, "nativeLibraryPathElements",
+                Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+                        nativeLibraryPathElements));
+    }
+
+    private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+        int numUpdated = 0;
+        File[] srcFiles = srcDir.listFiles();
+        for (File f : srcFiles) {
+            // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+            File dest = new File(dstDir, f.getName());
+            if (copyIfModified(f, dest)) {
+                numUpdated++;
+            }
+        }
+        // Delete stale files.
+        int numDeleted = 0;
+        for (File f : dstDir.listFiles()) {
+            File src = new File(srcDir, f.getName());
+            if (!src.exists()) {
+                numDeleted++;
+                f.delete();
+            }
+        }
+        String msg = String.format(Locale.US,
+                "copyChangedFiles: %d of %d updated. %d stale files removed.", numUpdated,
+                srcFiles.length, numDeleted);
+        Log.i(TAG, msg);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private static boolean copyIfModified(File src, File dest) throws IOException {
+        long lastModified = src.lastModified();
+        if (dest.exists() && dest.lastModified() == lastModified) {
+            return false;
+        }
+        Log.i(TAG, "Copying " + src + " -> " + dest);
+        FileInputStream istream = new FileInputStream(src);
+        FileOutputStream ostream = new FileOutputStream(dest);
+        ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+        istream.close();
+        ostream.close();
+        dest.setReadable(true, false);
+        dest.setExecutable(true, false);
+        dest.setLastModified(lastModified);
+        return true;
+    }
+
+    private void ensureAppFilesSubDirExists() {
+        mAppFilesSubDir.mkdir();
+        mAppFilesSubDir.setExecutable(true, false);
+    }
+
+    private void createSymlink(String to, File from) throws ReflectiveOperationException {
+        Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+    }
+
+    private static Object[] makeNativePathElements(File[] paths)
+            throws ReflectiveOperationException {
+        Object[] entries = new Object[paths.length];
+        if (Build.VERSION.SDK_INT >= 26) {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$NativeLibraryElement");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i]);
+            }
+        } else {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+            }
+        }
+        return entries;
+    }
+
+    private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+            throws ReflectiveOperationException {
+        Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+        Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+        Object[] ret =
+                Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+        File emptyDir = new File("");
+        for (int i = 0; i < files.length; ++i) {
+            File file = files[i];
+            Object dexFile;
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+                // loadDexFile requires that ret contain all previously added elements.
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory,
+                                               mClassLoader, ret);
+            } else {
+                dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory);
+            }
+            Object dexElement;
+            if (Build.VERSION.SDK_INT >= 26) {
+                dexElement = Reflect.newInstance(entryClazz, dexFile, file);
+            } else {
+                dexElement = Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+            }
+            ret[curDexElements.length + i] = dexElement;
+        }
+        return ret;
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 0000000..19d1f76
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
+ */
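+// Two locking schemes are used: "installer" locks, where a missing or
+// zero-length file counts as locked (see installerLockExists()), and
+// "runtime" locks, which hold a java.nio FileLock on the file.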
+final class LockFile {
+    private static final String TAG = "incrementalinstall";
+
+    private final File mFile;
+    private final FileOutputStream mOutputStream;
+    private final FileLock mFileLock;
+
+    private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+        mFile = file;
+        mOutputStream = outputStream;
+        mFileLock = fileLock;
+    }
+
+    /**
+     * Clears the lock file by writing to it (making it non-zero in length).
+     */
+    static void clearInstallerLock(File lockFile) throws IOException {
+        Log.i(TAG, "Clearing " + lockFile);
+        // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+        FileOutputStream os = new FileOutputStream(lockFile);
+        os.write(1);
+        os.close();
+    }
+
+    /**
+     * Waits for the given file to be non-zero in length.
+     */
+    static void waitForInstallerLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !installerLockExists(file);
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Polls |func| until it returns true, throwing a RuntimeException if timeoutMs elapses first.
+     */
+    private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+        long pollIntervalMs = 200;
+        for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+            try {
+                if (func.call()) {
+                    if (i > 0) {
+                        Log.i(TAG, "Finished waiting on lock file: " + file);
+                    }
+                    return;
+                } else if (i == 0) {
+                    Log.i(TAG, "Waiting on lock file: " + file);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            try {
+                Thread.sleep(pollIntervalMs);
+            } catch (InterruptedException e) {
+                // Should never happen.
+            }
+        }
+        throw new RuntimeException("Timed out waiting for lock file: " + file);
+    }
+
+    /**
+     * Returns whether the given lock file is missing or is in the locked state.
+     */
+    static boolean installerLockExists(File file) {
+        return !file.exists() || file.length() == 0;
+    }
+
+    /**
+     * Attempts to acquire a lock for the given file.
+     * @return The LockFile if the lock was acquired, or null otherwise.
+     */
+    static LockFile acquireRuntimeLock(File file) {
+        try {
+            FileOutputStream outputStream = new FileOutputStream(file);
+            FileLock lock = outputStream.getChannel().tryLock();
+            if (lock != null) {
+                Log.i(TAG, "Created lock file: " + file);
+                return new LockFile(file, outputStream, lock);
+            }
+            outputStream.close();
+        } catch (IOException e) {
+            // Do nothing. We didn't get the lock.
+            Log.w(TAG, "Exception trying to acquire lock " + file, e);
+        }
+        return null;
+    }
+
+    /**
+     * Waits for the given file to not exist.
+     */
+    static void waitForRuntimeLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !file.exists();
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Releases and deletes the lock file.
+     */
+    void release() throws IOException {
+        Log.i(TAG, "Deleting lock file: " + mFile);
+        mFileLock.release();
+        mOutputStream.close();
+        if (!mFile.delete()) {
+            throw new IOException("Failed to delete lock file: " + mFile);
+        }
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
new file mode 100644
index 0000000..c64dc1e
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
@@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Reflection helper methods.
+ */
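+// Example (a sketch; mirrors how ClassLoaderPatcher swaps dex elements):
+//   Object pathList = Reflect.getField(classLoader, "pathList");
+//   Reflect.setField(pathList, "dexElements", newElements);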
+final class Reflect {
+    /**
+     * Sets the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to set.
+     * @param name The name of the field to set.
+     * @param value The new value for the field.
+     */
+    static void setField(Object instance, String name, Object value)
+            throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        field.set(instance, value);
+    }
+
+    /**
+     * Retrieves the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to read.
+     * @param name The name of the field to read.
+     * @return The field's value. Primitive values are returned as their boxed
+     *         type.
+     */
+    static Object getField(Object instance, String name) throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        return field.get(instance);
+    }
+
+    /**
+     * Concatenates two arrays into a new array whose component type is taken
+     * from |arrType|. The arrays must be of compatible types.
+     */
+    static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+        Object[] result = (Object[]) Array.newInstance(
+                arrType.getClass().getComponentType(), left.length + right.length);
+        System.arraycopy(left, 0, result, 0, left.length);
+        System.arraycopy(right, 0, result, left.length, right.length);
+        return result;
+    }
+
+    /**
+     * Invokes a method with zero or more parameters. For static methods, use the Class as the
+     * instance.
+     */
+    static Object invokeMethod(Object instance, String name, Object... params)
+            throws ReflectiveOperationException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        Method method = findMethod(clazz, name, params);
+        method.setAccessible(true);
+        return method.invoke(instance, params);
+    }
+
+    /**
+     * Calls a constructor with zero or more parameters.
+     */
+    static Object newInstance(Class<?> clazz, Object... params)
+            throws ReflectiveOperationException {
+        Constructor<?> constructor = findConstructor(clazz, params);
+        constructor.setAccessible(true);
+        return constructor.newInstance(params);
+    }
+
+    private static Field findField(Object instance, String name) throws NoSuchFieldException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        for (; clazz != null; clazz = clazz.getSuperclass()) {
+            try {
+                return clazz.getDeclaredField(name);
+            } catch (NoSuchFieldException e) {
+                // Need to look in the super class.
+            }
+        }
+        throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+    }
+
+    private static Method findMethod(Class<?> clazz, String name, Object... params)
+            throws NoSuchMethodException {
+        // Use a separate loop variable so the original class remains available
+        // for the error message below (the loop ends with the variable at null).
+        for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
+            for (Method method : c.getDeclaredMethods()) {
+                if (method.getName().equals(name)
+                        && areParametersCompatible(method.getParameterTypes(), params)) {
+                    return method;
+                }
+            }
+        }
+        throw new NoSuchMethodException("Method " + name + " with parameters "
+                + Arrays.asList(params) + " not found in " + clazz);
+    }
+
+    private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+            throws NoSuchMethodException {
+        for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+            if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+                return constructor;
+            }
+        }
+        throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+                + " not found in " + clazz);
+    }
+
+    private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+        if (params.length != paramTypes.length) {
+            return false;
+        }
+        for (int i = 0; i < params.length; i++) {
+            if (!isAssignableFrom(paramTypes[i], params[i])) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private static boolean isAssignableFrom(Class<?> left, Object right) {
+        if (right == null) {
+            return !left.isPrimitive();
+        }
+        Class<?> rightClazz = right.getClass();
+        if (left.isPrimitive()) {
+            // TODO(agrieve): Fill in the rest as needed.
+            return left == boolean.class && rightClazz == Boolean.class
+                   || left == int.class && rightClazz == Integer.class;
+        }
+        return left.isAssignableFrom(rightClazz);
+    }
+}
diff --git a/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
new file mode 100644
index 0000000..3e0df05
--- /dev/null
+++ b/src/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
@@ -0,0 +1,12 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+
+/**
+ * Exists to support an app having multiple instrumentations.
+ */
+public final class SecondInstrumentation extends Instrumentation {}
diff --git a/src/build/android/incremental_install/write_installer_json.py b/src/build/android/incremental_install/write_installer_json.py
new file mode 100755
index 0000000..cf1d2d4
--- /dev/null
+++ b/src/build/android/incremental_install/write_installer_json.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a .json file with the per-apk details for an incremental install."""
+
+import argparse
+import json
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output-path',
+                      help='Output path for .json file.',
+                      required=True)
+  parser.add_argument('--apk-path',
+                      help='Path to .apk relative to output directory.',
+                      required=True)
+  parser.add_argument('--split',
+                      action='append',
+                      dest='split_globs',
+                      default=[],
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument(
+      '--native-libs',
+      action='append',
+      help='GN-list of paths to native libraries relative to '
+      'output directory. Can be repeated.')
+  parser.add_argument(
+      '--dex-files', help='GN-list of dex paths relative to output directory.')
+  parser.add_argument('--show-proguard-warning',
+                      action='store_true',
+                      default=False,
+                      help='Print a warning about proguard being disabled')
+
+  options = parser.parse_args(args)
+  options.dex_files = build_utils.ParseGnList(options.dex_files)
+  options.native_libs = build_utils.ParseGnList(options.native_libs)
+  return options
+
+
+def main(args):
+  options = _ParseArgs(args)
+
+  data = {
+      'apk_path': options.apk_path,
+      'native_libs': options.native_libs,
+      'dex_files': options.dex_files,
+      'show_proguard_warning': options.show_proguard_warning,
+      'split_globs': options.split_globs,
+  }
+
+  with build_utils.AtomicOutput(options.output_path, mode='w+') as f:
+    json.dump(data, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/android/incremental_install/write_installer_json.pydeps b/src/build/android/incremental_install/write_installer_json.pydeps
new file mode 100644
index 0000000..11a263f
--- /dev/null
+++ b/src/build/android/incremental_install/write_installer_json.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+write_installer_json.py
diff --git a/src/build/android/java/templates/BuildConfig.template b/src/build/android/java/templates/BuildConfig.template
new file mode 100644
index 0000000..8953ad5
--- /dev/null
+++ b/src/build/android/java/templates/BuildConfig.template
@@ -0,0 +1,95 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
+
+#if defined(USE_FINAL)
+#define MAYBE_FINAL final
+#define MAYBE_ZERO = 0
+#define MAYBE_FALSE = false
+#else
+#define MAYBE_FINAL
+#define MAYBE_ZERO
+#define MAYBE_FALSE
+#endif
+
+/**
+ *  Build configuration. Generated on a per-target basis.
+ */
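+// With USE_FINAL defined, each flag becomes a compile-time constant, e.g.:
+//   public static final boolean ENABLE_ASSERTS = true;
+// Without USE_FINAL, fields are left non-final and default-initialized
+// (false / 0).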
+public class BuildConfig {
+
+#if defined(ENABLE_MULTIDEX)
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED = true;
+#else
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED MAYBE_FALSE;
+#endif
+
+#if defined(_ENABLE_ASSERTS)
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS = true;
+#else
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS MAYBE_FALSE;
+#endif
+
+#if defined(_IS_UBSAN)
+    public static MAYBE_FINAL boolean IS_UBSAN = true;
+#else
+    public static MAYBE_FINAL boolean IS_UBSAN MAYBE_FALSE;
+#endif
+
+#if defined(_IS_CHROME_BRANDED)
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED = true;
+#else
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED MAYBE_FALSE;
+#endif
+
+    // The ID of the android string resource that stores the product version.
+    // This layer of indirection is necessary to make the resource dependency
+    // optional for android_apk targets/base_java (ex. for cronet).
+#if defined(_RESOURCES_VERSION_VARIABLE)
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION = _RESOURCES_VERSION_VARIABLE;
+#else
+    // Default value, do not use.
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION MAYBE_ZERO;
+#endif
+
+    // Minimum SDK Version supported by this apk.
+    // Be cautious when using this value, as older apks can end up installed
+    // on newer Android versions (e.g. when a device goes through a system
+    // upgrade). It is also convenient during development to have all
+    // features available through a single APK.
+    // However, it's pretty safe to assume that a feature specific to KitKat
+    // will never be needed in an APK with MIN_SDK_VERSION = Oreo.
+#if defined(_MIN_SDK_VERSION)
+    public static MAYBE_FINAL int MIN_SDK_VERSION = _MIN_SDK_VERSION;
+#else
+    public static MAYBE_FINAL int MIN_SDK_VERSION = 1;
+#endif
+
+#if defined(_BUNDLES_SUPPORTED)
+    public static MAYBE_FINAL boolean BUNDLES_SUPPORTED = true;
+#else
+    public static MAYBE_FINAL boolean BUNDLES_SUPPORTED MAYBE_FALSE;
+#endif
+
+#if defined(_IS_INCREMENTAL_INSTALL)
+    public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL = true;
+#else
+    public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL MAYBE_FALSE;
+#endif
+
+#if defined(_IS_CHROMECAST_BRANDING_INTERNAL)
+    public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL = true;
+#else
+    public static MAYBE_FINAL boolean IS_CHROMECAST_BRANDING_INTERNAL MAYBE_FALSE;
+#endif
+
+#if defined(_ISOLATED_SPLITS_ENABLED)
+    public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED = true;
+#else
+    public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED MAYBE_FALSE;
+#endif
+}
diff --git a/src/build/android/java/templates/ProductConfig.template b/src/build/android/java/templates/ProductConfig.template
new file mode 100644
index 0000000..4bc0d52
--- /dev/null
+++ b/src/build/android/java/templates/ProductConfig.template
@@ -0,0 +1,34 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package PACKAGE;
+
+#if defined(USE_FINAL)
+#define MAYBE_FINAL final
+#define MAYBE_USE_CHROMIUM_LINKER = USE_CHROMIUM_LINKER_VALUE
+#define MAYBE_USE_MODERN_LINKER = USE_MODERN_LINKER_VALUE
+#define MAYBE_IS_BUNDLE = IS_BUNDLE_VALUE
+#else
+#define MAYBE_FINAL
+#define MAYBE_USE_CHROMIUM_LINKER
+#define MAYBE_USE_MODERN_LINKER
+#define MAYBE_IS_BUNDLE
+#endif
+
+/**
+ *  Product configuration. Generated on a per-target basis.
+ */
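+// LOCALE_LIST and the *_VALUE tokens are substituted by the build; e.g.
+// LOCALE_LIST might expand to {"en-US", "fr"} (hypothetical values).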
+public class ProductConfig {
+    // Sorted list of locales that have an uncompressed .pak within assets.
+    // Stored as an array because AssetManager.list() is slow.
+#if defined(LOCALE_LIST)
+    public static final String[] LOCALES = LOCALE_LIST;
+#else
+    public static final String[] LOCALES = {};
+#endif
+
+    public static MAYBE_FINAL boolean USE_CHROMIUM_LINKER MAYBE_USE_CHROMIUM_LINKER;
+    public static MAYBE_FINAL boolean USE_MODERN_LINKER MAYBE_USE_MODERN_LINKER;
+    public static MAYBE_FINAL boolean IS_BUNDLE MAYBE_IS_BUNDLE;
+}
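Continuing the same hypothetical expansion for ProductConfig.template: a release build would define USE_FINAL so the fields become compile-time constants, while an incremental-install build would leave it undefined so the fields stay mutable and can be assigned at runtime. The define values below are examples only:

# Example defines for ProductConfig.template (all values are illustrative).
expand_template('ProductConfig.template', 'ProductConfig.java', {
    'PACKAGE': 'org.chromium.example',   # hypothetical package name
    'USE_FINAL': None,                   # omit for incremental install
    'USE_CHROMIUM_LINKER_VALUE': 'true',
    'USE_MODERN_LINKER_VALUE': 'false',
    'IS_BUNDLE_VALUE': 'false',
})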
diff --git a/src/build/android/java/test/DefaultLocaleLintTest.java b/src/build/android/java/test/DefaultLocaleLintTest.java
new file mode 100644
index 0000000..2193429
--- /dev/null
+++ b/src/build/android/java/test/DefaultLocaleLintTest.java
@@ -0,0 +1,17 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test;
+
+import android.app.Application;
+
+/**
+ * Class which fails 'DefaultLocale' lint check.
+ */
+public class LintTest extends Application {
+    public String testTriggerDefaultLocaleCheck(int any) {
+        // String format with an integer requires a Locale since it may be formatted differently.
+        return String.format("Test %d", any);
+    }
+}
diff --git a/src/build/android/java/test/NewApiLintTest.java b/src/build/android/java/test/NewApiLintTest.java
new file mode 100644
index 0000000..6c68dd8
--- /dev/null
+++ b/src/build/android/java/test/NewApiLintTest.java
@@ -0,0 +1,17 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test;
+
+import android.app.Application;
+
+/**
+ * Class which fails 'NewAPI' lint check.
+ */
+public class NewApiTest extends Application {
+    public String testTriggerNewApiCheck() {
+        // This was added in API level 30.
+        return getApplicationContext().getAttributionTag();
+    }
+}
diff --git a/src/build/android/lighttpd_server.py b/src/build/android/lighttpd_server.py
new file mode 100755
index 0000000..42fbcdb
--- /dev/null
+++ b/src/build/android/lighttpd_server.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+from __future__ import print_function
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      # pylint: disable=no-member
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print('Client error:', client_error)
+        print('Server error:', server_error)
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably connected too quickly: try again.
+      # Check for server startup error messages
+      # pylint: disable=no-member
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0) # pylint: disable=no-member
+      elif ix == 1:  # EOF -- server has quit, so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handled via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print('Server exit code:', server.process.exitstatus)
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
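Besides the interactive main() entry point above, the class can be driven from another Python test harness. A minimal sketch, assuming the file is importable as the module lighttpd_server and that a document root exists at the (hypothetical) path given:

from lighttpd_server import LighttpdServer

server = LighttpdServer('/tmp/doc_root')  # hypothetical document root
try:
  if server.StartupHttpServer():
    # The port may have been rebound to a random 8001-8999 port if 9000 was
    # taken, so always read it back from the instance.
    print('Serving at http://127.0.0.1:%s' % server.port)
    # ... issue test requests here ...
  else:
    print('lighttpd failed to start')
finally:
  server.ShutdownHttpServer()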
diff --git a/src/build/android/list_class_verification_failures.py b/src/build/android/list_class_verification_failures.py
new file mode 100755
index 0000000..508e831
--- /dev/null
+++ b/src/build/android/list_class_verification_failures.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A helper script to list class verification errors.
+
+This is a wrapper around the device's oatdump executable, parsing desired output
+and accommodating API-level-specific details, such as file paths.
+"""
+
+from __future__ import print_function
+
+import argparse
+import exceptions
+import logging
+import os
+import re
+
+import devil_chromium
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+from devil.android.tools import script_common
+from devil.utils import logging_common
+from py_utils import tempfile_ext
+
+STATUSES = [
+    'NotReady',
+    'RetryVerificationAtRuntime',
+    'Verified',
+    'Initialized',
+    'SuperclassValidated',
+]
+
+
+def DetermineDeviceToUse(devices):
+  """Like DeviceUtils.HealthyDevices(), but only allow a single device.
+
+  Args:
+    devices: A (possibly empty) list of serial numbers, such as from the
+        --device flag.
+  Returns:
+    A single device_utils.DeviceUtils instance.
+  Raises:
+    device_errors.NoDevicesError: Raised when no non-denylisted devices exist.
+    device_errors.MultipleDevicesError: Raised when multiple devices exist, but
+        |devices| does not distinguish which to use.
+  """
+  if not devices:
+    # If the user did not specify which device, we let HealthyDevices raise
+    # MultipleDevicesError.
+    devices = None
+  usable_devices = device_utils.DeviceUtils.HealthyDevices(device_arg=devices)
+  # If the user specified more than one device, we still only want to support a
+  # single device, so we explicitly raise MultipleDevicesError.
+  if len(usable_devices) > 1:
+    raise device_errors.MultipleDevicesError(usable_devices)
+  return usable_devices[0]
+
+
+class DeviceOSError(Exception):
+  """Raised when a file is missing from the device, or something similar."""
+  pass
+
+
+class UnsupportedDeviceError(Exception):
+  """Raised when the device is not supported by this script."""
+  pass
+
+
+def _GetFormattedArch(device):
+  abi = device.product_cpu_abi
+  # Some architectures don't map 1:1 with the folder names.
+  return {abis.ARM_64: 'arm64', abis.ARM: 'arm'}.get(abi, abi)
+
+
+def PathToDexForPlatformVersion(device, package_name):
+  """Gets the full path to the dex file on the device."""
+  sdk_level = device.build_version_sdk
+  paths_to_apk = device.GetApplicationPaths(package_name)
+  if not paths_to_apk:
+    raise DeviceOSError(
+        'Could not find data directory for {}. Is it installed?'.format(
+            package_name))
+  if len(paths_to_apk) != 1:
+    raise DeviceOSError(
+        'Expected exactly one path for {} but found {}'.format(
+            package_name,
+            paths_to_apk))
+  path_to_apk = paths_to_apk[0]
+
+  if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1:
+    # Of the form "com.example.foo-\d", where \d is some digit (usually 1 or 2)
+    package_with_suffix = os.path.basename(os.path.dirname(path_to_apk))
+    arch = _GetFormattedArch(device)
+    dalvik_prefix = '/data/dalvik-cache/{arch}'.format(arch=arch)
+    odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format(
+        prefix=dalvik_prefix,
+        package=package_with_suffix)
+  elif sdk_level >= version_codes.MARSHMALLOW:
+    arch = _GetFormattedArch(device)
+    odex_file = '{data_dir}/oat/{arch}/base.odex'.format(
+        data_dir=os.path.dirname(path_to_apk), arch=arch)
+  else:
+    raise UnsupportedDeviceError('Unsupported API level: {}'.format(sdk_level))
+
+  odex_file_exists = device.FileExists(odex_file)
+  if odex_file_exists:
+    return odex_file
+  elif sdk_level >= version_codes.PIE:
+    raise DeviceOSError(
+        'Unable to find odex file: you must run dex2oat on debuggable apps '
+        'on >= P after installation.')
+  raise DeviceOSError('Unable to find odex file ' + odex_file)
+
+
+def _AdbOatDumpForPackage(device, package_name, out_file):
+  """Runs oatdump on the device."""
+  # Get the path to the odex file.
+  odex_file = PathToDexForPlatformVersion(device, package_name)
+  device.RunShellCommand(
+      ['oatdump', '--oat-file=' + odex_file, '--output=' + out_file],
+      timeout=420,
+      shell=True,
+      check_return=True)
+
+
+class JavaClass(object):
+  """This represents a Java Class and its ART Class Verification status."""
+
+  def __init__(self, name, verification_status):
+    self.name = name
+    self.verification_status = verification_status
+
+
+def _ParseMappingFile(proguard_map_file):
+  """Creates a map of obfuscated names to deobfuscated names."""
+  mappings = {}
+  with open(proguard_map_file, 'r') as f:
+    pattern = re.compile(r'^(\S+) -> (\S+):')
+    for line in f:
+      m = pattern.match(line)
+      if m is not None:
+        deobfuscated_name = m.group(1)
+        obfuscated_name = m.group(2)
+        mappings[obfuscated_name] = deobfuscated_name
+  return mappings
+
+
+def _DeobfuscateJavaClassName(dex_code_name, proguard_mappings):
+  return proguard_mappings.get(dex_code_name, dex_code_name)
+
+
+def FormatJavaClassName(dex_code_name, proguard_mappings):
+  obfuscated_name = dex_code_name.replace('/', '.')
+  if proguard_mappings is not None:
+    return _DeobfuscateJavaClassName(obfuscated_name, proguard_mappings)
+  else:
+    return obfuscated_name
+
+
+def ListClassesAndVerificationStatus(oatdump_output, proguard_mappings):
+  """Lists all Java classes in the dex along with verification status."""
+  java_classes = []
+  pattern = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*')
+  for line in oatdump_output:
+    m = pattern.match(line)
+    if m is not None:
+      name = FormatJavaClassName(m.group(1), proguard_mappings)
+      # Some platform levels prefix this with "Status" while other levels do
+      # not. Strip this for consistency.
+      verification_status = m.group(2).replace('Status', '')
+      java_classes.append(JavaClass(name, verification_status))
+  return java_classes
+
+
+def _PrintVerificationResults(target_status, java_classes, show_summary):
+  """Prints results for user output."""
+  # Sort to keep output consistent between runs.
+  java_classes.sort(key=lambda c: c.name)
+  d = {}
+  for status in STATUSES:
+    d[status] = 0
+
+  for java_class in java_classes:
+    if java_class.verification_status == target_status:
+      print(java_class.name)
+    if java_class.verification_status not in d:
+      raise exceptions.RuntimeError('Unexpected status: {0}'.format(
+          java_class.verification_status))
+    else:
+      d[java_class.verification_status] += 1
+
+  if show_summary:
+    for status in d:
+      count = d[status]
+      print('Total {status} classes: {num}'.format(
+          status=status, num=count))
+    print('Total number of classes: {num}'.format(
+        num=len(java_classes)))
+
+
+def RealMain(mapping, device_arg, package, status, hide_summary, workdir):
+  if mapping is None:
+    logging.warn('Skipping deobfuscation because no map file was provided.')
+  device = DetermineDeviceToUse(device_arg)
+  device.EnableRoot()
+  with device_temp_file.DeviceTempFile(
+      device.adb) as file_on_device:
+    _AdbOatDumpForPackage(device, package, file_on_device.name)
+    file_on_host = os.path.join(workdir, 'out.dump')
+    device.PullFile(file_on_device.name, file_on_host, timeout=220)
+  proguard_mappings = (_ParseMappingFile(mapping) if mapping else None)
+  with open(file_on_host, 'r') as f:
+    java_classes = ListClassesAndVerificationStatus(f, proguard_mappings)
+    _PrintVerificationResults(status, java_classes, not hide_summary)
+
+
+def main():
+  parser = argparse.ArgumentParser(description="""
+List Java classes in an APK which fail ART class verification.
+""")
+  parser.add_argument(
+      '--package',
+      '-P',
+      type=str,
+      default=None,
+      required=True,
+      help='Specify the full application package name')
+  parser.add_argument(
+      '--mapping',
+      '-m',
+      type=os.path.realpath,
+      default=None,
+      help='Mapping file for the desired APK to deobfuscate class names')
+  parser.add_argument(
+      '--hide-summary',
+      default=False,
+      action='store_true',
+      help='Do not output the total number of classes in each Status.')
+  parser.add_argument(
+      '--status',
+      type=str,
+      default='RetryVerificationAtRuntime',
+      choices=STATUSES,
+      help='Which category of classes to list at the end of the script')
+  parser.add_argument(
+      '--workdir',
+      '-w',
+      type=os.path.realpath,
+      default=None,
+      help=('Work directory for oatdump output (default = temporary '
+            'directory). If specified, this will not be cleaned up at the end '
+            'of the script (useful if you want to inspect oatdump output '
+            'manually)'))
+
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args = parser.parse_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  if args.workdir:
+    if not os.path.isdir(args.workdir):
+      raise RuntimeError('Specified working directory does not exist')
+    RealMain(args.mapping, args.devices, args.package, args.status,
+             args.hide_summary, args.workdir)
+    # Assume the user wants the workdir to persist (useful for debugging).
+    logging.warn('Not cleaning up explicitly-specified workdir: %s',
+                 args.workdir)
+  else:
+    with tempfile_ext.NamedTemporaryDirectory() as workdir:
+      RealMain(args.mapping, args.devices, args.package, args.status,
+               args.hide_summary, workdir)
+
+
+if __name__ == '__main__':
+  main()
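_ParseMappingFile() above expects standard ProGuard/R8 mapping output, where each class line has the form "original.Name -> obfuscated.name:" and member lines are indented. A small self-contained sketch of that parse, with invented class names:

import re

# Invented sample of ProGuard mapping-file content.
SAMPLE_MAPPING = (
    'org.chromium.example.Feature -> a.b:\n'
    '    int mCount -> a\n')

pattern = re.compile(r'^(\S+) -> (\S+):')  # same pattern as the script uses
mappings = {}
for line in SAMPLE_MAPPING.splitlines():
  m = pattern.match(line)
  if m:  # member lines are indented, so they don't match
    mappings[m.group(2)] = m.group(1)

assert mappings == {'a.b': 'org.chromium.example.Feature'}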
diff --git a/src/build/android/list_class_verification_failures_test.py b/src/build/android/list_class_verification_failures_test.py
new file mode 100644
index 0000000..4248064
--- /dev/null
+++ b/src/build/android/list_class_verification_failures_test.py
@@ -0,0 +1,236 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import list_class_verification_failures as list_verification
+
+import devil_chromium  # pylint: disable=unused-import
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+
+import mock  # pylint: disable=import-error
+
+
+def _CreateOdexLine(java_class_name, type_idx, verification_status):
+  """Create a rough approximation of a line of oatdump output."""
+  return ('{type_idx}: L{java_class}; (offset=0xac) (type_idx={type_idx}) '
+          '({verification}) '
+          '(OatClassNoneCompiled)'.format(type_idx=type_idx,
+                                          java_class=java_class_name,
+                                          verification=verification_status))
+
+
+def _ClassForName(name, classes):
+  return next(c for c in classes if c.name == name)
+
+
+class _DetermineDeviceToUseTest(unittest.TestCase):
+
+  def testDetermineDeviceToUse_emptyListWithOneAttachedDevice(self):
+    fake_attached_devices = ['123']
+    user_specified_devices = []
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        return_value=fake_attached_devices)
+    result = list_verification.DetermineDeviceToUse(user_specified_devices)
+    self.assertEqual(result, fake_attached_devices[0])
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+    # pylint: enable=no-member
+
+  def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self):
+    user_specified_devices = []
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        side_effect=device_errors.NoDevicesError())
+    with self.assertRaises(device_errors.NoDevicesError) as _:
+      list_verification.DetermineDeviceToUse(user_specified_devices)
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+    # pylint: enable=no-member
+
+  def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self):
+    user_specified_devices = ['123']
+    fake_attached_devices = ['123']
+    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+        return_value=fake_attached_devices)
+    result = list_verification.DetermineDeviceToUse(user_specified_devices)
+    self.assertEqual(result, fake_attached_devices[0])
+    # pylint: disable=no-member
+    device_utils.DeviceUtils.HealthyDevices.assert_called_with(
+        device_arg=user_specified_devices)
+    # pylint: enable=no-member
+
+
+class _ListClassVerificationFailuresTest(unittest.TestCase):
+
+  def testPathToDexForPlatformVersion_noPaths(self):
+    sdk_int = version_codes.LOLLIPOP
+    paths_to_apk = []
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('Could not find data directory', message)
+
+  def testPathToDexForPlatformVersion_multiplePaths(self):
+    sdk_int = version_codes.LOLLIPOP
+    paths_to_apk = ['/first/path', '/second/path']
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('Expected exactly one path for', message)
+
+  def testPathToDexForPlatformVersion_dalvikApiLevel(self):
+    sdk_int = version_codes.KITKAT
+    paths_to_apk = ['/some/path']
+    package_name = 'package.name'
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+    with self.assertRaises(list_verification.UnsupportedDeviceError) as _:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+
+  def testPathToDexForPlatformVersion_lollipopArm(self):
+    sdk_int = version_codes.LOLLIPOP
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = 'arm'
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     ('/data/dalvik-cache/arm/data@app'
+                      '@package.name-1@base.apk@classes.dex'))
+
+  def testPathToDexForPlatformVersion_marshmallowArm(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = 'arm'
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     '/some/path/package.name-1/oat/arm/base.odex')
+
+  def testPathToDexForPlatformVersion_marshmallowArm64(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=True)
+
+    odex_file = list_verification.PathToDexForPlatformVersion(device,
+                                                              package_name)
+    self.assertEqual(odex_file,
+                     '/some/path/package.name-1/oat/arm64/base.odex')
+
+  def testPathToDexForPlatformVersion_pieNoOdexFile(self):
+    sdk_int = version_codes.PIE
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=False)
+
+    with self.assertRaises(list_verification.DeviceOSError) as cm:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+    message = str(cm.exception)
+    self.assertIn('you must run dex2oat on debuggable apps on >= P', message)
+
+  def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self):
+    sdk_int = version_codes.MARSHMALLOW
+    package_name = 'package.name'
+    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+    arch = abis.ARM_64
+
+    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+    device.FileExists = mock.MagicMock(return_value=False)
+
+    with self.assertRaises(list_verification.DeviceOSError) as _:
+      list_verification.PathToDexForPlatformVersion(device, package_name)
+
+  def testListClasses_noProguardMap(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.JavaClass1', 6, 'StatusVerified'),
+        _CreateOdexLine('a.b.JavaClass2', 7,
+                        'StatusRetryVerificationAtRuntime'),
+    ]
+
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 None)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+  def testListClasses_proguardMap(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.ObfuscatedJavaClass1', 6, 'StatusVerified'),
+        _CreateOdexLine('a.b.ObfuscatedJavaClass2', 7,
+                        'StatusRetryVerificationAtRuntime'),
+    ]
+
+    mapping = {
+        'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1',
+        'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2',
+    }
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 mapping)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+  def testListClasses_noStatusPrefix(self):
+    oatdump_output = [
+        _CreateOdexLine('a.b.JavaClass1', 6, 'Verified'),
+        _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'),
+    ]
+
+    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+                                                                 None)
+    self.assertEqual(2, len(classes))
+    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+    self.assertEqual(java_class_1.verification_status, 'Verified')
+    self.assertEqual(java_class_2.verification_status,
+                     'RetryVerificationAtRuntime')
+
+if __name__ == '__main__':
+  # Suppress logging messages.
+  unittest.main(buffer=True)
diff --git a/src/build/android/list_java_targets.py b/src/build/android/list_java_targets.py
new file mode 100755
index 0000000..d0689a6
--- /dev/null
+++ b/src/build/android/list_java_targets.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Lint as: python3
+"""Prints out available java targets.
+
+Examples:
+# List GN target for bundles:
+build/android/list_java_targets.py -C out/Default --type android_app_bundle \
+--gn-labels
+
+# List all android targets with types:
+build/android/list_java_targets.py -C out/Default --print-types
+
+# Build all apk targets:
+build/android/list_java_targets.py -C out/Default --type android_apk | xargs \
+autoninja -C out/Default
+
+# Show how many of each target type exist:
+build/android/list_java_targets.py -C out/Default --stats
+
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import subprocess
+import sys
+
+_SRC_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..',
+                                          '..'))
+sys.path.append(os.path.join(_SRC_ROOT, 'build', 'android'))
+from pylib import constants
+
+_VALID_TYPES = (
+    'android_apk',
+    'android_app_bundle',
+    'android_app_bundle_module',
+    'android_assets',
+    'android_resources',
+    'dist_aar',
+    'dist_jar',
+    'group',
+    'java_annotation_processor',
+    'java_binary',
+    'java_library',
+    'junit_binary',
+    'system_java_library',
+)
+
+
+def _run_ninja(output_dir, args):
+  cmd = [
+      'autoninja',
+      '-C',
+      output_dir,
+  ]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.run(cmd, check=True, stdout=sys.stderr)
+
+
+def _query_for_build_config_targets(output_dir):
+  # Query ninja rather than GN since it's faster.
+  cmd = ['ninja', '-C', output_dir, '-t', 'targets']
+  logging.info('Running: %r', cmd)
+  ninja_output = subprocess.run(cmd,
+                                check=True,
+                                capture_output=True,
+                                encoding='ascii').stdout
+  ret = []
+  SUFFIX = '__build_config_crbug_908819'
+  SUFFIX_LEN = len(SUFFIX)
+  for line in ninja_output.splitlines():
+    ninja_target = line.rsplit(':', 1)[0]
+    # Ignore root aliases by ensuring a : exists.
+    if ':' in ninja_target and ninja_target.endswith(SUFFIX):
+      ret.append(f'//{ninja_target[:-SUFFIX_LEN]}')
+  return ret
+
+
+class _TargetEntry(object):
+  def __init__(self, gn_target):
+    assert gn_target.startswith('//'), f'{gn_target} does not start with //'
+    assert ':' in gn_target, f'Non-root {gn_target} required'
+    self.gn_target = gn_target
+    self._build_config = None
+
+  @property
+  def ninja_target(self):
+    return self.gn_target[2:]
+
+  @property
+  def ninja_build_config_target(self):
+    return self.ninja_target + '__build_config_crbug_908819'
+
+  @property
+  def build_config_path(self):
+    """Returns the filepath of the project's .build_config."""
+    ninja_target = self.ninja_target
+    # Support targets at the root level, e.g. //:foo.
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    subpath = ninja_target.replace(':', os.path.sep) + '.build_config'
+    return os.path.join(constants.GetOutDirectory(), 'gen', subpath)
+
+  def build_config(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      with open(self.build_config_path) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def get_type(self):
+    """Returns the target type from its .build_config."""
+    return self.build_config()['deps_info']['type']
+
+  def proguard_enabled(self):
+    """Returns whether proguard runs for this target."""
+    # Modules set proguard_enabled, but the proguarding happens only once at the
+    # bundle level.
+    if self.get_type() == 'android_app_bundle_module':
+      return False
+    return self.build_config()['deps_info'].get('proguard_enabled', False)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument('-C',
+                      '--output-directory',
+                      help='If outdir is not provided, will attempt to guess.')
+  parser.add_argument('--gn-labels',
+                      action='store_true',
+                      help='Print GN labels rather than ninja targets')
+  parser.add_argument(
+      '--nested',
+      action='store_true',
+      help='Do not convert nested targets to their top-level equivalents. '
+      'E.g. Without this, foo_test__apk -> foo_test')
+  parser.add_argument('--print-types',
+                      action='store_true',
+                      help='Print type of each target')
+  parser.add_argument('--print-build-config-paths',
+                      action='store_true',
+                      help='Print path to the .build_config of each target')
+  parser.add_argument('--build',
+                      action='store_true',
+                      help='Build all .build_config files.')
+  parser.add_argument('--type',
+                      action='append',
+                      help='Restrict to targets of given type',
+                      choices=_VALID_TYPES)
+  parser.add_argument('--stats',
+                      action='store_true',
+                      help='Print counts of each target type.')
+  parser.add_argument('--proguard-enabled',
+                      action='store_true',
+                      help='Restrict to targets that have proguard enabled')
+  parser.add_argument('-v', '--verbose', default=0, action='count')
+  args = parser.parse_args()
+
+  args.build |= bool(args.type or args.proguard_enabled or args.print_types
+                     or args.stats)
+
+  logging.basicConfig(level=logging.WARNING - (10 * args.verbose),
+                      format='%(levelname).1s %(relativeCreated)6d %(message)s')
+
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  constants.CheckOutputDirectory()
+  output_dir = constants.GetOutDirectory()
+
+  # Query ninja for all __build_config_crbug_908819 targets.
+  targets = _query_for_build_config_targets(output_dir)
+  entries = [_TargetEntry(t) for t in targets]
+
+  if args.build:
+    logging.warning('Building %d .build_config files...', len(entries))
+    _run_ninja(output_dir, [e.ninja_build_config_target for e in entries])
+
+  if args.type:
+    entries = [e for e in entries if e.get_type() in args.type]
+
+  if args.proguard_enabled:
+    entries = [e for e in entries if e.proguard_enabled()]
+
+  if args.stats:
+    counts = collections.Counter(e.get_type() for e in entries)
+    for entry_type, count in sorted(counts.items()):
+      print(f'{entry_type}: {count}')
+  else:
+    for e in entries:
+      if args.gn_labels:
+        to_print = e.gn_target
+      else:
+        to_print = e.ninja_target
+
+      # Convert to top-level target
+      if not args.nested:
+        to_print = to_print.replace('__test_apk', '').replace('__apk', '')
+
+      if args.print_types:
+        to_print = f'{to_print}: {e.get_type()}'
+      elif args.print_build_config_paths:
+        to_print = f'{to_print}: {e.build_config_path}'
+
+      print(to_print)
+
+
+if __name__ == '__main__':
+  main()
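get_type() and proguard_enabled() above only consult the deps_info section of the generated .build_config JSON. A minimal sketch of the shape they rely on; the JSON body here is invented, showing just the keys those methods read:

import json

FAKE_BUILD_CONFIG = json.loads("""
{
  "deps_info": {
    "type": "android_apk",
    "proguard_enabled": true
  }
}
""")

print(FAKE_BUILD_CONFIG['deps_info']['type'])                         # android_apk
print(FAKE_BUILD_CONFIG['deps_info'].get('proguard_enabled', False))  # True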
diff --git a/src/build/android/main_dex_classes.flags b/src/build/android/main_dex_classes.flags
new file mode 100644
index 0000000..31dbdd6
--- /dev/null
+++ b/src/build/android/main_dex_classes.flags
@@ -0,0 +1,52 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what should be kept in the main dex. Only used
+# during main dex list determination, not during actual proguarding.
+
+-keep @org.chromium.base.annotations.MainDex class * {
+  *;
+}
+
+-keepclasseswithmembers class * {
+  @org.chromium.base.annotations.MainDex <methods>;
+}
+
+# Assume all IDL-generated classes should be kept. They can't reference other
+# non-framework classes, so keeping them is fairly low-risk.
+-keepclasseswithmembers class * {
+  public static ** asInterface(android.os.IBinder);
+}
+
+# Required when code coverage is enabled.
+-keep class com.vladium.** {
+  *;
+}
+
+# Renderer / GPU processes don't load the secondary dex.
+-keep public class * extends org.chromium.base.process_launcher.ChildProcessService {
+  *;
+}
+
+# Used by tests for secondary dex extraction.
+-keep class android.support.v4.content.ContextCompat {
+  *;
+}
+
+# The following rules are based on $SDK_BUILD_TOOLS/mainDexClasses.rules.
+# Ours differ in that they:
+# 1. Omit -keeps for application / instrumentation / backupagents (these are
+#    redundant since they are added by aapt's main dex list rules output).
+# 2. Omit the keep for Application.attachBaseContext(), which is overly broad.
+# 3. Omit the keep for all annotations, which is also overly broad (and pulls
+#    in any class that has an @IntDef).
+
+######## START mainDexClasses.rules ########
+
+# Keep old-fashioned tests in the main dex or they'll be silently ignored by InstrumentationTestRunner.
+-keep public class * extends android.test.InstrumentationTestCase {
+  <init>();
+}
+
+######## END mainDexClasses.rules ########
diff --git a/src/build/android/method_count.py b/src/build/android/method_count.py
new file mode 100755
index 0000000..a39a390
--- /dev/null
+++ b/src/build/android/method_count.py
@@ -0,0 +1,118 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import zipfile
+
+from pylib.dex import dex_parser
+
+
+class DexStatsCollector(object):
+  """Tracks count of method/field/string/type as well as unique methods."""
+
+  def __init__(self):
+    # Signatures of all methods from all seen dex files.
+    self._unique_methods = set()
+    # Map of label -> { metric -> count }.
+    self._counts_by_label = {}
+
+  def _CollectFromDexfile(self, label, dexfile):
+    assert label not in self._counts_by_label, 'exists: ' + label
+    self._counts_by_label[label] = {
+        'fields': dexfile.header.field_ids_size,
+        'methods': dexfile.header.method_ids_size,
+        'strings': dexfile.header.string_ids_size,
+        'types': dexfile.header.type_ids_size,
+    }
+    self._unique_methods.update(dexfile.IterMethodSignatureParts())
+
+  def CollectFromZip(self, label, path):
+    """Add dex stats from an .apk/.jar/.aab/.zip."""
+    with zipfile.ZipFile(path, 'r') as z:
+      for subpath in z.namelist():
+        if not re.match(r'.*classes\d*\.dex$', subpath):
+          continue
+        dexfile = dex_parser.DexFile(bytearray(z.read(subpath)))
+        self._CollectFromDexfile('{}!{}'.format(label, subpath), dexfile)
+
+  def CollectFromDex(self, label, path):
+    """Add dex stats from a .dex file."""
+    with open(path, 'rb') as f:
+      dexfile = dex_parser.DexFile(bytearray(f.read()))
+    self._CollectFromDexfile(label, dexfile)
+
+  def MergeFrom(self, parent_label, other):
+    """Add dex stats from another DexStatsCollector."""
+    # pylint: disable=protected-access
+    for label, other_counts in other._counts_by_label.items():
+      new_label = '{}-{}'.format(parent_label, label)
+      self._counts_by_label[new_label] = other_counts.copy()
+    self._unique_methods.update(other._unique_methods)
+    # pylint: enable=protected-access
+
+  def GetUniqueMethodCount(self):
+    """Returns total number of unique methods across encountered dex files."""
+    return len(self._unique_methods)
+
+  def GetCountsByLabel(self):
+    """Returns dict of label -> {metric -> count}."""
+    return self._counts_by_label
+
+  def GetTotalCounts(self):
+    """Returns dict of {metric -> count}, where |count| is sum(metric)."""
+    ret = {}
+    for metric in ('fields', 'methods', 'strings', 'types'):
+      ret[metric] = sum(x[metric] for x in self._counts_by_label.values())
+    return ret
+
+  def GetDexCacheSize(self, pre_oreo):
+    """Returns number of bytes of dirty RAM is consumed from all dex files."""
+    # Dex Cache was optimized in Android Oreo:
+    # https://source.android.com/devices/tech/dalvik/improvements#dex-cache-removal
+    if pre_oreo:
+      total = sum(self.GetTotalCounts().values())
+    else:
+      total = sum(c['methods'] for c in self._counts_by_label.values())
+    return total * 4  # 4 bytes per entry.
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('paths', nargs='+')
+  args = parser.parse_args()
+
+  collector = DexStatsCollector()
+  for path in args.paths:
+    if os.path.splitext(path)[1] in ('.zip', '.apk', '.jar', '.aab'):
+      collector.CollectFromZip(path, path)
+    else:
+      collector.CollectFromDex(path, path)
+
+  counts_by_label = collector.GetCountsByLabel()
+  for label, counts in sorted(counts_by_label.items()):
+    print('{}:'.format(label))
+    for metric, count in sorted(counts.items()):
+      print('  {}:'.format(metric), count)
+    print()
+
+  if len(counts_by_label) > 1:
+    print('Totals:')
+    for metric, count in sorted(collector.GetTotalCounts().items()):
+      print('  {}:'.format(metric), count)
+    print()
+
+  print('Unique Methods:', collector.GetUniqueMethodCount())
+  print('DexCache (Pre-Oreo):', collector.GetDexCacheSize(pre_oreo=True),
+        'bytes of dirty memory')
+  print('DexCache (Oreo+):', collector.GetDexCacheSize(pre_oreo=False),
+        'bytes of dirty memory')
+
+
+if __name__ == '__main__':
+  main()
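DexStatsCollector can also be used programmatically. A short sketch, assuming the script is importable as the module method_count; the APK path is a placeholder:

from method_count import DexStatsCollector

collector = DexStatsCollector()
collector.CollectFromZip('MyApp', '/path/to/MyApp.apk')  # hypothetical path

print(collector.GetTotalCounts())        # {'fields': ..., 'methods': ...}
print(collector.GetUniqueMethodCount())
# Oreo+ DexCache only keeps the method table resident (4 bytes per entry).
print(collector.GetDexCacheSize(pre_oreo=False), 'bytes of dirty memory')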
diff --git a/src/build/android/multidex.flags b/src/build/android/multidex.flags
new file mode 100644
index 0000000..e3543c1
--- /dev/null
+++ b/src/build/android/multidex.flags
@@ -0,0 +1,8 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# When multidex is enabled, we need to keep the @MainDex annotation so that it
+# can be used to create the main dex list.
+-keepattributes *Annotations*
+-keep @interface org.chromium.base.annotations.MainDex
diff --git a/src/build/android/native_flags/BUILD.gn b/src/build/android/native_flags/BUILD.gn
new file mode 100644
index 0000000..9c5be70
--- /dev/null
+++ b/src/build/android/native_flags/BUILD.gn
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_toolchain == default_toolchain) {
+  import("//build/toolchain/toolchain.gni")
+
+  # A toolchain that will capture compiler and linker arguments to a file.
+  toolchain("flagcapture") {
+    tool("cxx") {
+      cxx = rebase_path("argcapture.py", root_build_dir)
+      command = "$cxx {{output}} {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+      outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ]
+    }
+    tool("solink") {
+      solink = rebase_path("argcapture.py", root_build_dir)
+      command = "$solink {{output}} {{ldflags}}"
+      outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ]
+    }
+    tool("alink") {
+      command = "this-should-never-run"
+      outputs = [ "this-will-never-exist" ]
+    }
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+  }
+} else if (current_toolchain == "//build/android/native_flags:flagcapture") {
+  # This will record flags from all default configs of the default toolchain.
+  source_set("default_ccflags") {
+    sources = [ "empty.cc" ]
+  }
+  shared_library("default_ldflags") {
+    no_default_deps = true
+  }
+}
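The flagcapture toolchain substitutes argcapture.py (below) for the compiler and linker, so "compiling" default_ccflags or "linking" default_ldflags simply records the default flags, one per line, into a .txt file. A sketch of reading that file back; the output directory of the secondary toolchain is an assumption here:

import os


def read_captured_flags(toolchain_out_dir, label_name):
  # argcapture.py writes one argument per line.
  path = os.path.join(toolchain_out_dir, label_name + '.txt')
  with open(path) as f:
    return f.read().splitlines()

# e.g. (hypothetical root_out_dir for the secondary toolchain):
# ccflags = read_captured_flags('out/Default/flagcapture', 'default_ccflags')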
diff --git a/src/build/android/native_flags/argcapture.py b/src/build/android/native_flags/argcapture.py
new file mode 100755
index 0000000..159b03a
--- /dev/null
+++ b/src/build/android/native_flags/argcapture.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes arguments to a file."""
+
+import sys
+
+
+def main():
+  with open(sys.argv[1], 'w') as f:
+    f.write('\n'.join(sys.argv[2:]))
+    f.write('\n')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/native_flags/empty.cc b/src/build/android/native_flags/empty.cc
new file mode 100644
index 0000000..94aac14
--- /dev/null
+++ b/src/build/android/native_flags/empty.cc
@@ -0,0 +1,5 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file just needs to exist to appease GN.
diff --git a/src/build/android/provision_devices.py b/src/build/android/provision_devices.py
new file mode 100755
index 0000000..5fb4d93
--- /dev/null
+++ b/src/build/android/provision_devices.py
@@ -0,0 +1,563 @@
+#!/usr/bin/env vpython
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+  ./provision_devices.py [-d <device serial number>]
+"""
+
+import argparse
+import datetime
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See crbug.com/584730 and https://bugs.python.org/issue7980.
+import _strptime  # pylint: disable=unused-import
+
+import devil_chromium
+from devil.android import battery_utils
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.sdk import keyevent
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+from devil.utils import run_tests_helper
+from devil.utils import timeout_retry
+from pylib import constants
+from pylib import device_settings
+from pylib.constants import host_paths
+
+_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle']
+_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
+_TOMBSTONE_REGEX = re.compile('tombstone.*')
+
+
+class _DEFAULT_TIMEOUTS(object):
+  # L can take a while to reboot after a wipe.
+  LOLLIPOP = 600
+  PRE_LOLLIPOP = 180
+
+  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
+class _PHASES(object):
+  WIPE = 'wipe'
+  PROPERTIES = 'properties'
+  FINISH = 'finish'
+
+  ALL = [WIPE, PROPERTIES, FINISH]
+
+
+def ProvisionDevices(args):
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+  devices = [
+      d for d in device_utils.DeviceUtils.HealthyDevices(denylist)
+      if not args.emulators or d.adb.is_emulator
+  ]
+  if args.device:
+    devices = [d for d in devices if d == args.device]
+  if not devices:
+    raise device_errors.DeviceUnreachableError(args.device)
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  if args.emulators:
+    parallel_devices.pMap(SetProperties, args)
+  else:
+    parallel_devices.pMap(ProvisionDevice, denylist, args)
+  if args.auto_reconnect:
+    _LaunchHostHeartbeat()
+  denylisted_devices = denylist.Read() if denylist else []
+  if args.output_device_denylist:
+    with open(args.output_device_denylist, 'w') as f:
+      json.dump(denylisted_devices, f)
+  if all(d in denylisted_devices for d in devices):
+    raise device_errors.NoDevicesError
+  return 0
+
+
+def ProvisionDevice(device, denylist, options):
+  def should_run_phase(phase_name):
+    return not options.phases or phase_name in options.phases
+
+  def run_phase(phase_func, reboot_timeout, reboot=True):
+    try:
+      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
+    except device_errors.CommandTimeoutError:
+      logging.error('Device did not finish booting. Will try to reboot.')
+      device.Reboot(timeout=reboot_timeout)
+    phase_func(device, options)
+    if reboot:
+      device.Reboot(False, retries=0)
+      device.adb.WaitForDevice()
+
+  try:
+    if options.reboot_timeout:
+      reboot_timeout = options.reboot_timeout
+    elif device.build_version_sdk >= version_codes.LOLLIPOP:
+      reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
+    else:
+      reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
+
+    if should_run_phase(_PHASES.WIPE):
+      if (options.chrome_specific_wipe or device.IsUserBuild() or
+          device.build_version_sdk >= version_codes.MARSHMALLOW):
+        run_phase(WipeChromeData, reboot_timeout)
+      else:
+        run_phase(WipeDevice, reboot_timeout)
+
+    if should_run_phase(_PHASES.PROPERTIES):
+      run_phase(SetProperties, reboot_timeout)
+
+    if should_run_phase(_PHASES.FINISH):
+      run_phase(FinishProvisioning, reboot_timeout, reboot=False)
+
+    if options.chrome_specific_wipe:
+      package = "com.google.android.gms"
+      version_name = device.GetApplicationVersion(package)
+      logging.info("Version name for %s is %s", package, version_name)
+
+    CheckExternalStorage(device)
+
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timed out waiting for device %s. Adding to denylist.',
+                      str(device))
+    if denylist:
+      denylist.Extend([str(device)], reason='provision_timeout')
+
+  except (device_errors.CommandFailedError,
+          device_errors.DeviceUnreachableError):
+    logging.exception('Failed to provision device %s. Adding to denylist.',
+                      str(device))
+    if denylist:
+      denylist.Extend([str(device)], reason='provision_failure')
+
+
+def CheckExternalStorage(device):
+  """Checks that storage is writable and if not makes it writable.
+
+  Arguments:
+    device: The device to check.
+  """
+  try:
+    with device_temp_file.DeviceTempFile(
+        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+      device.WriteFile(f.name, 'test')
+  except device_errors.CommandFailedError:
+    logging.info('External storage not writable. Remounting / as RW')
+    device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
+                           check_return=True, as_root=True)
+    device.EnableRoot()
+    with device_temp_file.DeviceTempFile(
+        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+      device.WriteFile(f.name, 'test')
+
+
+def WipeChromeData(device, options):
+  """Wipes Chrome-specific data from the device.
+
+  (1) uninstall any app whose name matches *chrom*, except
+      com.android.chrome, which is the chrome stable package. Doing so also
+      removes the corresponding dirs under /data/data/ and /data/app/
+  (2) remove any dir under /data/app-lib/ whose name matches *chrom*
+  (3) remove any files under /data/tombstones/ whose name matches "tombstone*"
+  (4) remove /data/local.prop if there is any
+  (5) remove /data/local/chrome-command-line if there is any
+  (6) remove anything under /data/local/.config/ if the dir exists
+      (this is telemetry related)
+  (7) remove anything under /data/local/tmp/
+
+  Arguments:
+    device: the device to wipe
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    if device.IsUserBuild():
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+      device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
+    else:
+      device.EnableRoot()
+      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+                        constants.PACKAGE_INFO['chrome_stable'].package)
+      _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
+      _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
+
+      _WipeFileOrDir(device, '/data/local.prop')
+      _WipeFileOrDir(device, '/data/local/chrome-command-line')
+      _WipeFileOrDir(device, '/data/local/.config/')
+      _WipeFileOrDir(device, '/data/local/tmp/')
+      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+                             check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def WipeDevice(device, options):
+  """Wipes data from the device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after a reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+    options: the parsed command-line options; skip_wipe and adb_key_files
+             are used
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    device.EnableRoot()
+    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+    if device_authorized:
+      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+                                 as_root=True).splitlines()
+    device.RunShellCommand(['wipe', 'data'],
+                           as_root=True, check_return=True)
+    device.adb.WaitForDevice()
+
+    if device_authorized:
+      adb_keys_set = set(adb_keys)
+      for adb_key_file in options.adb_key_files or []:
+        try:
+          with open(adb_key_file, 'r') as f:
+            adb_public_keys = f.readlines()
+          adb_keys_set.update(adb_public_keys)
+        except IOError:
+          logging.warning('Unable to find adb keys file %s.', adb_key_file)
+      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+  device.RunShellCommand(['mkdir', '-p', dir_path],
+                         as_root=True, check_return=True)
+  device.RunShellCommand(['restorecon', dir_path],
+                         as_root=True, check_return=True)
+  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+                         as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+  """Enables root and configures device properties and content settings."""
+  try:
+    device.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+  if not device.IsUserBuild():
+    _ConfigureLocalProperties(device, options.enable_java_debug)
+  else:
+    logging.warning('Cannot configure properties in user builds.')
+  device_settings.ConfigureContentSettings(
+      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+  if options.disable_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+  if options.disable_mock_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+  device_settings.SetLockScreenSettings(device)
+  if options.disable_network:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.NETWORK_DISABLED_SETTINGS)
+    if device.build_version_sdk >= version_codes.MARSHMALLOW:
+      # Ensure that NFC is also switched off.
+      device.RunShellCommand(['svc', 'nfc', 'disable'],
+                             as_root=True, check_return=True)
+
+  if options.disable_system_chrome:
+    # The system chrome version on the device interferes with some tests.
+    device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
+                           check_return=True)
+
+  if options.remove_system_webview:
+    if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
+      logging.info('System WebView exists and needs to be removed')
+      if device.HasRoot():
+        # Disable Marshmallow's Verity security feature.
+        if device.build_version_sdk >= version_codes.MARSHMALLOW:
+          device.adb.DisableVerity()
+          device.Reboot()
+          device.WaitUntilFullyBooted()
+          device.EnableRoot()
+
+        # This is required, e.g., to replace the system webview on a device.
+        device.adb.Remount()
+        device.RunShellCommand(['stop'], check_return=True)
+        device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
+                               check_return=True)
+        device.RunShellCommand(['start'], check_return=True)
+      else:
+        logging.warning(
+            'Cannot remove system webview from a non-rooted device')
+    else:
+      logging.info('System WebView already removed')
+
+  # Some device types can momentarily disappear after setting properties.
+  device.adb.WaitForDevice()
+
+
+def _ConfigureLocalProperties(device, java_debug=True):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+      ]
+  if java_debug:
+    local_props.append(
+        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        device.LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+    device.RunShellCommand(
+        ['chmod', '644', device.LOCAL_PROPERTIES_PATH],
+        as_root=True, check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to configure local properties.')
+
+
+def FinishProvisioning(device, options):
+  """Unlocks the screen, manages battery charge and temperature, sets the
+  device date and time, and optionally pushes and launches adb_reboot.
+  """
+  # The lockscreen can't be disabled on user builds, so send a keyevent
+  # to unlock it.
+  if device.IsUserBuild():
+    device.SendKeyEvent(keyevent.KEYCODE_MENU)
+
+  if options.min_battery_level is not None:
+    battery = battery_utils.BatteryUtils(device)
+    try:
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+    except device_errors.DeviceChargingError:
+      device.Reboot()
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+
+  if options.max_battery_temp is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to let battery cool to specified temperature.')
+
+  def _set_and_verify_date():
+    if device.build_version_sdk >= version_codes.MARSHMALLOW:
+      date_format = '%m%d%H%M%Y.%S'
+      set_date_command = ['date', '-u']
+      get_date_command = ['date', '-u']
+    else:
+      date_format = '%Y%m%d.%H%M%S'
+      set_date_command = ['date', '-s']
+      get_date_command = ['date']
+
+    # TODO(jbudorick): This is wrong on pre-M devices -- get/set are
+    # dealing in local time, but we're setting based on GMT.
+    strgmtime = time.strftime(date_format, time.gmtime())
+    set_date_command.append(strgmtime)
+    device.RunShellCommand(set_date_command, as_root=True, check_return=True)
+
+    get_date_command.append('+"%Y%m%d.%H%M%S"')
+    device_time = device.RunShellCommand(
+        get_date_command, as_root=True, single_line=True).replace('"', '')
+    device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
+    correct_time = datetime.datetime.strptime(strgmtime, date_format)
+    tdelta = (correct_time - device_time).seconds
+    if tdelta <= 1:
+      logging.info('Date/time successfully set on %s', device)
+      return True
+    else:
+      logging.error('Date mismatch. Device: %s Correct: %s',
+                    device_time.isoformat(), correct_time.isoformat())
+      return False
+
+  # Sometimes the date is not set correctly on the devices. Retry on failure.
+  if device.IsUserBuild():
+    # TODO(bpastene): Figure out how to set the date & time on user builds.
+    pass
+  else:
+    if not timeout_retry.WaitFor(
+        _set_and_verify_date, wait_period=1, max_tries=2):
+      raise device_errors.CommandFailedError(
+          'Failed to set date & time.', device_serial=str(device))
+
+  props = device.RunShellCommand('getprop', check_return=True)
+  for prop in props:
+    logging.info('  %s', prop)
+  if options.auto_reconnect:
+    _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _UninstallIfMatch(device, pattern, app_to_keep):
+  installed_packages = device.RunShellCommand(['pm', 'list', 'packages'])
+  installed_system_packages = [
+      pkg.split(':')[1] for pkg in device.RunShellCommand(['pm', 'list',
+                                                           'packages', '-s'])]
+  for package_output in installed_packages:
+    package = package_output.split(":")[1]
+    if pattern.match(package) and not package == app_to_keep:
+      if not device.IsUserBuild() or package not in installed_system_packages:
+        device.Uninstall(package)
+
+
+def _WipeUnderDirIfMatch(device, path, pattern):
+  for filename in device.ListDirectory(path):
+    if pattern.match(filename):
+      _WipeFileOrDir(device, posixpath.join(path, filename))
+
+
+def _WipeFileOrDir(device, path):
+  if device.PathExists(path):
+    device.RunShellCommand(['rm', '-rf', path], check_return=True)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+  """Pushes and launches the adb_reboot binary on the device.
+
+  Arguments:
+    device: The DeviceUtils instance for the device to which the adb_reboot
+            binary should be pushed.
+    target: The build target (e.g., Debug or Release), which helps in
+            locating the adb_reboot binary.
+  """
+  logging.info('Will push and launch adb_reboot on %s', str(device))
+  # Kill if adb_reboot is already running.
+  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+  # Push adb_reboot
+  logging.info('  Pushing adb_reboot ...')
+  adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                            'out/%s/adb_reboot' % target)
+  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+  # Launch adb_reboot
+  logging.info('  Launching adb_reboot ...')
+  device.RunShellCommand(
+      ['/data/local/tmp/adb_reboot'],
+      check_return=True)
+
+
+def _LaunchHostHeartbeat():
+  # Kill any existing host_heartbeat.
+  KillHostHeartbeat()
+  # Launch a new host_heartbeat
+  logging.info('Spawning host heartbeat...')
+  subprocess.Popen([os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 'build/android/host_heartbeat.py')])
+
+
+def KillHostHeartbeat():
+  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+  stdout, _ = ps.communicate()
+  matches = re.findall(r'\n.*host_heartbeat.*', stdout)
+  for match in matches:
+    logging.info('An instance of host_heartbeat is running; killing it.')
+    pid = re.findall(r'(\S+)', match)[1]
+    subprocess.call(['kill', str(pid)])
+
+
+def main():
+  # Recommended options on perf bots:
+  # --disable-network
+  #     TODO(tonyg): We eventually want network on. However, currently radios
+  #     can cause perfbots to drain faster than they charge.
+  # --min-battery-level 95
+  #     Some perf bots run benchmarks with USB charging disabled which leads
+  #     to gradual draining of the battery. We must wait for a full charge
+  #     before starting a run in order to keep the devices online.
+
+  parser = argparse.ArgumentParser(
+      description='Provision Android devices with settings required for bots.')
+  parser.add_argument('-d', '--device', metavar='SERIAL',
+                      help='the serial number of the device to be provisioned'
+                      ' (the default is to provision all devices attached)')
+  parser.add_argument('--adb-path',
+                      help='Absolute path to the adb binary to use.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
+                      dest='phases',
+                      help='Phases of provisioning to run. '
+                           '(If omitted, all phases will be run.)')
+  parser.add_argument('--skip-wipe', action='store_true', default=False,
+                      help="don't wipe device data during provisioning")
+  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
+                      help='when wiping the device, max number of seconds to'
+                      ' wait after each reboot '
+                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
+  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
+                      help='wait for the device to reach this minimum battery'
+                      ' level before trying to continue')
+  parser.add_argument('--disable-location', action='store_true',
+                      help='disable Google location services on devices')
+  parser.add_argument('--disable-mock-location', action='store_true',
+                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
+  parser.add_argument('--disable-network', action='store_true',
+                      help='disable network access on devices')
+  parser.add_argument('--disable-java-debug', action='store_false',
+                      dest='enable_java_debug', default=True,
+                      help='disable Java property asserts and JNI checking')
+  parser.add_argument('--disable-system-chrome', action='store_true',
+                      help='Disable the system chrome from devices.')
+  parser.add_argument('--remove-system-webview', action='store_true',
+                      help='Remove the system webview from devices.')
+  parser.add_argument('-t', '--target', default='Debug',
+                      help='the build target (default: %(default)s)')
+  parser.add_argument('-r', '--auto-reconnect', action='store_true',
+                      help='push binary which will reboot the device on adb'
+                      ' disconnections')
+  parser.add_argument('--adb-key-files', type=str, nargs='+',
+                      help='list of adb keys to push to device')
+  parser.add_argument('-v', '--verbose', action='count', default=1,
+                      help='Log more information.')
+  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
+                      help='Wait for the battery to have this temp or lower.')
+  parser.add_argument('--output-device-denylist',
+                      help='Json file to output the device denylist.')
+  parser.add_argument('--chrome-specific-wipe', action='store_true',
+                      help='only wipe chrome specific data during provisioning')
+  parser.add_argument('--emulators', action='store_true',
+                      help='provision only emulators and ignore usb devices')
+  args = parser.parse_args()
+  constants.SetBuildType(args.target)
+
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devil_chromium.Initialize(adb_path=args.adb_path)
+
+  try:
+    return ProvisionDevices(args)
+  except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
+    logging.exception('Unable to provision local devices.')
+    return exit_codes.INFRA
+
+
+if __name__ == '__main__':
+  sys.exit(main())
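+
+
+# Example invocation on a perf bot, using the options recommended above
+# (the serial number is illustrative):
+#
+#   build/android/provision_devices.py -d 084e07cbf6e5e118 \
+#       --disable-network --min-battery-level 95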
diff --git a/src/build/android/pylib/__init__.py b/src/build/android/pylib/__init__.py
new file mode 100644
index 0000000..c9a4c03
--- /dev/null
+++ b/src/build/android/pylib/__init__.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+_THIRD_PARTY_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..', 'third_party'))
+
+_CATAPULT_PATH = os.path.join(_THIRD_PARTY_PATH, 'catapult')
+
+_DEVIL_PATH = os.path.join(_CATAPULT_PATH, 'devil')
+
+_PYTRACE_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_trace_event')
+
+_PY_UTILS_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_utils')
+
+_SIX_PATH = os.path.join(_THIRD_PARTY_PATH, 'six', 'src')
+
+_TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing')
+
+
+if _DEVIL_PATH not in sys.path:
+  sys.path.append(_DEVIL_PATH)
+
+if _PYTRACE_PATH not in sys.path:
+  sys.path.append(_PYTRACE_PATH)
+
+if _PY_UTILS_PATH not in sys.path:
+  sys.path.append(_PY_UTILS_PATH)
+
+if _TRACE2HTML_PATH not in sys.path:
+  sys.path.append(_TRACE2HTML_PATH)
+
+if _SIX_PATH not in sys.path:
+  sys.path.append(_SIX_PATH)
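+
+
+# Importing this package is enough to make the vendored dependencies above
+# importable. A minimal sketch (device_utils is a devil module):
+#
+#   import pylib  # Extends sys.path as a side effect.
+#   from devil.android import device_utils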
diff --git a/src/build/android/pylib/android/__init__.py b/src/build/android/pylib/android/__init__.py
new file mode 100644
index 0000000..a67c350
--- /dev/null
+++ b/src/build/android/pylib/android/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/android/logcat_symbolizer.py b/src/build/android/pylib/android/logcat_symbolizer.py
new file mode 100644
index 0000000..720629b
--- /dev/null
+++ b/src/build/android/pylib/android/logcat_symbolizer.py
@@ -0,0 +1,98 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+from devil.android import logcat_monitor
+
+BACKTRACE_LINE_RE = re.compile(r'#\d+')
+THREADTIME_RE = re.compile(
+    logcat_monitor.LogcatMonitor.THREADTIME_RE_FORMAT % (
+        r' *\S* *', r' *\S* *', r' *\S* *', r' *\S* *', r'.*'))
+
+
+def SymbolizeLogcat(logcat, dest, symbolizer, abi):
+  """Symbolize stack trace in the logcat.
+
+  Symbolize the logcat and write the symbolized logcat to a new file.
+
+  Args:
+    logcat: Path to logcat file.
+    dest: Path to where to write the symbolized logcat.
+    symbolizer: The stack symbolizer to symbolize stack trace in logcat.
+    abi: The device's product_cpu_abi. Symbolizer needs it to symbolize.
+
+  A sample logcat that needs to be symbolized, after stripping the prefix,
+  such as '08-07 18:39:37.692 28649 28649 E Ion     : ', would be:
+  Build fingerprint: 'google/shamu/shamu:7.1.1/NMF20B/3370:userdebug/dev-keys'
+  Revision: '0'
+  ABI: 'arm'
+  pid: 28936, tid: 28936, name: chromium.chrome  >>> org.chromium.chrome <<<
+  signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
+  Abort message: '[FATAL:debug_urls.cc(151)] Check failed: false.
+  #00 0x63e16c41 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0006cc4
+  #01 0x63f19be3 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016fbe
+  #02 0x63f19737 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016f73
+  #03 0x63f18ddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016edd
+  #04 0x63f18b79 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016eb7
+  #05 0xab53f319 /system/lib/libart.so+0x000a3319
+  #06
+     r0 00000000  r1 00007108  r2 00000006  r3 00000008
+     r4 ae60258c  r5 00000006  r6 ae602534  r7 0000010c
+     r8 bede5cd0  r9 00000030  sl 00000000  fp 9265a800
+     ip 0000000b  sp bede5c38  lr ac8e5537  pc ac8e7da0  cpsr 600f0010
+
+  backtrace:
+     #00 pc 00049da0  /system/lib/libc.so (tgkill+12)
+     #01 pc 00047533  /system/lib/libc.so (pthread_kill+34)
+     #02 pc 0001d635  /system/lib/libc.so (raise+10)
+     #03 pc 00019181  /system/lib/libc.so (__libc_android_abort+34)
+     #04 pc 00017048  /system/lib/libc.so (abort+4)
+     #05 pc 00948605  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #06 pc 002c9f73  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #07 pc 003ccbe1  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #08 pc 003cc735  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #09 pc 003cbddf  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+     #10 pc 003cbb77  /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+  """
+
+  with open(logcat) as logcat_file:
+    with open(dest, 'w') as dest_file:
+      # The current stack script will only print out the symbolized stack,
+      # and completely ignore logs other than the crash log that is used
+      # for symbolization, if any exists. Thus the code here extracts the
+      # crash log from the logcat and passes only the crash log to the
+      # script, because we don't want to lose other information in the
+      # logcat that, if passed to the stack script, would just be ignored
+      # by it.
+      # TODO(crbug.com/755225): Rewrite the logic here.
+      outside_of_crash_log = True
+      in_lower_half_crash = False
+      data_to_symbolize = []
+
+      for line in logcat_file:
+        if outside_of_crash_log:
+          # Check whether it is the start of crash log.
+          if 'Build fingerprint: ' in line:
+            outside_of_crash_log = False
+            # Only include necessary information for symbolization.
+            # The logic here that removes date, time, proc_id etc.
+            # should be in sync with _THREADTIME_RE_FORMAT in logcat_monitor.
+            data_to_symbolize.append(
+                re.search(THREADTIME_RE, line).group(7))
+          else:
+            dest_file.write(line)
+        else:
+          # Once we have reached the end of the backtrace section,
+          # we will start symbolizing.
+          if in_lower_half_crash and not bool(BACKTRACE_LINE_RE.search(line)):
+            outside_of_crash_log = True
+            in_lower_half_crash = False
+            symbolized_lines = symbolizer.ExtractAndResolveNativeStackTraces(
+                data_to_symbolize, abi)
+            dest_file.write('\n'.join(symbolized_lines) + '\n' + line)
+            data_to_symbolize = []
+          else:
+            if not in_lower_half_crash and 'backtrace:' in line:
+              in_lower_half_crash = True
+            data_to_symbolize.append(
+                re.search(THREADTIME_RE, line).group(7))
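+
+
+# A minimal usage sketch. |symbolizer| stands for any object exposing
+# ExtractAndResolveNativeStackTraces(data, abi), as required above; the
+# paths are illustrative:
+#
+#   SymbolizeLogcat('/tmp/logcat.txt', '/tmp/logcat.symbolized.txt',
+#                   symbolizer, 'arm')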
diff --git a/src/build/android/pylib/base/__init__.py b/src/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/base/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/base/base_test_result.py b/src/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000..03f00f2
--- /dev/null
+++ b/src/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,264 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+from __future__ import absolute_import
+import threading
+import six
+
+
+class ResultType(object):
+  """Class enumerating test types."""
+  # The test passed.
+  PASS = 'SUCCESS'
+
+  # The test was intentionally skipped.
+  SKIP = 'SKIPPED'
+
+  # The test failed.
+  FAIL = 'FAILURE'
+
+  # The test caused the containing process to crash.
+  CRASH = 'CRASH'
+
+  # The test timed out.
+  TIMEOUT = 'TIMEOUT'
+
+  # The test ran, but we couldn't determine what happened.
+  UNKNOWN = 'UNKNOWN'
+
+  # The test did not run.
+  NOTRUN = 'NOTRUN'
+
+  @staticmethod
+  def GetTypes():
+    """Get a list of all test types."""
+    return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+            ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN,
+            ResultType.NOTRUN]
+
+
+class BaseTestResult(object):
+  """Base class for a single test result."""
+
+  def __init__(self, name, test_type, duration=0, log=''):
+    """Construct a BaseTestResult.
+
+    Args:
+      name: Name of the test which defines uniqueness.
+      test_type: Type of the test result as defined in ResultType.
+      duration: Time it took for the test to run in milliseconds.
+      log: An optional string listing any errors.
+    """
+    assert name
+    assert test_type in ResultType.GetTypes()
+    self._name = name
+    self._test_type = test_type
+    self._duration = duration
+    self._log = log
+    self._links = {}
+
+  def __str__(self):
+    return self._name
+
+  def __repr__(self):
+    return self._name
+
+  def __cmp__(self, other):
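+    # NOTE: __cmp__ is only consulted by Python 2; Python 3 ignores it.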
+    # pylint: disable=W0212
+    return cmp(self._name, other._name)
+
+  def __hash__(self):
+    return hash(self._name)
+
+  def SetName(self, name):
+    """Set the test name.
+
+    Because we're putting this into a set, this should only be used if moving
+    this test result into another set.
+    """
+    self._name = name
+
+  def GetName(self):
+    """Get the test name."""
+    return self._name
+
+  def SetType(self, test_type):
+    """Set the test result type."""
+    assert test_type in ResultType.GetTypes()
+    self._test_type = test_type
+
+  def GetType(self):
+    """Get the test result type."""
+    return self._test_type
+
+  def GetDuration(self):
+    """Get the test duration."""
+    return self._duration
+
+  def SetLog(self, log):
+    """Set the test log."""
+    self._log = log
+
+  def GetLog(self):
+    """Get the test log."""
+    return self._log
+
+  def SetLink(self, name, link_url):
+    """Set link with test result data."""
+    self._links[name] = link_url
+
+  def GetLinks(self):
+    """Get dict containing links to test result data."""
+    return self._links
+
+
+class TestRunResults(object):
+  """Set of results for a test run."""
+
+  def __init__(self):
+    self._links = {}
+    self._results = set()
+    self._results_lock = threading.RLock()
+
+  def SetLink(self, name, link_url):
+    """Add link with test run results data."""
+    self._links[name] = link_url
+
+  def GetLinks(self):
+    """Get dict containing links to test run result data."""
+    return self._links
+
+  def GetLogs(self):
+    """Get the string representation of all test logs."""
+    with self._results_lock:
+      s = []
+      for test_type in ResultType.GetTypes():
+        if test_type != ResultType.PASS:
+          for t in sorted(self._GetType(test_type)):
+            log = t.GetLog()
+            if log:
+              s.append('[%s] %s:' % (test_type, t))
+              s.append(six.text_type(log, 'utf-8'))
+      return '\n'.join(s)
+
+  def GetGtestForm(self):
+    """Get the gtest string representation of this object."""
+    with self._results_lock:
+      s = []
+      plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+      tests = lambda n: plural(n, 'test', 'tests')
+
+      s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+      s.append('[  PASSED  ] %s.' % (tests(len(self.GetPass()))))
+
+      skipped = self.GetSkip()
+      if skipped:
+        s.append('[  SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+        for t in sorted(skipped):
+          s.append('[  SKIPPED ] %s' % str(t))
+
+      all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+          self.GetUnknown())
+      if all_failures:
+        s.append('[  FAILED  ] %s, listed below:' % tests(len(all_failures)))
+        for t in sorted(self.GetFail()):
+          s.append('[  FAILED  ] %s' % str(t))
+        for t in sorted(self.GetCrash()):
+          s.append('[  FAILED  ] %s (CRASHED)' % str(t))
+        for t in sorted(self.GetTimeout()):
+          s.append('[  FAILED  ] %s (TIMEOUT)' % str(t))
+        for t in sorted(self.GetUnknown()):
+          s.append('[  FAILED  ] %s (UNKNOWN)' % str(t))
+        s.append('')
+        s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+      return '\n'.join(s)
+
+  def GetShortForm(self):
+    """Get the short string representation of this object."""
+    with self._results_lock:
+      s = []
+      s.append('ALL: %d' % len(self._results))
+      for test_type in ResultType.GetTypes():
+        s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+      return ''.join([x.ljust(15) for x in s])
+
+  def __str__(self):
+    return self.GetGtestForm()
+
+  def AddResult(self, result):
+    """Add |result| to the set.
+
+    Args:
+      result: An instance of BaseTestResult.
+    """
+    assert isinstance(result, BaseTestResult)
+    with self._results_lock:
+      self._results.discard(result)
+      self._results.add(result)
+
+  def AddResults(self, results):
+    """Add |results| to the set.
+
+    Args:
+      results: An iterable of BaseTestResult objects.
+    """
+    with self._results_lock:
+      for t in results:
+        self.AddResult(t)
+
+  def AddTestRunResults(self, results):
+    """Add the set of test results from |results|.
+
+    Args:
+      results: An instance of TestRunResults.
+    """
+    assert isinstance(results, TestRunResults), (
+           'Expected TestRunResult object: %s' % type(results))
+    with self._results_lock:
+      # pylint: disable=W0212
+      self._results.update(results._results)
+
+  def GetAll(self):
+    """Get the set of all test results."""
+    with self._results_lock:
+      return self._results.copy()
+
+  def _GetType(self, test_type):
+    """Get the set of test results with the given test type."""
+    with self._results_lock:
+      return set(t for t in self._results if t.GetType() == test_type)
+
+  def GetPass(self):
+    """Get the set of all passed test results."""
+    return self._GetType(ResultType.PASS)
+
+  def GetSkip(self):
+    """Get the set of all skipped test results."""
+    return self._GetType(ResultType.SKIP)
+
+  def GetFail(self):
+    """Get the set of all failed test results."""
+    return self._GetType(ResultType.FAIL)
+
+  def GetCrash(self):
+    """Get the set of all crashed test results."""
+    return self._GetType(ResultType.CRASH)
+
+  def GetTimeout(self):
+    """Get the set of all timed out test results."""
+    return self._GetType(ResultType.TIMEOUT)
+
+  def GetUnknown(self):
+    """Get the set of all unknown test results."""
+    return self._GetType(ResultType.UNKNOWN)
+
+  def GetNotPass(self):
+    """Get the set of all non-passed test results."""
+    return self.GetAll() - self.GetPass()
+
+  def DidRunPass(self):
+    """Return whether the test run was successful."""
+    return not self.GetNotPass() - self.GetSkip()
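+
+
+# A minimal sketch of how these classes compose (the test names below are
+# illustrative):
+#
+#   results = TestRunResults()
+#   results.AddResult(
+#       BaseTestResult('MyTest.testFoo', ResultType.PASS, duration=12))
+#   results.AddResult(
+#       BaseTestResult('MyTest.testBar', ResultType.FAIL, log='boom'))
+#   print(results.GetGtestForm())  # gtest-style summary of the run.
+#   print(results.DidRunPass())    # False: a FAIL result is present.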
diff --git a/src/build/android/pylib/base/base_test_result_unittest.py b/src/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000..31a1f60
--- /dev/null
+++ b/src/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+  def setUp(self):
+    self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    self.p2 = BaseTestResult('p2', ResultType.PASS)
+    self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+    self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+    self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+    self.tr = TestRunResults()
+    self.tr.AddResult(self.p1)
+    self.tr.AddResult(other_p1)
+    self.tr.AddResult(self.p2)
+    self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+  def testGetAll(self):
+    self.assertFalse(
+        self.tr.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+  def testGetPass(self):
+    self.assertFalse(self.tr.GetPass().symmetric_difference(
+        [self.p1, self.p2]))
+
+  def testGetNotPass(self):
+    self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+        [self.f1, self.c1, self.u1]))
+
+  def testGetAddTestRunResults(self):
+    tr2 = TestRunResults()
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    f2 = BaseTestResult('f2', ResultType.FAIL)
+    tr2.AddResult(other_p1)
+    tr2.AddResult(f2)
+    tr2.AddTestRunResults(self.tr)
+    self.assertFalse(
+        tr2.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+  def testGetLogs(self):
+    log_print = ('[FAIL] f1:\n'
+                 'failure1\n'
+                 '[CRASH] c1:\n'
+                 'crash1')
+    self.assertEqual(self.tr.GetLogs(), log_print)
+
+  def testGetShortForm(self):
+    short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
+                   'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
+    self.assertEqual(self.tr.GetShortForm(), short_print)
+
+  def testGetGtestForm(self):
+    gtest_print = ('[==========] 5 tests ran.\n'
+                   '[  PASSED  ] 2 tests.\n'
+                   '[  FAILED  ] 3 tests, listed below:\n'
+                   '[  FAILED  ] f1\n'
+                   '[  FAILED  ] c1 (CRASHED)\n'
+                   '[  FAILED  ] u1 (UNKNOWN)\n'
+                   '\n'
+                   '3 FAILED TESTS')
+    self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+  def testRunPassed(self):
+    self.assertFalse(self.tr.DidRunPass())
+    tr2 = TestRunResults()
+    self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/base/environment.py b/src/build/android/pylib/base/environment.py
new file mode 100644
index 0000000..744c392
--- /dev/null
+++ b/src/build/android/pylib/base/environment.py
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+  """An environment in which tests can be run.
+
+  This is expected to handle all logic that applies to a specific environment
+  as a whole, but is independent of the test type.
+
+  Examples include:
+    - The local device environment, for running tests on devices attached to
+      the local machine.
+    - The local machine environment, for running tests directly on the local
+      machine.
+  """
+
+  def __init__(self, output_manager):
+    """Environment constructor.
+
+    Args:
+      output_manager: Instance of |output_manager.OutputManager| used to
+          save test output.
+    """
+    self._output_manager = output_manager
+
+    # Some subclasses have different teardown behavior on receiving SIGTERM.
+    self._received_sigterm = False
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
+  @property
+  def output_manager(self):
+    return self._output_manager
+
+  def ReceivedSigterm(self):
+    self._received_sigterm = True
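+
+
+# A minimal subclass sketch (illustrative only) showing the context-manager
+# protocol defined above:
+#
+#   class NoOpEnvironment(Environment):
+#     def SetUp(self):
+#       pass
+#
+#     def TearDown(self):
+#       pass
+#
+#   with NoOpEnvironment(output_manager=None) as env:
+#     ...  # Run tests against |env|.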
diff --git a/src/build/android/pylib/base/environment_factory.py b/src/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000..2ff93f3
--- /dev/null
+++ b/src/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.local.machine import local_machine_environment
+
+try:
+  # local_emulator_environment depends on //tools.
+  # If a client pulls in the //build subtree but not the //tools
+  # one, fail at emulator environment creation time.
+  from pylib.local.emulator import local_emulator_environment
+except ImportError:
+  local_emulator_environment = None
+
+
+def CreateEnvironment(args, output_manager, error_func):
+
+  if args.environment == 'local':
+    if args.command not in constants.LOCAL_MACHINE_TESTS:
+      if args.avd_config:
+        if not local_emulator_environment:
+          error_func('emulator environment requested but not available.')
+        return local_emulator_environment.LocalEmulatorEnvironment(
+            args, output_manager, error_func)
+      return local_device_environment.LocalDeviceEnvironment(
+          args, output_manager, error_func)
+    else:
+      return local_machine_environment.LocalMachineEnvironment(
+          args, output_manager, error_func)
+
+  error_func('Unable to create %s environment.' % args.environment)
diff --git a/src/build/android/pylib/base/mock_environment.py b/src/build/android/pylib/base/mock_environment.py
new file mode 100644
index 0000000..d7293c7
--- /dev/null
+++ b/src/build/android/pylib/base/mock_environment.py
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.base import environment
+
+import mock  # pylint: disable=import-error
+
+
+MockEnvironment = mock.MagicMock(environment.Environment)
diff --git a/src/build/android/pylib/base/mock_test_instance.py b/src/build/android/pylib/base/mock_test_instance.py
new file mode 100644
index 0000000..19a1d7e
--- /dev/null
+++ b/src/build/android/pylib/base/mock_test_instance.py
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.base import test_instance
+
+import mock  # pylint: disable=import-error
+
+
+MockTestInstance = mock.MagicMock(test_instance.TestInstance)
diff --git a/src/build/android/pylib/base/output_manager.py b/src/build/android/pylib/base/output_manager.py
new file mode 100644
index 0000000..53e5aea
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager.py
@@ -0,0 +1,159 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import logging
+import os
+import tempfile
+
+from devil.utils import reraiser_thread
+
+
+class Datatype(object):
+  HTML = 'text/html'
+  JSON = 'application/json'
+  PNG = 'image/png'
+  TEXT = 'text/plain'
+
+
+class OutputManager(object):
+
+  def __init__(self):
+    """OutputManager Constructor.
+
+    This class provides a simple interface to save test output. Subclasses
+    of this will allow users to save test results in the cloud or locally.
+    """
+    self._allow_upload = False
+    self._thread_group = None
+
+  @contextlib.contextmanager
+  def ArchivedTempfile(
+      self, out_filename, out_subdir, datatype=Datatype.TEXT):
+    """Archive file contents asynchonously and then deletes file.
+
+    Args:
+      out_filename: Name for saved file.
+      out_subdir: Directory to save |out_filename| to.
+      datatype: Datatype of file.
+
+    Returns:
+      An ArchivedFile. This file will be uploaded asynchronously when the
+      context manager exits. AFTER the context manager exits, you can get the
+      link to where the file will be stored using the Link() API. You can use
+      typical file APIs to write and flush the ArchivedFile. You can also use
+      file.name to get the local filepath to where the underlying file
+      exists. If you do this, you are responsible for flushing the file
+      before exiting the context manager.
+    """
+    if not self._allow_upload:
+      raise Exception('Must run |SetUp| before attempting to upload!')
+
+    f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
+    try:
+      yield f
+    finally:
+      f.PrepareArchive()
+
+      def archive():
+        try:
+          f.Archive()
+        finally:
+          f.Delete()
+
+      thread = reraiser_thread.ReraiserThread(func=archive)
+      thread.start()
+      self._thread_group.Add(thread)
+
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    """Returns an instance of ArchivedFile."""
+    raise NotImplementedError
+
+  def SetUp(self):
+    self._allow_upload = True
+    self._thread_group = reraiser_thread.ReraiserThreadGroup()
+
+  def TearDown(self):
+    self._allow_upload = False
+    logging.info('Finishing archiving output.')
+    self._thread_group.JoinAll()
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
+
+class ArchivedFile(object):
+
+  def __init__(self, out_filename, out_subdir, datatype):
+    self._out_filename = out_filename
+    self._out_subdir = out_subdir
+    self._datatype = datatype
+
+    self._f = tempfile.NamedTemporaryFile(delete=False)
+    self._ready_to_archive = False
+
+  @property
+  def name(self):
+    return self._f.name
+
+  def write(self, *args, **kwargs):
+    if self._ready_to_archive:
+      raise Exception('Cannot write to file after archiving has begun!')
+    self._f.write(*args, **kwargs)
+
+  def flush(self, *args, **kwargs):
+    if self._ready_to_archive:
+      raise Exception('Cannot flush file after archiving has begun!')
+    self._f.flush(*args, **kwargs)
+
+  def Link(self):
+    """Returns location of archived file."""
+    if not self._ready_to_archive:
+      raise Exception('Cannot get link to archived file before archiving '
+                      'has begun')
+    return self._Link()
+
+  def _Link(self):
+    """Note for when overriding this function.
+
+    This function may be called before the file has finished being archived.
+    Therefore, it needs to be able to know the exact location of the archived
+    file before archiving completes.
+    """
+    raise NotImplementedError
+
+  def PrepareArchive(self):
+    """Meant to be called synchronously to prepare file for async archiving."""
+    self.flush()
+    self._ready_to_archive = True
+    self._PrepareArchive()
+
+  def _PrepareArchive(self):
+    """Note for when overriding this function.
+
+    This function is needed for things such as computing the location of
+    content addressed files. This is called after the file is written but
+    before archiving has begun.
+    """
+    pass
+
+  def Archive(self):
+    """Archives file."""
+    if not self._ready_to_archive:
+      raise Exception('File is not ready to archive. Be sure you are not '
+                      'writing to the file and that PrepareArchive has been '
+                      'called.')
+    self._Archive()
+
+  def _Archive(self):
+    raise NotImplementedError
+
+  def Delete(self):
+    """Deletes the backing file."""
+    self._f.close()
+    os.remove(self.name)
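+
+
+# A usage sketch, where |manager| is an instance of a concrete OutputManager
+# subclass (illustrative only):
+#
+#   with manager:
+#     with manager.ArchivedTempfile('log.txt', 'logs', Datatype.TEXT) as f:
+#       f.write(b'test output')
+#     # The upload is now scheduled; f.Link() returns the location where
+#     # the archived file will be stored.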
diff --git a/src/build/android/pylib/base/output_manager_factory.py b/src/build/android/pylib/base/output_manager_factory.py
new file mode 100644
index 0000000..891692d
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager_factory.py
@@ -0,0 +1,18 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib import constants
+from pylib.output import local_output_manager
+from pylib.output import remote_output_manager
+from pylib.utils import local_utils
+
+
+def CreateOutputManager(args):
+  if args.local_output or not local_utils.IsOnSwarming():
+    return local_output_manager.LocalOutputManager(
+        output_dir=constants.GetOutDirectory())
+  else:
+    return remote_output_manager.RemoteOutputManager(
+        bucket=args.gs_results_bucket)
diff --git a/src/build/android/pylib/base/output_manager_test_case.py b/src/build/android/pylib/base/output_manager_test_case.py
new file mode 100644
index 0000000..7b7e462
--- /dev/null
+++ b/src/build/android/pylib/base/output_manager_test_case.py
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import os.path
+import unittest
+
+
+class OutputManagerTestCase(unittest.TestCase):
+
+  def assertUsableTempFile(self, archived_tempfile):
+    self.assertTrue(bool(archived_tempfile.name))
+    self.assertTrue(os.path.exists(archived_tempfile.name))
+    self.assertTrue(os.path.isfile(archived_tempfile.name))
diff --git a/src/build/android/pylib/base/result_sink.py b/src/build/android/pylib/base/result_sink.py
new file mode 100644
index 0000000..424b873
--- /dev/null
+++ b/src/build/android/pylib/base/result_sink.py
@@ -0,0 +1,163 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+import base64
+import cgi
+import json
+import os
+
+import six
+if not six.PY2:
+  import html  # pylint: disable=import-error
+
+from pylib.base import base_test_result
+import requests  # pylint: disable=import-error
+
+# Comes from luci/resultdb/pbutil/test_result.go
+MAX_REPORT_LEN = 4 * 1024
+
+# Maps base_test_results to the luci test-result.proto.
+# https://godoc.org/go.chromium.org/luci/resultdb/proto/v1#TestStatus
+RESULT_MAP = {
+    base_test_result.ResultType.UNKNOWN: 'ABORT',
+    base_test_result.ResultType.PASS: 'PASS',
+    base_test_result.ResultType.FAIL: 'FAIL',
+    base_test_result.ResultType.CRASH: 'CRASH',
+    base_test_result.ResultType.TIMEOUT: 'ABORT',
+    base_test_result.ResultType.SKIP: 'SKIP',
+    base_test_result.ResultType.NOTRUN: 'SKIP',
+}
+
+
+def TryInitClient():
+  """Tries to initialize a result_sink_client object.
+
+  Assumes that rdb stream is already running.
+
+  Returns:
+    A ResultSinkClient for the result_sink server, or None if result_sink is
+    not configured in LUCI_CONTEXT.
+  """
+  try:
+    with open(os.environ['LUCI_CONTEXT']) as f:
+      sink = json.load(f)['result_sink']
+      return ResultSinkClient(sink)
+  except KeyError:
+    return None
+
+
+class ResultSinkClient(object):
+  """A class to store the sink's post configurations and make post requests.
+
+  This assumes that the rdb stream has been called already and that the
+  server is listening.
+  """
+  def __init__(self, context):
+    base_url = 'http://%s/prpc/luci.resultsink.v1.Sink' % context['address']
+    self.test_results_url = base_url + '/ReportTestResults'
+    self.report_artifacts_url = base_url + '/ReportInvocationLevelArtifacts'
+
+    self.headers = {
+        'Content-Type': 'application/json',
+        'Accept': 'application/json',
+        'Authorization': 'ResultSink %s' % context['auth_token'],
+    }
+
+  def Post(self, test_id, status, duration, test_log, test_file,
+           artifacts=None):
+    """Uploads the test result to the ResultSink server.
+
+    This assumes that the rdb stream has been called already and that the
+    server is listening.
+
+    Args:
+      test_id: A string representing the test's name.
+      status: A string representing whether the test passed, failed, etc.
+      duration: An int representing time in ms.
+      test_log: A string representing the test's output.
+      test_file: A string representing the file location of the test.
+      artifacts: An optional dict of artifacts to attach to the test.
+
+    Returns:
+      None.
+    """
+    assert status in RESULT_MAP
+    expected = status in (base_test_result.ResultType.PASS,
+                          base_test_result.ResultType.SKIP)
+    result_db_status = RESULT_MAP[status]
+
+    # Slightly smaller to allow addition of <pre> tags and message.
+    report_check_size = MAX_REPORT_LEN - 45
+    if six.PY2:
+      test_log_escaped = cgi.escape(test_log)
+    else:
+      test_log_escaped = html.escape(test_log)
+    if len(test_log_escaped) > report_check_size:
+      test_log_formatted = ('<pre>' + test_log_escaped[:report_check_size] +
+                            '...Full output in Artifact.</pre>')
+    else:
+      test_log_formatted = '<pre>' + test_log_escaped + '</pre>'
+
+    tr = {
+        'expected':
+        expected,
+        'status':
+        result_db_status,
+        'summaryHtml':
+        test_log_formatted,
+        'tags': [
+            {
+                'key': 'test_name',
+                'value': test_id,
+            },
+            {
+                # Status before getting mapped to result_db statuses.
+                'key': 'android_test_runner_status',
+                'value': status,
+            }
+        ],
+        'testId':
+        test_id,
+    }
+    artifacts = artifacts or {}
+    if len(test_log_escaped) > report_check_size:
+      # Upload the original log without any modifications.
+      b64_log = six.ensure_str(base64.b64encode(six.ensure_binary(test_log)))
+      artifacts.update({'Test Log': {'contents': b64_log}})
+    if artifacts:
+      tr['artifacts'] = artifacts
+
+    if duration is not None:
+      # Duration must be formatted to avoid scientific notation in case
+      # number is too small or too large. Result_db takes seconds, not ms.
+      # Need to use float() otherwise it does substitution first then divides.
+      tr['duration'] = '%.9fs' % float(duration / 1000.0)
+
+    if test_file and str(test_file).startswith('//'):
+      tr['testMetadata'] = {
+          'name': test_id,
+          'location': {
+              'file_name': test_file,
+              'repo': 'https://chromium.googlesource.com/chromium/src',
+          }
+      }
+
+    res = requests.post(url=self.test_results_url,
+                        headers=self.headers,
+                        data=json.dumps({'testResults': [tr]}))
+    res.raise_for_status()
+
+  def ReportInvocationLevelArtifacts(self, artifacts):
+    """Uploads invocation-level artifacts to the ResultSink server.
+
+    This is for artifacts that don't apply to a single test but to the test
+    invocation as a whole (eg: system logs).
+
+    Args:
+      artifacts: A dict of artifacts to attach to the invocation.
+    """
+    req = {'artifacts': artifacts}
+    res = requests.post(url=self.report_artifacts_url,
+                        headers=self.headers,
+                        data=json.dumps(req))
+    res.raise_for_status()
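+
+
+# A usage sketch (assumes an `rdb stream` session is already running, as
+# documented above):
+#
+#   client = TryInitClient()
+#   if client:
+#     client.Post('MyTest.testFoo', base_test_result.ResultType.PASS,
+#                 duration=1250, test_log='ok',
+#                 test_file='//build/android/test.py')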
diff --git a/src/build/android/pylib/base/test_collection.py b/src/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000..83b3bf8
--- /dev/null
+++ b/src/build/android/pylib/base/test_collection.py
@@ -0,0 +1,81 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import threading
+
+
+class TestCollection(object):
+  """A threadsafe collection of tests.
+
+  Args:
+    tests: List of tests to put in the collection.
+  """
+
+  def __init__(self, tests=None):
+    if not tests:
+      tests = []
+    self._lock = threading.Lock()
+    self._tests = []
+    self._tests_in_progress = 0
+    # Used to signal that an item is available or all items have been handled.
+    self._item_available_or_all_done = threading.Event()
+    for t in tests:
+      self.add(t)
+
+  def _pop(self):
+    """Pop a test from the collection.
+
+    Waits until a test is available or all tests have been handled.
+
+    Returns:
+      A test or None if all tests have been handled.
+    """
+    while True:
+      # Wait for a test to be available or all tests to have been handled.
+      self._item_available_or_all_done.wait()
+      with self._lock:
+        # Check which of the two conditions triggered the signal.
+        if self._tests_in_progress == 0:
+          return None
+        try:
+          return self._tests.pop(0)
+        except IndexError:
+          # Another thread beat us to the available test, wait again.
+          self._item_available_or_all_done.clear()
+
+  def add(self, test):
+    """Add a test to the collection.
+
+    Args:
+      test: A test to add.
+    """
+    with self._lock:
+      self._tests.append(test)
+      self._item_available_or_all_done.set()
+      self._tests_in_progress += 1
+
+  def test_completed(self):
+    """Indicate that a test has been fully handled."""
+    with self._lock:
+      self._tests_in_progress -= 1
+      if self._tests_in_progress == 0:
+        # All tests have been handled, signal all waiting threads.
+        self._item_available_or_all_done.set()
+
+  def __iter__(self):
+    """Iterate through tests in the collection until all have been handled."""
+    while True:
+      r = self._pop()
+      if r is None:
+        break
+      yield r
+
+  def __len__(self):
+    """Return the number of tests currently in the collection."""
+    return len(self._tests)
+
+  def test_names(self):
+    """Return a list of the names of the tests currently in the collection."""
+    with self._lock:
+      return list(t.test for t in self._tests)
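+
+
+# A producer/consumer sketch (illustrative). Each worker iterates to pop
+# tests and must call test_completed() once per handled test so that the
+# iteration in other workers can terminate:
+#
+#   collection = TestCollection(tests)
+#
+#   def worker():
+#     for test in collection:
+#       try:
+#         RunTest(test)  # Hypothetical test-running helper.
+#       finally:
+#         collection.test_completed()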
diff --git a/src/build/android/pylib/base/test_exception.py b/src/build/android/pylib/base/test_exception.py
new file mode 100644
index 0000000..c98d2cb
--- /dev/null
+++ b/src/build/android/pylib/base/test_exception.py
@@ -0,0 +1,8 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestException(Exception):
+  """Base class for exceptions thrown by the test runner."""
+  pass
diff --git a/src/build/android/pylib/base/test_instance.py b/src/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000..7b1099c
--- /dev/null
+++ b/src/build/android/pylib/base/test_instance.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+  """A type of test.
+
+  This is expected to handle all logic that is test-type specific but
+  independent of the environment or device.
+
+  Examples include:
+    - gtests
+    - instrumentation tests
+  """
+
+  def __init__(self):
+    pass
+
+  def TestType(self):
+    raise NotImplementedError
+
+  # pylint: disable=no-self-use
+  def GetPreferredAbis(self):
+    return None
+
+  # pylint: enable=no-self-use
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
diff --git a/src/build/android/pylib/base/test_instance_factory.py b/src/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000..f47242a
--- /dev/null
+++ b/src/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.utils import device_dependencies
+
+
+def CreateTestInstance(args, error_func):
+
+  if args.command == 'gtest':
+    return gtest_test_instance.GtestTestInstance(
+        args, device_dependencies.GetDataDependencies, error_func)
+  elif args.command == 'instrumentation':
+    return instrumentation_test_instance.InstrumentationTestInstance(
+        args, device_dependencies.GetDataDependencies, error_func)
+  elif args.command == 'junit':
+    return junit_test_instance.JunitTestInstance(args, error_func)
+  elif args.command == 'monkey':
+    return monkey_test_instance.MonkeyTestInstance(args, error_func)
+
+  error_func('Unable to create %s test instance.' % args.command)
diff --git a/src/build/android/pylib/base/test_run.py b/src/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000..fc72d3a
--- /dev/null
+++ b/src/build/android/pylib/base/test_run.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+  """An execution of a particular test on a particular device.
+
+  This is expected to handle all logic that is specific to the combination of
+  environment and test type.
+
+  Examples include:
+    - local gtests
+    - local instrumentation tests
+  """
+
+  def __init__(self, env, test_instance):
+    self._env = env
+    self._test_instance = test_instance
+
+    # Some subclasses have different teardown behavior on receiving SIGTERM.
+    self._received_sigterm = False
+
+  def TestPackage(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def RunTests(self, results):
+    """Runs Tests and populates |results|.
+
+    Args:
+      results: An array that should be populated with
+               |base_test_result.TestRunResults| objects.
+    """
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.TearDown()
+
+  def ReceivedSigterm(self):
+    self._received_sigterm = True
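+
+# Usage sketch (SomeTestRun is a hypothetical subclass): a runner drives a
+# TestRun as a context manager and collects TestRunResults via RunTests().
+#
+#   results = []
+#   with SomeTestRun(env, test_instance) as test_run:
+#     test_run.RunTests(results)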
diff --git a/src/build/android/pylib/base/test_run_factory.py b/src/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000..35d5494
--- /dev/null
+++ b/src/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,36 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.local.device import local_device_monkey_test_run
+from pylib.local.machine import local_machine_environment
+from pylib.local.machine import local_machine_junit_test_run
+
+
+def CreateTestRun(env, test_instance, error_func):
+  if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (local_device_instrumentation_test_run
+              .LocalDeviceInstrumentationTestRun(env, test_instance))
+    if isinstance(test_instance, monkey_test_instance.MonkeyTestInstance):
+      return (local_device_monkey_test_run
+              .LocalDeviceMonkeyTestRun(env, test_instance))
+
+  if isinstance(env, local_machine_environment.LocalMachineEnvironment):
+    if isinstance(test_instance, junit_test_instance.JunitTestInstance):
+      return (local_machine_junit_test_run
+              .LocalMachineJunitTestRun(env, test_instance))
+
+  error_func('Unable to create test run for %s tests in %s environment'
+             % (str(test_instance), str(env)))
diff --git a/src/build/android/pylib/base/test_server.py b/src/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000..763e121
--- /dev/null
+++ b/src/build/android/pylib/base/test_server.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+  """Base class for any server that needs to be set up for the tests."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def Reset(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
diff --git a/src/build/android/pylib/constants/__init__.py b/src/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000..2d1be26
--- /dev/null
+++ b/src/build/android/pylib/constants/__init__.py
@@ -0,0 +1,288 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+from __future__ import absolute_import
+import collections
+import glob
+import logging
+import os
+import subprocess
+
+import devil.android.sdk.keyevent
+from devil.android.constants import chrome
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+
+
+keyevent = devil.android.sdk.keyevent
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+
+PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
+PACKAGE_INFO.update({
+    'legacy_browser':
+    chrome.PackageInfo('com.google.android.browser',
+                       'com.android.browser.BrowserActivity', None, None),
+    'chromecast_shell':
+    chrome.PackageInfo('com.google.android.apps.mediashell',
+                       'com.google.android.apps.mediashell.MediaShellActivity',
+                       'castshell-command-line', None),
+    'android_webview_shell':
+    chrome.PackageInfo('org.chromium.android_webview.shell',
+                       'org.chromium.android_webview.shell.AwShellActivity',
+                       'android-webview-command-line', None),
+    'gtest':
+    chrome.PackageInfo('org.chromium.native_test',
+                       'org.chromium.native_test.NativeUnitTestActivity',
+                       'chrome-native-tests-command-line', None),
+    'android_browsertests':
+    chrome.PackageInfo('org.chromium.android_browsertests_apk',
+                       ('org.chromium.android_browsertests_apk' +
+                        '.ChromeBrowserTestsActivity'),
+                       'chrome-native-tests-command-line', None),
+    'components_browsertests':
+    chrome.PackageInfo('org.chromium.components_browsertests_apk',
+                       ('org.chromium.components_browsertests_apk' +
+                        '.ComponentsBrowserTestsActivity'),
+                       'chrome-native-tests-command-line', None),
+    'content_browsertests':
+    chrome.PackageInfo(
+        'org.chromium.content_browsertests_apk',
+        'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+        'chrome-native-tests-command-line', None),
+    'chromedriver_webview_shell':
+    chrome.PackageInfo('org.chromium.chromedriver_webview_shell',
+                       'org.chromium.chromedriver_webview_shell.Main', None,
+                       None),
+    'android_webview_cts':
+    chrome.PackageInfo('com.android.webview',
+                       'com.android.cts.webkit.WebViewStartupCtsActivity',
+                       'webview-command-line', None),
+    'android_google_webview_cts':
+    chrome.PackageInfo('com.google.android.webview',
+                       'com.android.cts.webkit.WebViewStartupCtsActivity',
+                       'webview-command-line', None),
+    'android_system_webview_shell':
+    chrome.PackageInfo('org.chromium.webview_shell',
+                       'org.chromium.webview_shell.WebViewBrowserActivity',
+                       'webview-command-line', None),
+    'android_webview_ui_test':
+    chrome.PackageInfo('org.chromium.webview_ui_test',
+                       'org.chromium.webview_ui_test.WebViewUiTestActivity',
+                       'webview-command-line', None),
+    'weblayer_browsertests':
+    chrome.PackageInfo(
+        'org.chromium.weblayer_browsertests_apk',
+        'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity',
+        'chrome-native-tests-command-line', None),
+})
+
+
+# Port assignments for the various test servers used in Chrome for Android.
+# The lighttpd server will attempt to use 9000 as its default port; if that
+# port is unavailable, it will pick a free port from 8001-8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+ANDROID_SDK_BUILD_TOOLS_VERSION = '30.0.1'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk',
+                                'public')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party', 'android_ndk')
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                                'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+# TODO(jbudorick): Remove once unused.
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Configure ubsan to print stack traces in the format understood by "stack" so
+# that they will be symbolized, and disable signal handlers because they
+# interfere with the breakpad and sandbox tests.
+# This value is duplicated in
+# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java
+UBSAN_OPTIONS = (
+    'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' '
+    'handle_segv=0 handle_sigbus=0 handle_sigfpe=0')
+
+# TODO(jbudorick): Rework this into testing/buildbot/
+PYTHON_UNIT_TEST_SUITES = {
+    'pylib_py_unittests': {
+        'path':
+        os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+        'test_modules': [
+            'devil.android.device_utils_test',
+            'devil.android.md5sum_test',
+            'devil.utils.cmd_helper_test',
+            'pylib.results.json_results_test',
+            'pylib.utils.proguard_test',
+        ]
+    },
+    'gyp_py_unittests': {
+        'path':
+        os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+        'test_modules': [
+            'java_cpp_enum_tests',
+            'java_cpp_strings_tests',
+            'java_google_api_keys_tests',
+            'extract_unwind_tables_tests',
+        ]
+    },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+                    'perf', 'python']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def SetBuildType(build_type):
+  """Set the BUILDTYPE environment variable.
+
+  NOTE: Using this function is deprecated in favor of SetOutputDirectory();
+        it is still maintained for a few scripts that typically call it
+        to implement their --release and --debug command-line options.
+
+        When writing a new script, consider supporting an --output-dir or
+        --chromium-output-dir option and calling SetOutputDirectory()
+        instead.
+
+  NOTE: If CHROMIUM_OUTPUT_DIR is defined, or if SetOutputDirectory() was
+  called previously, the value set here is ignored by GetOutDirectory().
+  """
+  chromium_output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+  if chromium_output_dir:
+    logging.warning(
+        'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already '
+        'defined as (%s)', build_type, chromium_output_dir)
+  os.environ['BUILDTYPE'] = build_type
+
+
+def SetOutputDirectory(output_directory):
+  """Set the Chromium output directory.
+
+  This must be called early by scripts that rely on GetOutDirectory() or
+  CheckOutputDirectory(), typically after parsing an --output-dir or
+  --chromium-output-dir option.
+  """
+  os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+# The message that is printed when the Chromium output directory cannot
+# be found. Note that CHROMIUM_OUT_DIR and BUILDTYPE are not mentioned
+# intentionally to encourage the use of CHROMIUM_OUTPUT_DIR instead.
+_MISSING_OUTPUT_DIR_MESSAGE = '\
+The Chromium output directory could not be found. Please use an option such as \
+--output-directory to provide it (see --help for details). Otherwise, \
+define the CHROMIUM_OUTPUT_DIR environment variable.'
+
+
+def GetOutDirectory():
+  """Returns the Chromium build output directory.
+
+  NOTE: This is determined in the following way:
+    - From a previous call to SetOutputDirectory()
+    - Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined.
+    - Otherwise, from the current Chromium source directory, and a previous
+      call to SetBuildType() or the BUILDTYPE env variable, in combination
+      with the optional CHROMIUM_OUT_DIR env variable.
+  """
+  if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+    return os.path.abspath(os.path.join(
+        DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+  build_type = os.environ.get('BUILDTYPE')
+  if not build_type:
+    raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE)
+
+  return os.path.abspath(os.path.join(
+      DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+      build_type))
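+
+# Example (illustrative; assumes CHROMIUM_OUTPUT_DIR is initially unset):
+#
+#   SetBuildType('Debug')
+#   GetOutDirectory()       # -> <DIR_SOURCE_ROOT>/out/Debug
+#   SetOutputDirectory('/tmp/mybuild')
+#   GetOutDirectory()       # -> /tmp/mybuild (env variable takes precedence)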
+
+
+def CheckOutputDirectory():
+  """Checks that the Chromium output directory is set, or can be found.
+
+  If it is not already set, this will also perform a little auto-detection:
+
+    - If the current directory contains a build.ninja file, use it as
+      the output directory.
+
+    - If CHROME_HEADLESS is defined in the environment (e.g. on a bot),
+      check whether there is a single output directory under
+      DIR_SOURCE_ROOT/out/, and if so, use it as the output directory.
+
+  Raises:
+    Exception: If no output directory is detected.
+  """
+  output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+  if output_dir:
+    return
+
+  build_type = os.environ.get('BUILDTYPE')
+  if build_type and len(build_type) > 1:
+    return
+
+  # If CWD is an output directory, then assume it's the desired one.
+  if os.path.exists('build.ninja'):
+    output_dir = os.getcwd()
+    SetOutputDirectory(output_dir)
+    return
+
+  # When running on bots, see if the output directory is obvious.
+  # TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set
+  # CHROMIUM_OUTPUT_DIR correctly.
+  if os.environ.get('CHROME_HEADLESS'):
+    dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
+    if len(dirs) == 1:
+      SetOutputDirectory(dirs[0])
+      return
+
+    raise Exception(
+        'Chromium output directory not set, and CHROME_HEADLESS detected. ' +
+        'However, a single out dir could not be detected: %r' % dirs)
+
+  raise Exception(_MISSING_OUTPUT_DIR_MESSAGE)
+
+
+# Exit codes
+ERROR_EXIT_CODE = exit_codes.ERROR
+INFRA_EXIT_CODE = exit_codes.INFRA
+WARNING_EXIT_CODE = exit_codes.WARNING
diff --git a/src/build/android/pylib/constants/host_paths.py b/src/build/android/pylib/constants/host_paths.py
new file mode 100644
index 0000000..a38d28e
--- /dev/null
+++ b/src/build/android/pylib/constants/host_paths.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import os
+import sys
+
+from pylib import constants
+
+DIR_SOURCE_ROOT = os.environ.get(
+    'CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')
+
+# third-party libraries
+ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
+    'scripts')
+BUILD_PATH = os.path.join(DIR_SOURCE_ROOT, 'build')
+DEVIL_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
+JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+                         'bin')
+TRACING_PATH = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
+
+@contextlib.contextmanager
+def SysPath(path, position=None):
+  if position is None:
+    sys.path.append(path)
+  else:
+    sys.path.insert(position, path)
+  try:
+    yield
+  finally:
+    if sys.path[-1] == path:
+      sys.path.pop()
+    else:
+      sys.path.remove(path)
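+
+# Usage sketch: temporarily make a library importable, restoring sys.path on
+# exit. For example:
+#
+#   with SysPath(DEVIL_PATH):
+#     from devil.android import device_utils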
+
+
+# Map of CPU architecture name to (toolchain_name, binprefix) pairs.
+# TODO(digit): Use the build_vars.json file generated by gn.
+_TOOL_ARCH_MAP = {
+  'arm': ('arm-linux-androideabi-4.9', 'arm-linux-androideabi'),
+  'arm64': ('aarch64-linux-android-4.9', 'aarch64-linux-android'),
+  'x86': ('x86-4.9', 'i686-linux-android'),
+  'x86_64': ('x86_64-4.9', 'x86_64-linux-android'),
+  'x64': ('x86_64-4.9', 'x86_64-linux-android'),
+  'mips': ('mipsel-linux-android-4.9', 'mipsel-linux-android'),
+}
+
+# Cache used to speed up the results of ToolPath()
+# Maps (arch, tool_name) pairs to fully qualified program paths.
+# Useful because ToolPath() is called repeatedly for demangling C++ symbols.
+_cached_tool_paths = {}
+
+
+def ToolPath(tool, cpu_arch):
+  """Return a fully qualifed path to an arch-specific toolchain program.
+
+  Args:
+    tool: Unprefixed toolchain program name (e.g. 'objdump')
+    cpu_arch: Target CPU architecture (e.g. 'arm64')
+  Returns:
+    Fully qualified path (e.g. '..../aarch64-linux-android-objdump')
+  Raises:
+    Exception if the toolchain could not be found.
+  """
+  tool_path = _cached_tool_paths.get((tool, cpu_arch))
+  if tool_path:
+    return tool_path
+
+  toolchain_source, toolchain_prefix = _TOOL_ARCH_MAP.get(
+      cpu_arch, (None, None))
+  if not toolchain_source:
+    raise Exception('Could not find tool chain for ' + cpu_arch)
+
+  toolchain_subdir = (
+      'toolchains/%s/prebuilt/linux-x86_64/bin' % toolchain_source)
+
+  tool_path = os.path.join(constants.ANDROID_NDK_ROOT,
+                           toolchain_subdir,
+                           toolchain_prefix + '-' + tool)
+
+  _cached_tool_paths[(tool, cpu_arch)] = tool_path
+  return tool_path
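+
+# For example (illustrative), ToolPath('objdump', 'arm64') yields
+# <ANDROID_NDK_ROOT>/toolchains/aarch64-linux-android-4.9/prebuilt/
+# linux-x86_64/bin/aarch64-linux-android-objdump, and the result is cached
+# for subsequent calls.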
+
+
+def GetAaptPath():
+  """Returns the path to the 'aapt' executable."""
+  return os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
diff --git a/src/build/android/pylib/constants/host_paths_unittest.py b/src/build/android/pylib/constants/host_paths_unittest.py
new file mode 100755
index 0000000..72be4ed
--- /dev/null
+++ b/src/build/android/pylib/constants/host_paths_unittest.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+import os
+import unittest
+
+import six
+import pylib.constants as constants
+import pylib.constants.host_paths as host_paths
+
+# This map corresponds to the binprefix of NDK prebuilt toolchains for various
+# target CPU architectures. Note that 'x86_64' and 'x64' are the same.
+_EXPECTED_NDK_TOOL_SUBDIR_MAP = {
+  'arm': 'toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/' +
+         'arm-linux-androideabi-',
+  'arm64':
+      'toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+      'aarch64-linux-android-',
+  'x86': 'toolchains/x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-',
+  'x86_64':
+      'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+  'x64':
+      'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+  'mips':
+      'toolchains/mipsel-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+      'mipsel-linux-android-'
+}
+
+
+class HostPathsTest(unittest.TestCase):
+  def setUp(self):
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def test_GetAaptPath(self):
+    _EXPECTED_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
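+    # Called twice to verify that repeated calls return the same path.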
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+
+  def test_ToolPath(self):
+    for cpu_arch, binprefix in six.iteritems(_EXPECTED_NDK_TOOL_SUBDIR_MAP):
+      expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix)
+      expected_path = expected_binprefix + 'foo'
+      self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/content_settings.py b/src/build/android/pylib/content_settings.py
new file mode 100644
index 0000000..3bf11bc
--- /dev/null
+++ b/src/build/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Content settings are key/value pairs as exposed by 'adb shell content'.
+  """
+
+  def __init__(self, table, device):
+    super(ContentSettings, self).__init__()
+    self._table = table
+    self._device = device
+
+  @staticmethod
+  def _GetTypeBinding(value):
+    if isinstance(value, bool):
+      return 'b'
+    if isinstance(value, float):
+      return 'f'
+    if isinstance(value, int):
+      return 'i'
+    if isinstance(value, long):
+      return 'l'
+    if isinstance(value, str):
+      return 's'
+    raise ValueError('Unsupported type %s' % type(value))
+
+  def iteritems(self):
+    # Example row:
+    # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+    for row in self._device.RunShellCommand(
+        'content query --uri content://%s' % self._table, as_root=True):
+      fields = row.split(', ')
+      key = None
+      value = None
+      for field in fields:
+        k, _, v = field.partition('=')
+        if k == 'name':
+          key = v
+        elif k == 'value':
+          value = v
+      if not key:
+        continue
+      if not value:
+        value = ''
+      yield key, value
+
+  def __getitem__(self, key):
+    return self._device.RunShellCommand(
+        'content query --uri content://%s --where "name=\'%s\'" '
+        '--projection value' % (self._table, key), as_root=True).strip()
+
+  def __setitem__(self, key, value):
+    if key in self:
+      self._device.RunShellCommand(
+          'content update --uri content://%s '
+          '--bind value:%s:%s --where "name=\'%s\'"' % (
+              self._table,
+              self._GetTypeBinding(value), value, key),
+          as_root=True)
+    else:
+      self._device.RunShellCommand(
+          'content insert --uri content://%s '
+          '--bind name:%s:%s --bind value:%s:%s' % (
+              self._table,
+              self._GetTypeBinding(key), key,
+              self._GetTypeBinding(value), value),
+          as_root=True)
+
+  def __delitem__(self, key):
+    self._device.RunShellCommand(
+        'content delete --uri content://%s '
+        '--bind name:%s:%s' % (
+            self._table,
+            self._GetTypeBinding(key), key),
+        as_root=True)
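+
+# Usage sketch (assuming `device` is a devil DeviceUtils instance):
+#
+#   settings = ContentSettings('settings/secure', device)
+#   settings['mock_location'] = 1   # inserts or updates the row
+#   del settings['mock_location']   # deletes it again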
diff --git a/src/build/android/pylib/device/__init__.py b/src/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/device/__init__.py
diff --git a/src/build/android/pylib/device/commands/BUILD.gn b/src/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000..13b69f6
--- /dev/null
+++ b/src/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+  data_deps = [ ":chromium_commands_java" ]
+}
+
+android_library("unzip_java") {
+  jacoco_never_instrument = true
+  sources = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+}
+
+dist_dex("chromium_commands_java") {
+  deps = [ ":unzip_java" ]
+  output = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+  data = [ output ]
+}
diff --git a/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000..cf0ff67
--- /dev/null
+++ b/src/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,93 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ *  Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+    private static final String TAG = "Unzip";
+
+    public static void main(String[] args) {
+        try {
+            (new Unzip()).run(args);
+        } catch (RuntimeException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private void showUsage(PrintStream s) {
+        s.println("Usage:");
+        s.println("unzip [zipfile]");
+    }
+
+    @SuppressWarnings("Finally")
+    private void unzip(String[] args) {
+        ZipInputStream zis = null;
+        try {
+            String zipfile = args[0];
+            zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+            ZipEntry ze = null;
+
+            byte[] bytes = new byte[1024];
+            while ((ze = zis.getNextEntry()) != null) {
+                File outputFile = new File(ze.getName());
+                if (ze.isDirectory()) {
+                    if (!outputFile.exists() && !outputFile.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + outputFile.toString());
+                    }
+                } else {
+                    File parentDir = outputFile.getParentFile();
+                    if (!parentDir.exists() && !parentDir.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + parentDir.toString());
+                    }
+                    OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+                    int actual_bytes = 0;
+                    int total_bytes = 0;
+                    while ((actual_bytes = zis.read(bytes)) != -1) {
+                        out.write(bytes, 0, actual_bytes);
+                        total_bytes += actual_bytes;
+                    }
+                    out.close();
+                }
+                zis.closeEntry();
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Error while unzipping", e);
+        } finally {
+            try {
+                if (zis != null) zis.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Error while closing zip: " + e.toString());
+            }
+        }
+    }
+
+    public void run(String[] args) {
+        if (args.length != 1) {
+            showUsage(System.err);
+            throw new RuntimeException("Incorrect usage!");
+        }
+
+        unzip(args);
+    }
+}
+
diff --git a/src/build/android/pylib/device_settings.py b/src/build/android/pylib/device_settings.py
new file mode 100644
index 0000000..ab4ad1b
--- /dev/null
+++ b/src/build/android/pylib/device_settings.py
@@ -0,0 +1,199 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content setings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) tuples for all
+        settings to configure.
+  """
+  for table, key_value in desired_settings:
+    settings = content_settings.ContentSettings(table, device)
+    for key, value in key_value:
+      settings[key] = value
+    logging.info('\n%s %s', table, (80 - len(table)) * '-')
+    for key, value in sorted(settings.iteritems()):
+      logging.info('\t%s: %s', key, value)
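+
+# For example, the predefined setting lists below can be passed directly
+# (assuming `device` is a devil DeviceUtils instance):
+#
+#   ConfigureContentSettings(device, ENABLE_MOCK_LOCATION_SETTINGS)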
+
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen in
+  a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with the
+  same value for that column are removed from the table prior to inserting the
+  new values.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if device.build_type not in _COMPATIBLE_BUILD_TYPES:
+    logging.warning('Unable to disable lockscreen on %s builds.',
+                    device.build_type)
+    return
+
+  def get_lock_settings(table):
+    return [(table, 'lockscreen.disabled', '1'),
+            (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+            (table, 'lockscreen.password_type_alternate',
+             PASSWORD_QUALITY_UNSPECIFIED)]
+
+  if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+    db = _LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('locksettings')
+    columns = ['name', 'user', 'value']
+    generate_values = lambda k, v: [k, '0', v]
+  elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+    db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('secure') + get_lock_settings('system')
+    columns = ['name', 'value']
+    generate_values = lambda k, v: [k, v]
+  else:
+    logging.warning('Unable to find database file to set lock screen settings.')
+    return
+
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for default user '0'
+    values = generate_values(key, value)
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
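+    # For the 'locksettings' table, for example, this expands to SQL like:
+    #   delete from 'locksettings' where name='lockscreen.disabled';
+    #   insert into 'locksettings' (name, user, value)
+    #     values ('lockscreen.disabled', '0', '1');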
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+                                        as_root=True)
+    if output_msg:
+      logging.info(' '.join(output_msg))
+
+
+ENABLE_LOCATION_SETTINGS = [
+  # Note that setting these in this order is required in order for all of
+  # them to take and stick through a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 1),
+  ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 0),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to avoid network-provided time
+    # to overwrite the device's datetime and timezone synchronized from host
+    # when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+    # Type: int ( 0 = disallow, 1 = allow )
+    ('send_action_app_error', 0),
+
+    ('stay_on_while_plugged_in', 3),
+
+    ('verifier_verify_adb_installs', 0),
+  ]),
+  ('settings/secure', [
+    ('allowed_geolocation_origins',
+        'http://www.google.co.uk http://www.google.com'),
+
+    # Ensure that we never get random dialogs like "Unfortunately the process
+    # android.process.acore has stopped", which steal the focus, and make our
+    # automation fail (because the dialog steals the focus then mistakenly
+    # receives the injected user input events).
+    ('anr_show_background', 0),
+
+    ('lockscreen.disabled', 1),
+
+    ('screensaver_enabled', 0),
+
+    ('skip_first_use_hints', 1),
+  ]),
+  ('settings/system', [
+    # Don't want devices to accidentally rotate the screen as that could
+    # affect performance measurements.
+    ('accelerometer_rotation', 0),
+
+    ('lockscreen.disabled', 1),
+
+    # Turn down brightness and disable auto-adjust so that devices run cooler.
+    ('screen_brightness', 5),
+    ('screen_brightness_mode', 0),
+
+    ('user_rotation', 0),
+  ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+  ('settings/global', [
+    ('airplane_mode_on', 1),
+    ('wifi_on', 0),
+  ]),
+]
diff --git a/src/build/android/pylib/dex/__init__.py b/src/build/android/pylib/dex/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/dex/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/dex/dex_parser.py b/src/build/android/pylib/dex/dex_parser.py
new file mode 100755
index 0000000..3f2ed6f
--- /dev/null
+++ b/src/build/android/pylib/dex/dex_parser.py
@@ -0,0 +1,549 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for optimistically parsing dex files.
+
+This file is not meant to provide a generic tool for analyzing dex files.
+A DexFile class that exposes access to several memory items in the dex format
+is provided, but it does not include error handling or validation.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import errno
+import os
+import re
+import struct
+import sys
+import zipfile
+
+# https://source.android.com/devices/tech/dalvik/dex-format#header-item
+_DEX_HEADER_FMT = (
+    ('magic', '8s'),
+    ('checksum', 'I'),
+    ('signature', '20s'),
+    ('file_size', 'I'),
+    ('header_size', 'I'),
+    ('endian_tag', 'I'),
+    ('link_size', 'I'),
+    ('link_off', 'I'),
+    ('map_off', 'I'),
+    ('string_ids_size', 'I'),
+    ('string_ids_off', 'I'),
+    ('type_ids_size', 'I'),
+    ('type_ids_off', 'I'),
+    ('proto_ids_size', 'I'),
+    ('proto_ids_off', 'I'),
+    ('field_ids_size', 'I'),
+    ('field_ids_off', 'I'),
+    ('method_ids_size', 'I'),
+    ('method_ids_off', 'I'),
+    ('class_defs_size', 'I'),
+    ('class_defs_off', 'I'),
+    ('data_size', 'I'),
+    ('data_off', 'I'),
+)
+
+DexHeader = collections.namedtuple('DexHeader',
+                                   ','.join(t[0] for t in _DEX_HEADER_FMT))
+
+# Simple memory items.
+_TypeIdItem = collections.namedtuple('TypeIdItem', 'descriptor_idx')
+_ProtoIdItem = collections.namedtuple(
+    'ProtoIdItem', 'shorty_idx,return_type_idx,parameters_off')
+_MethodIdItem = collections.namedtuple('MethodIdItem',
+                                       'type_idx,proto_idx,name_idx')
+_TypeItem = collections.namedtuple('TypeItem', 'type_idx')
+_StringDataItem = collections.namedtuple('StringDataItem', 'utf16_size,data')
+_ClassDefItem = collections.namedtuple(
+    'ClassDefItem',
+    'class_idx,access_flags,superclass_idx,interfaces_off,source_file_idx,'
+    'annotations_off,class_data_off,static_values_off')
+
+
+class _MemoryItemList(object):
+  """Base class for repeated memory items."""
+
+  def __init__(self,
+               reader,
+               offset,
+               size,
+               factory,
+               alignment=None,
+               first_item_offset=None):
+    """Creates the item list using the specific item factory.
+
+    Args:
+      reader: _DexReader used for decoding the memory item.
+      offset: Offset from start of the file to the item list, serving as the
+        key for some item types.
+      size: Number of memory items in the list.
+      factory: Function to extract each memory item from a _DexReader.
+      alignment: Optional integer specifying the alignment for the memory
+        section represented by this list.
+      first_item_offset: Optional, specifies a different offset to use for
+        extracting memory items (default is to use offset).
+    """
+    self.offset = offset
+    self.size = size
+    reader.Seek(first_item_offset or offset)
+    self._items = [factory(reader) for _ in xrange(size)]
+
+    if alignment:
+      reader.AlignUpTo(alignment)
+
+  def __iter__(self):
+    return iter(self._items)
+
+  def __getitem__(self, key):
+    return self._items[key]
+
+  def __len__(self):
+    return len(self._items)
+
+  def __repr__(self):
+    item_type_part = ''
+    if self.size != 0:
+      item_type = type(self._items[0])
+      item_type_part = ', item type={}'.format(item_type.__name__)
+
+    return '{}(offset={:#x}, size={}{})'.format(
+        type(self).__name__, self.offset, self.size, item_type_part)
+
+
+class _TypeIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = lambda x: _TypeIdItem(x.ReadUInt())
+    super(_TypeIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _ProtoIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = lambda x: _ProtoIdItem(x.ReadUInt(), x.ReadUInt(), x.ReadUInt())
+    super(_ProtoIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _MethodIdItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    factory = (
+        lambda x: _MethodIdItem(x.ReadUShort(), x.ReadUShort(), x.ReadUInt()))
+    super(_MethodIdItemList, self).__init__(reader, offset, size, factory)
+
+
+class _StringItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    reader.Seek(offset)
+    string_item_offsets = iter([reader.ReadUInt() for _ in xrange(size)])
+
+    def factory(x):
+      data_offset = next(string_item_offsets)
+      string = x.ReadString(data_offset)
+      return _StringDataItem(len(string), string)
+
+    super(_StringItemList, self).__init__(reader, offset, size, factory)
+
+
+class _TypeListItem(_MemoryItemList):
+
+  def __init__(self, reader):
+    offset = reader.Tell()
+    size = reader.ReadUInt()
+    factory = lambda x: _TypeItem(x.ReadUShort())
+    # This is necessary because we need to extract the size of the type list
+    # (in other cases the list size is provided in the header).
+    first_item_offset = reader.Tell()
+    super(_TypeListItem, self).__init__(
+        reader,
+        offset,
+        size,
+        factory,
+        alignment=4,
+        first_item_offset=first_item_offset)
+
+
+class _TypeListItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    super(_TypeListItemList, self).__init__(reader, offset, size, _TypeListItem)
+
+
+class _ClassDefItemList(_MemoryItemList):
+
+  def __init__(self, reader, offset, size):
+    reader.Seek(offset)
+
+    def factory(x):
+      return _ClassDefItem(*(x.ReadUInt()
+                             for _ in xrange(len(_ClassDefItem._fields))))
+
+    super(_ClassDefItemList, self).__init__(reader, offset, size, factory)
+
+
+class _DexMapItem(object):
+
+  def __init__(self, reader):
+    self.type = reader.ReadUShort()
+    reader.ReadUShort()
+    self.size = reader.ReadUInt()
+    self.offset = reader.ReadUInt()
+
+  def __repr__(self):
+    return '_DexMapItem(type={}, size={}, offset={:#x})'.format(
+        self.type, self.size, self.offset)
+
+
+class _DexMapList(object):
+  # Full list of type codes:
+  # https://source.android.com/devices/tech/dalvik/dex-format#type-codes
+  TYPE_TYPE_LIST = 0x1001
+
+  def __init__(self, reader, offset):
+    self._map = {}
+    reader.Seek(offset)
+    self._size = reader.ReadUInt()
+    for _ in xrange(self._size):
+      item = _DexMapItem(reader)
+      self._map[item.type] = item
+
+  def __getitem__(self, key):
+    return self._map[key]
+
+  def __contains__(self, key):
+    return key in self._map
+
+  def __repr__(self):
+    return '_DexMapList(size={}, items={})'.format(self._size, self._map)
+
+
+class _DexReader(object):
+
+  def __init__(self, data):
+    self._data = data
+    self._pos = 0
+
+  def Seek(self, offset):
+    self._pos = offset
+
+  def Tell(self):
+    return self._pos
+
+  def ReadUByte(self):
+    return self._ReadData('<B')
+
+  def ReadUShort(self):
+    return self._ReadData('<H')
+
+  def ReadUInt(self):
+    return self._ReadData('<I')
+
+  def ReadString(self, data_offset):
+    string_length, string_offset = self._ReadULeb128(data_offset)
+    string_data_offset = string_offset + data_offset
+    return self._DecodeMUtf8(string_length, string_data_offset)
+
+  def AlignUpTo(self, align_unit):
+    off_by = self._pos % align_unit
+    if off_by:
+      self.Seek(self._pos + align_unit - off_by)
+
+  def ReadHeader(self):
+    header_fmt = '<' + ''.join(t[1] for t in _DEX_HEADER_FMT)
+    return DexHeader._make(struct.unpack_from(header_fmt, self._data))
+
+  def _ReadData(self, fmt):
+    ret = struct.unpack_from(fmt, self._data, self._pos)[0]
+    self._pos += struct.calcsize(fmt)
+    return ret
+
+  def _ReadULeb128(self, data_offset):
+    """Returns a tuple of (uleb128 value, number of bytes occupied).
+
+    From DWARF3 spec: http://dwarfstd.org/doc/Dwarf3.pdf
+
+    Args:
+      data_offset: Location of the unsigned LEB128.
+    """
+    value = 0
+    shift = 0
+    cur_offset = data_offset
+    while True:
+      byte = self._data[cur_offset]
+      cur_offset += 1
+      value |= (byte & 0b01111111) << shift
+      if (byte & 0b10000000) == 0:
+        break
+      shift += 7
+
+    return value, cur_offset - data_offset
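+
+  # Worked example (standard DWARF LEB128 sample): the byte sequence
+  # 0xe5 0x8e 0x26 decodes to 0x65 | (0x0e << 7) | (0x26 << 14) == 624485,
+  # occupying 3 bytes.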
+
+  def _DecodeMUtf8(self, string_length, offset):
+    """Returns the string located at the specified offset.
+
+    See https://source.android.com/devices/tech/dalvik/dex-format#mutf-8
+
+    Ported from the Android Java implementation:
+    https://android.googlesource.com/platform/dalvik/+/fe107fb6e3f308ac5174ebdc5a794ee880c741d9/dx/src/com/android/dex/Mutf8.java#34
+
+    Args:
+      string_length: The length of the decoded string.
+      offset: Offset to the beginning of the string.
+    """
+    self.Seek(offset)
+    ret = ''
+
+    for _ in xrange(string_length):
+      a = self.ReadUByte()
+      if a == 0:
+        raise _MUTf8DecodeError('Early string termination encountered',
+                                string_length, offset)
+      if (a & 0x80) == 0x00:
+        code = a
+      elif (a & 0xe0) == 0xc0:
+        b = self.ReadUByte()
+        if (b & 0xc0) != 0x80:
+          raise _MUTf8DecodeError('Error in byte 2', string_length, offset)
+        code = ((a & 0x1f) << 6) | (b & 0x3f)
+      elif (a & 0xf0) == 0xe0:
+        b = self.ReadUByte()
+        c = self.ReadUByte()
+        if (b & 0xc0) != 0x80 or (c & 0xc0) != 0x80:
+          raise _MUTf8DecodeError('Error in byte 3 or 4', string_length, offset)
+        code = ((a & 0x0f) << 12) | ((b & 0x3f) << 6) | (c & 0x3f)
+      else:
+        raise _MUTf8DecodeError('Bad byte', string_length, offset)
+
+      ret += unichr(code)
+
+    if self.ReadUByte() != 0x00:
+      raise _MUTf8DecodeError('Expected string termination', string_length,
+                              offset)
+
+    return ret
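+
+  # Note: unlike standard UTF-8, MUTF-8 encodes U+0000 as the two-byte
+  # sequence 0xc0 0x80; the two-byte branch above decodes it back to code
+  # point 0, so a raw 0x00 byte only ever appears as the terminator.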
+
+
+class _MUTf8DecodeError(Exception):
+
+  def __init__(self, message, length, offset):
+    message += ' (decoded string length: {}, string data offset: {:#x})'.format(
+        length, offset)
+    super(_MUTf8DecodeError, self).__init__(message)
+
+
+class DexFile(object):
+  """Represents a single dex file.
+
+  Parses and exposes access to dex file structure and contents, as described
+  at https://source.android.com/devices/tech/dalvik/dex-format
+
+  Fields:
+    reader: _DexReader object used to decode dex file contents.
+    header: DexHeader for this dex file.
+    map_list: _DexMapList object containing list of dex file contents.
+    type_item_list: _TypeIdItemList containing type_id_items.
+    proto_item_list: _ProtoIdItemList containing proto_id_items.
+    method_item_list: _MethodIdItemList containing method_id_items.
+    string_item_list: _StringItemList containing string_data_items that are
+      referenced by index in other sections.
+    type_list_item_list: _TypeListItemList containing _TypeListItems.
+      _TypeListItems are referenced by their offsets from other dex items.
+    class_def_item_list: _ClassDefItemList containing _ClassDefItems.
+  """
+  _CLASS_ACCESS_FLAGS = {
+      0x1: 'public',
+      0x2: 'private',
+      0x4: 'protected',
+      0x8: 'static',
+      0x10: 'final',
+      0x200: 'interface',
+      0x400: 'abstract',
+      0x1000: 'synthetic',
+      0x2000: 'annotation',
+      0x4000: 'enum',
+  }
+
+  def __init__(self, data):
+    """Decodes dex file memory sections.
+
+    Args:
+      data: bytearray containing the contents of a dex file.
+    """
+    self.reader = _DexReader(data)
+    self.header = self.reader.ReadHeader()
+    self.map_list = _DexMapList(self.reader, self.header.map_off)
+    self.type_item_list = _TypeIdItemList(self.reader, self.header.type_ids_off,
+                                          self.header.type_ids_size)
+    self.proto_item_list = _ProtoIdItemList(
+        self.reader, self.header.proto_ids_off, self.header.proto_ids_size)
+    self.method_item_list = _MethodIdItemList(
+        self.reader, self.header.method_ids_off, self.header.method_ids_size)
+    self.string_item_list = _StringItemList(
+        self.reader, self.header.string_ids_off, self.header.string_ids_size)
+    self.class_def_item_list = _ClassDefItemList(
+        self.reader, self.header.class_defs_off, self.header.class_defs_size)
+
+    type_list_key = _DexMapList.TYPE_TYPE_LIST
+    if type_list_key in self.map_list:
+      map_list_item = self.map_list[type_list_key]
+      self.type_list_item_list = _TypeListItemList(
+          self.reader, map_list_item.offset, map_list_item.size)
+    else:
+      self.type_list_item_list = _TypeListItemList(self.reader, 0, 0)
+    self._type_lists_by_offset = {
+        type_list.offset: type_list
+        for type_list in self.type_list_item_list
+    }
+
+  def GetString(self, string_item_idx):
+    string_item = self.string_item_list[string_item_idx]
+    return string_item.data
+
+  def GetTypeString(self, type_item_idx):
+    type_item = self.type_item_list[type_item_idx]
+    return self.GetString(type_item.descriptor_idx)
+
+  def GetTypeListStringsByOffset(self, offset):
+    if not offset:
+      return ()
+    type_list = self._type_lists_by_offset[offset]
+    return tuple(self.GetTypeString(item.type_idx) for item in type_list)
+
+  @staticmethod
+  def ResolveClassAccessFlags(access_flags):
+    return tuple(
+        flag_string
+        for flag, flag_string in DexFile._CLASS_ACCESS_FLAGS.iteritems()
+        if flag & access_flags)
+
+  def IterMethodSignatureParts(self):
+    """Yields the string components of dex methods in a dex file.
+
+    Yields:
+      Tuples that look like:
+        (class name, return type, method name, (parameter type, ...)).
+    """
+    for method_item in self.method_item_list:
+      class_name_string = self.GetTypeString(method_item.type_idx)
+      method_name_string = self.GetString(method_item.name_idx)
+      proto_item = self.proto_item_list[method_item.proto_idx]
+      return_type_string = self.GetTypeString(proto_item.return_type_idx)
+      parameter_types = self.GetTypeListStringsByOffset(
+          proto_item.parameters_off)
+      yield (class_name_string, return_type_string, method_name_string,
+             parameter_types)
+
+  def __repr__(self):
+    items = [
+        self.header,
+        self.map_list,
+        self.type_item_list,
+        self.proto_item_list,
+        self.method_item_list,
+        self.string_item_list,
+        self.type_list_item_list,
+        self.class_def_item_list,
+    ]
+    return '\n'.join(str(item) for item in items)
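+
+# Usage sketch: parse a dex file and print its method names.
+#
+#   with open('classes.dex', 'rb') as f:
+#     dex = DexFile(bytearray(f.read()))
+#   for class_name, _return_type, method_name, _params in (
+#       dex.IterMethodSignatureParts()):
+#     print(class_name, method_name)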
+
+
+class _DumpCommand(object):
+
+  def __init__(self, dexfile):
+    self._dexfile = dexfile
+
+  def Run(self):
+    raise NotImplementedError()
+
+
+class _DumpMethods(_DumpCommand):
+
+  def Run(self):
+    for parts in self._dexfile.IterMethodSignatureParts():
+      class_type, return_type, method_name, parameter_types = parts
+      print('{} {} (return type={}, parameters={})'.format(
+          class_type, method_name, return_type, parameter_types))
+
+
+class _DumpStrings(_DumpCommand):
+
+  def Run(self):
+    for string_item in self._dexfile.string_item_list:
+      # Some strings are likely to be non-ascii (vs. methods/classes).
+      print(string_item.data.encode('utf-8'))
+
+
+class _DumpClasses(_DumpCommand):
+
+  def Run(self):
+    for class_item in self._dexfile.class_def_item_list:
+      class_string = self._dexfile.GetTypeString(class_item.class_idx)
+      superclass_string = self._dexfile.GetTypeString(class_item.superclass_idx)
+      interfaces = self._dexfile.GetTypeListStringsByOffset(
+          class_item.interfaces_off)
+      access_flags = DexFile.ResolveClassAccessFlags(class_item.access_flags)
+      print('{} (superclass={}, interfaces={}, access_flags={})'.format(
+          class_string, superclass_string, interfaces, access_flags))
+
+
+class _DumpSummary(_DumpCommand):
+
+  def Run(self):
+    print(self._dexfile)
+
+
+def _DumpDexItems(dexfile_data, name, item):
+  dexfile = DexFile(bytearray(dexfile_data))
+  print('dex_parser: Dumping {} for {}'.format(item, name))
+  cmds = {
+      'summary': _DumpSummary,
+      'methods': _DumpMethods,
+      'strings': _DumpStrings,
+      'classes': _DumpClasses,
+  }
+  try:
+    cmds[item](dexfile).Run()
+  except IOError as e:
+    if e.errno == errno.EPIPE:
+      # Assume we're piping to "less", do nothing.
+      pass
+
+
+def main():
+  parser = argparse.ArgumentParser(description='Dump dex contents to stdout.')
+  parser.add_argument(
+      'input', help='Input (.dex, .jar, .zip, .aab, .apk) file path.')
+  parser.add_argument(
+      'item',
+      choices=('methods', 'strings', 'classes', 'summary'),
+      help='Item to dump',
+      nargs='?',
+      default='summary')
+  args = parser.parse_args()
+
+  if os.path.splitext(args.input)[1] in ('.apk', '.jar', '.zip', '.aab'):
+    with zipfile.ZipFile(args.input) as z:
+      dex_file_paths = [
+          f for f in z.namelist() if re.match(r'.*classes[0-9]*\.dex$', f)
+      ]
+      if not dex_file_paths:
+        print('Error: {} does not contain any classes.dex files'.format(
+            args.input))
+        sys.exit(1)
+
+      for path in dex_file_paths:
+        _DumpDexItems(z.read(path), path, args.item)
+
+  else:
+    with open(args.input, 'rb') as f:  # dex files are binary
+      _DumpDexItems(f.read(), args.input, args.item)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/pylib/gtest/__init__.py b/src/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/gtest/filter/base_unittests_disabled b/src/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000..533d3e1
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..6bec7d0
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..cefc64f
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/src/build/android/pylib/gtest/filter/content_browsertests_disabled b/src/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000..9c89121
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,45 @@
+# List of suppressions
+# Timeouts
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+RenderAccessibilityImplTest.DetachAccessibilityObject
+
+# http://crbug.com/187500
+RenderViewImplTest.ImeComposition
+RenderViewImplTest.InsertCharacters
+RenderViewImplTest.OnHandleKeyboardEvent
+RenderViewImplTest.OnNavStateChanged
+# ZoomLevel is not used on Android
+RenderFrameImplTest.ZoomLimit
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+RendererAccessibilityTest.TextSelectionShouldSendRoot
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
+
+# http://crbug.com/1039450
+ProprietaryCodec/WebRtcMediaRecorderTest.*
diff --git a/src/build/android/pylib/gtest/filter/unit_tests_disabled b/src/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000..97811c8
--- /dev/null
+++ b/src/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,74 @@
+# List of suppressions
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+PageInfoTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/src/build/android/pylib/gtest/gtest_config.py b/src/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000..3ac1955
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+    'components_browsertests',
+    'heap_profiler_unittests',
+    'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+    'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+    'android_webview_unittests',
+    'base_unittests',
+    'blink_unittests',
+    'breakpad_unittests',
+    'cc_unittests',
+    'components_unittests',
+    'content_browsertests',
+    'content_unittests',
+    'events_unittests',
+    'gl_tests',
+    'gl_unittests',
+    'gpu_unittests',
+    'ipc_tests',
+    'media_unittests',
+    'midi_unittests',
+    'net_unittests',
+    'sandbox_linux_unittests',
+    'skia_unittests',
+    'sql_unittests',
+    'storage_unittests',
+    'ui_android_unittests',
+    'ui_base_unittests',
+    'ui_touch_selection_unittests',
+    'unit_tests_apk',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+    'breakpad_unittests',
+    'sandbox_linux_unittests',
+
+    # The internal ASAN recipe cannot run the "unit_tests_apk" step; this is
+    # the only internal recipe affected. See http://crbug.com/607850
+    'unit_tests_apk',
+]
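
Since ASAN_EXCLUDED_TEST_SUITES names the stable suites that cannot run under
ASan, the ASan-eligible set can be derived by a simple list filter; a minimal
sketch, assuming pylib is on sys.path:

from pylib.gtest import gtest_config

# Stable suites that are safe to run in an ASan (component) build.
asan_suites = [s for s in gtest_config.STABLE_TEST_SUITES
               if s not in gtest_config.ASAN_EXCLUDED_TEST_SUITES]
print('\n'.join(asan_suites))
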
diff --git a/src/build/android/pylib/gtest/gtest_test_instance.py b/src/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000..a88c365
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,610 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import HTMLParser
+import json
+import logging
+import os
+import re
+import tempfile
+import threading
+import xml.etree.ElementTree
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.symbols import stack_symbolizer
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import unittest_util # pylint: disable=import-error
+
+
+BROWSER_TEST_SUITES = [
+    'android_browsertests',
+    'android_sync_integration_tests',
+    'components_browsertests',
+    'content_browsertests',
+    'weblayer_browsertests',
+]
+
+# The max number of tests to run on a shard during the test run.
+MAX_SHARDS = 256
+
+RUN_IN_SUB_THREAD_TEST_SUITES = [
+    # Multiprocess tests should be run outside of the main thread.
+    'base_unittests',  # file_locking_unittest.cc uses a child process.
+    'gwp_asan_unittests',
+    'ipc_perftests',
+    'ipc_tests',
+    'mojo_perftests',
+    'mojo_unittests',
+    'net_unittests'
+]
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'NativeTestActivity')
+_EXTRA_RUN_IN_SUB_THREAD = (
+    'org.chromium.native_test.NativeTest.RunInSubThread')
+EXTRA_SHARD_NANO_TIMEOUT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardNanoTimeout')
+_EXTRA_SHARD_SIZE_LIMIT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+    # Test state.
+    r'\[ +((?:RUN)|(?:FAILED)|(?:OK)|(?:CRASHED)|(?:SKIPPED)) +\] ?'
+    # Test name.
+    r'([^ ]+)?'
+    # Optional parameters.
+    r'(?:, where'
+    #   Type parameter
+    r'(?: TypeParam = [^()]*(?: and)?)?'
+    #   Value parameter
+    r'(?: GetParam\(\) = [^()]*)?'
+    # End of optional parameters.
+    ')?'
+    # Optional test execution time.
+    r'(?: \((\d+) ms\))?$')
+# Crash detection constants.
+_RE_TEST_ERROR = re.compile(r'FAILURES!!! Tests run: \d+,'
+                                    r' Failures: \d+, Errors: 1')
+_RE_TEST_CURRENTLY_RUNNING = re.compile(
+    r'\[ERROR:.*?\] Currently running: (.*)')
+_RE_TEST_DCHECK_FATAL = re.compile(r'\[.*:FATAL:.*\] (.*)')
+_RE_DISABLED = re.compile(r'DISABLED_')
+_RE_FLAKY = re.compile(r'FLAKY_')
+
+# Detect stack line in stdout.
+_STACK_LINE_RE = re.compile(r'\s*#\d+')
+
+def ParseGTestListTests(raw_list):
+  """Parses a raw test list as provided by --gtest_list_tests.
+
+  Args:
+    raw_list: The raw test listing with the following format:
+
+    IPCChannelTest.
+      SendMessageInChannelConnected
+    IPCSyncChannelTest.
+      Simple
+      DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+  Returns:
+    A list of all tests. For the above raw listing:
+
+    [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+     IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+  """
+  ret = []
+  current = ''
+  for test in raw_list:
+    if not test:
+      continue
+    if not test.startswith(' '):
+      test_case = test.split()[0]
+      if test_case.endswith('.'):
+        current = test_case
+    else:
+      test = test.strip()
+      if test and 'YOU HAVE' not in test:
+        test_name = test.split()[0]
+        ret += [current + test_name]
+  return ret
+
+
+def ParseGTestOutput(output, symbolizer, device_abi):
+  """Parses raw gtest output and returns a list of results.
+
+  Args:
+    output: A list of output lines.
+    symbolizer: The symbolizer used to symbolize stack.
+    device_abi: Device abi that is needed for symbolization.
+  Returns:
+    A list of base_test_result.BaseTestResults.
+  """
+  duration = 0
+  fallback_result_type = None
+  log = []
+  stack = []
+  result_type = None
+  results = []
+  test_name = None
+
+  def symbolize_stack_and_merge_with_log():
+    log_string = '\n'.join(log or [])
+    if not stack:
+      stack_string = ''
+    else:
+      stack_string = '\n'.join(
+          symbolizer.ExtractAndResolveNativeStackTraces(
+              stack, device_abi))
+    return '%s\n%s' % (log_string, stack_string)
+
+  def handle_possibly_unknown_test():
+    if test_name is not None:
+      results.append(
+          base_test_result.BaseTestResult(
+              TestNameWithoutDisabledPrefix(test_name),
+              # If we get here, that means we started a test, but it did not
+              # produce a definitive test status output, so assume it crashed.
+              # crbug/1191716
+              fallback_result_type or base_test_result.ResultType.CRASH,
+              duration,
+              log=symbolize_stack_and_merge_with_log()))
+
+  for l in output:
+    matcher = _RE_TEST_STATUS.match(l)
+    if matcher:
+      if matcher.group(1) == 'RUN':
+        handle_possibly_unknown_test()
+        duration = 0
+        fallback_result_type = None
+        log = []
+        stack = []
+        result_type = None
+      elif matcher.group(1) == 'OK':
+        result_type = base_test_result.ResultType.PASS
+      elif matcher.group(1) == 'SKIPPED':
+        result_type = base_test_result.ResultType.SKIP
+      elif matcher.group(1) == 'FAILED':
+        result_type = base_test_result.ResultType.FAIL
+      elif matcher.group(1) == 'CRASHED':
+        fallback_result_type = base_test_result.ResultType.CRASH
+      # Be aware that test name and status might not appear on the same line.
+      test_name = matcher.group(2) if matcher.group(2) else test_name
+      duration = int(matcher.group(3)) if matcher.group(3) else 0
+
+    else:
+      # Can possibly add more matchers, such as different results from DCHECK.
+      currently_running_matcher = _RE_TEST_CURRENTLY_RUNNING.match(l)
+      dcheck_matcher = _RE_TEST_DCHECK_FATAL.match(l)
+
+      if currently_running_matcher:
+        test_name = currently_running_matcher.group(1)
+        result_type = base_test_result.ResultType.CRASH
+        duration = None  # Don't know. Not using 0 as this is unknown vs 0.
+      elif dcheck_matcher:
+        result_type = base_test_result.ResultType.CRASH
+        duration = None  # Don't know.  Not using 0 as this is unknown vs 0.
+
+    if log is not None:
+      if not matcher and _STACK_LINE_RE.match(l):
+        stack.append(l)
+      else:
+        log.append(l)
+
+    if result_type and test_name:
+      # Don't bother symbolizing output if the test passed.
+      if result_type == base_test_result.ResultType.PASS:
+        stack = []
+      results.append(base_test_result.BaseTestResult(
+          TestNameWithoutDisabledPrefix(test_name), result_type, duration,
+          log=symbolize_stack_and_merge_with_log()))
+      test_name = None
+
+  handle_possibly_unknown_test()
+
+  return results
+
+
+def ParseGTestXML(xml_content):
+  """Parse gtest XML result."""
+  results = []
+  if not xml_content:
+    return results
+
+  html = HTMLParser.HTMLParser()
+
+  testsuites = xml.etree.ElementTree.fromstring(xml_content)
+  for testsuite in testsuites:
+    suite_name = testsuite.attrib['name']
+    for testcase in testsuite:
+      case_name = testcase.attrib['name']
+      result_type = base_test_result.ResultType.PASS
+      log = []
+      for failure in testcase:
+        result_type = base_test_result.ResultType.FAIL
+        log.append(html.unescape(failure.attrib['message']))
+
+      results.append(base_test_result.BaseTestResult(
+          '%s.%s' % (suite_name, TestNameWithoutDisabledPrefix(case_name)),
+          result_type,
+          int(float(testcase.attrib['time']) * 1000),
+          log=('\n'.join(log) if log else '')))
+
+  return results
+
+
+def ParseGTestJSON(json_content):
+  """Parse results in the JSON Test Results format."""
+  results = []
+  if not json_content:
+    return results
+
+  json_data = json.loads(json_content)
+
+  openstack = json_data['tests'].items()
+
+  while openstack:
+    name, value = openstack.pop()
+
+    if 'expected' in value and 'actual' in value:
+      result_type = base_test_result.ResultType.PASS if value[
+          'actual'] == 'PASS' else base_test_result.ResultType.FAIL
+      results.append(base_test_result.BaseTestResult(name, result_type))
+    else:
+      openstack += [("%s.%s" % (name, k), v) for k, v in value.iteritems()]
+
+  return results
+
+
+def TestNameWithoutDisabledPrefix(test_name):
+  """Modify the test name without disabled prefix if prefix 'DISABLED_' or
+  'FLAKY_' presents.
+
+  Args:
+    test_name: The name of a test.
+  Returns:
+    A test name without prefix 'DISABLED_' or 'FLAKY_'.
+  """
+  disabled_prefixes = [_RE_DISABLED, _RE_FLAKY]
+  for dp in disabled_prefixes:
+    test_name = dp.sub('', test_name)
+  return test_name
+
+class GtestTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, data_deps_delegate, error_func):
+    super(GtestTestInstance, self).__init__()
+    # TODO(jbudorick): Support multiple test suites.
+    if len(args.suite_name) > 1:
+      raise ValueError('Platform mode currently supports only 1 gtest suite')
+    self._coverage_dir = args.coverage_dir
+    self._exe_dist_dir = None
+    self._external_shard_index = args.test_launcher_shard_index
+    self._extract_test_list_from_filter = args.extract_test_list_from_filter
+    self._filter_tests_lock = threading.Lock()
+    self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket
+    self._isolated_script_test_output = args.isolated_script_test_output
+    self._isolated_script_test_perf_output = (
+        args.isolated_script_test_perf_output)
+    self._render_test_output_dir = args.render_test_output_dir
+    self._shard_timeout = args.shard_timeout
+    self._store_tombstones = args.store_tombstones
+    self._suite = args.suite_name[0]
+    self._symbolizer = stack_symbolizer.Symbolizer(None)
+    self._total_external_shards = args.test_launcher_total_shards
+    self._wait_for_java_debugger = args.wait_for_java_debugger
+
+    # GYP:
+    if args.executable_dist_dir:
+      self._exe_dist_dir = os.path.abspath(args.executable_dist_dir)
+    else:
+      # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly.
+      exe_dist_dir = os.path.join(constants.GetOutDirectory(),
+                                  '%s__dist' % self._suite)
+
+      if os.path.exists(exe_dist_dir):
+        self._exe_dist_dir = exe_dist_dir
+
+    incremental_part = ''
+    if args.test_apk_incremental_install_json:
+      incremental_part = '_incremental'
+
+    self._test_launcher_batch_limit = MAX_SHARDS
+    if (args.test_launcher_batch_limit
+        and 0 < args.test_launcher_batch_limit < MAX_SHARDS):
+      self._test_launcher_batch_limit = args.test_launcher_batch_limit
+
+    apk_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % self._suite,
+        '%s-debug%s.apk' % (self._suite, incremental_part))
+    self._test_apk_incremental_install_json = (
+        args.test_apk_incremental_install_json)
+    if not os.path.exists(apk_path):
+      self._apk_helper = None
+    else:
+      self._apk_helper = apk_helper.ApkHelper(apk_path)
+      self._extras = {
+          _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(),
+      }
+      if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES:
+        self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1
+      if self._suite in BROWSER_TEST_SUITES:
+        self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+        self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout)
+        self._shard_timeout = 10 * self._shard_timeout
+      if args.wait_for_java_debugger:
+        self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e15)  # Forever
+
+    if not self._apk_helper and not self._exe_dist_dir:
+      error_func('Could not find apk or executable for %s' % self._suite)
+
+    self._data_deps = []
+    self._gtest_filter = test_filter.InitializeFilterFromArgs(args)
+    self._run_disabled = args.run_disabled
+
+    self._data_deps_delegate = data_deps_delegate
+    self._runtime_deps_path = args.runtime_deps_path
+    if not self._runtime_deps_path:
+      logging.warning('No data dependencies will be pushed.')
+
+    if args.app_data_files:
+      self._app_data_files = args.app_data_files
+      if args.app_data_file_dir:
+        self._app_data_file_dir = args.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+    self._flags = None
+    self._initializeCommandLineFlags(args)
+
+    # TODO(jbudorick): Remove this once it's deployed.
+    self._enable_xml_result_parsing = args.enable_xml_result_parsing
+
+  def _initializeCommandLineFlags(self, args):
+    self._flags = []
+    if args.command_line_flags:
+      self._flags.extend(args.command_line_flags)
+    if args.device_flags_file:
+      with open(args.device_flags_file) as f:
+        stripped_lines = (l.strip() for l in f)
+        self._flags.extend(flag for flag in stripped_lines if flag)
+    if args.run_disabled:
+      self._flags.append('--gtest_also_run_disabled_tests')
+
+  @property
+  def activity(self):
+    return self._apk_helper and self._apk_helper.GetActivityName()
+
+  @property
+  def apk(self):
+    return self._apk_helper and self._apk_helper.path
+
+  @property
+  def apk_helper(self):
+    return self._apk_helper
+
+  @property
+  def app_file_dir(self):
+    return self._app_data_file_dir
+
+  @property
+  def app_files(self):
+    return self._app_data_files
+
+  @property
+  def coverage_dir(self):
+    return self._coverage_dir
+
+  @property
+  def enable_xml_result_parsing(self):
+    return self._enable_xml_result_parsing
+
+  @property
+  def exe_dist_dir(self):
+    return self._exe_dist_dir
+
+  @property
+  def external_shard_index(self):
+    return self._external_shard_index
+
+  @property
+  def extract_test_list_from_filter(self):
+    return self._extract_test_list_from_filter
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def gs_test_artifacts_bucket(self):
+    return self._gs_test_artifacts_bucket
+
+  @property
+  def gtest_filter(self):
+    return self._gtest_filter
+
+  @property
+  def isolated_script_test_output(self):
+    return self._isolated_script_test_output
+
+  @property
+  def isolated_script_test_perf_output(self):
+    return self._isolated_script_test_perf_output
+
+  @property
+  def render_test_output_dir(self):
+    return self._render_test_output_dir
+
+  @property
+  def package(self):
+    return self._apk_helper and self._apk_helper.GetPackageName()
+
+  @property
+  def permissions(self):
+    return self._apk_helper and self._apk_helper.GetPermissions()
+
+  @property
+  def runner(self):
+    return self._apk_helper and self._apk_helper.GetInstrumentationName()
+
+  @property
+  def shard_timeout(self):
+    return self._shard_timeout
+
+  @property
+  def store_tombstones(self):
+    return self._store_tombstones
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def symbolizer(self):
+    return self._symbolizer
+
+  @property
+  def test_apk_incremental_install_json(self):
+    return self._test_apk_incremental_install_json
+
+  @property
+  def test_launcher_batch_limit(self):
+    return self._test_launcher_batch_limit
+
+  @property
+  def total_external_shards(self):
+    return self._total_external_shards
+
+  @property
+  def wait_for_java_debugger(self):
+    return self._wait_for_java_debugger
+
+  #override
+  def TestType(self):
+    return 'gtest'
+
+  #override
+  def GetPreferredAbis(self):
+    if not self._apk_helper:
+      return None
+    return self._apk_helper.GetAbis()
+
+  #override
+  def SetUp(self):
+    """Map data dependencies via isolate."""
+    self._data_deps.extend(
+        self._data_deps_delegate(self._runtime_deps_path))
+
+  def GetDataDependencies(self):
+    """Returns the test suite's data dependencies.
+
+    Returns:
+      A list of (host_path, device_path) tuples to push. If device_path is
+      None, the client is responsible for determining where to push the file.
+    """
+    return self._data_deps
+
+  def FilterTests(self, test_list, disabled_prefixes=None):
+    """Filters |test_list| based on prefixes and, if present, a filter string.
+
+    Args:
+      test_list: The list of tests to filter.
+      disabled_prefixes: A list of test prefixes to filter. Defaults to
+        DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_
+    Returns:
+      A filtered list of tests to run.
+    """
+    gtest_filter_strings = [
+        self._GenerateDisabledFilterString(disabled_prefixes)]
+    if self._gtest_filter:
+      gtest_filter_strings.append(self._gtest_filter)
+
+    filtered_test_list = test_list
+    # This lock is required because, on older versions of Python,
+    # |unittest_util.FilterTestNames|'s use of |fnmatch| is not thread-safe.
+    with self._filter_tests_lock:
+      for gtest_filter_string in gtest_filter_strings:
+        logging.debug('Filtering tests using: %s', gtest_filter_string)
+        filtered_test_list = unittest_util.FilterTestNames(
+            filtered_test_list, gtest_filter_string)
+
+      if self._run_disabled and self._gtest_filter:
+        out_filtered_test_list = list(set(test_list)-set(filtered_test_list))
+        for test in out_filtered_test_list:
+          test_name_no_disabled = TestNameWithoutDisabledPrefix(test)
+          if test_name_no_disabled != test and unittest_util.FilterTestNames(
+              [test_name_no_disabled], self._gtest_filter):
+            filtered_test_list.append(test)
+    return filtered_test_list
+
+  def _GenerateDisabledFilterString(self, disabled_prefixes):
+    disabled_filter_items = []
+
+    if disabled_prefixes is None:
+      disabled_prefixes = ['FAILS_', 'PRE_']
+      if '--run-manual' not in self._flags:
+        disabled_prefixes += ['MANUAL_']
+      if not self._run_disabled:
+        disabled_prefixes += ['DISABLED_', 'FLAKY_']
+
+    disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+    disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+    disabled_tests_file_path = os.path.join(
+        host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+        'filter', '%s_disabled' % self._suite)
+    if os.path.exists(disabled_tests_file_path):
+      with open(disabled_tests_file_path) as disabled_tests_file:
+        disabled_filter_items += [
+            '%s' % l for l in (line.strip() for line in disabled_tests_file)
+            if l and not l.startswith('#')]
+
+    return '*-%s' % ':'.join(disabled_filter_items)
+
+  #override
+  def TearDown(self):
+    """Do nothing."""
+    pass
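
ParseGTestOutput above can be exercised on canned stdout without a device; a
small sketch mirroring the unit tests in the next file (symbolizer and ABI may
be None when no native stack frames appear in the output):

from pylib.gtest import gtest_test_instance

lines = [
    '[ RUN      ] FooTest.Bar',
    '[       OK ] FooTest.Bar (7 ms)',
]
for result in gtest_test_instance.ParseGTestOutput(lines, None, None):
  # Prints "FooTest.Bar PASS 7".
  print('%s %s %d' % (result.GetName(), result.GetType(), result.GetDuration()))
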
diff --git a/src/build/android/pylib/gtest/gtest_test_instance_test.py b/src/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000..1429e3d
--- /dev/null
+++ b/src/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+  def testParseGTestListTests_simple(self):
+    raw_output = [
+      'TestCaseOne.',
+      '  testOne',
+      '  testTwo',
+      'TestCaseTwo.',
+      '  testThree',
+      '  testFour',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCaseOne.testOne',
+      'TestCaseOne.testTwo',
+      'TestCaseTwo.testThree',
+      'TestCaseTwo.testFour',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_old(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_new(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.  # TypeParam = TypeParam0',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_old(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0',
+      '  testWithValueParam/1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_new(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0  # GetParam() = 0',
+      '  testWithValueParam/1  # GetParam() = 1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_emptyTestName(self):
+    raw_output = [
+      'TestCase.',
+      '  ',
+      '  nonEmptyTestName',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCase.nonEmptyTestName',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestOutput_pass(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[       OK ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testParseGTestOutput_fail(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[   FAILED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_crash(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[  CRASHED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_errorCrash(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ERROR:blah] Currently running: FooTest.Bar',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertIsNone(actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_fatalDcheck(self):
+    raw_output = [
+        '[ RUN      ] FooTest.Bar',
+        '[0324/183029.116334:FATAL:test_timeouts.cc(103)] Check failed: !init',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertIsNone(actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_unknown(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(0, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+  def testParseGTestOutput_nonterminalUnknown(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ RUN      ] FooTest.Baz',
+      '[       OK ] FooTest.Baz (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(2, len(actual))
+
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(0, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+    self.assertEquals('FooTest.Baz', actual[1].GetName())
+    self.assertEquals(1, actual[1].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType())
+
+  def testParseGTestOutput_deathTestCrashOk(self):
+    raw_output = [
+      '[ RUN      ] FooTest.Bar',
+      '[ CRASHED      ]',
+      '[       OK ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testParseGTestOutput_typeParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0, where TypeParam =  (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_valueParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0,' +
+        ' where GetParam() = 4-byte object <00-00 00-00> (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_typeAndValueParameterized(self):
+    raw_output = [
+        '[ RUN      ] Baz/FooTest.Bar/0',
+        '[   FAILED ] Baz/FooTest.Bar/0,' +
+        ' where TypeParam =  and GetParam() =  (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+  def testParseGTestOutput_skippedTest(self):
+    raw_output = [
+        '[ RUN      ] FooTest.Bar',
+        '[  SKIPPED ] FooTest.Bar (1 ms)',
+    ]
+    actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('FooTest.Bar', actual[0].GetName())
+    self.assertEquals(1, actual[0].GetDuration())
+    self.assertEquals(base_test_result.ResultType.SKIP, actual[0].GetType())
+
+  def testParseGTestXML_none(self):
+    actual = gtest_test_instance.ParseGTestXML(None)
+    self.assertEquals([], actual)
+
+  def testParseGTestJSON_none(self):
+    actual = gtest_test_instance.ParseGTestJSON(None)
+    self.assertEquals([], actual)
+
+  def testParseGTestJSON_example(self):
+    raw_json = """
+      {
+        "tests": {
+          "mojom_tests": {
+            "parse": {
+              "ast_unittest": {
+                "ASTTest": {
+                  "testNodeBase": {
+                    "expected": "PASS",
+                    "actual": "PASS",
+                    "artifacts": {
+                      "screenshot": ["screenshots/page.png"]
+                    }
+                  }
+                }
+              }
+            }
+          }
+        },
+        "interrupted": false,
+        "path_delimiter": ".",
+        "version": 3,
+        "seconds_since_epoch": 1406662283.764424,
+        "num_failures_by_type": {
+          "FAIL": 0,
+          "PASS": 1
+        },
+        "artifact_types": {
+          "screenshot": "image/png"
+        }
+      }"""
+    actual = gtest_test_instance.ParseGTestJSON(raw_json)
+    self.assertEquals(1, len(actual))
+    self.assertEquals('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase',
+                      actual[0].GetName())
+    self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+  def testTestNameWithoutDisabledPrefix_disabled(self):
+    test_name_list = [
+      'A.DISABLED_B',
+      'DISABLED_A.B',
+      'DISABLED_A.DISABLED_B',
+    ]
+    for test_name in test_name_list:
+      actual = gtest_test_instance \
+          .TestNameWithoutDisabledPrefix(test_name)
+      expected = 'A.B'
+      self.assertEquals(expected, actual)
+
+  def testTestNameWithoutDisabledPrefix_flaky(self):
+    test_name_list = [
+      'A.FLAKY_B',
+      'FLAKY_A.B',
+      'FLAKY_A.FLAKY_B',
+    ]
+    for test_name in test_name_list:
+      actual = gtest_test_instance \
+          .TestNameWithoutDisabledPrefix(test_name)
+      expected = 'A.B'
+      self.assertEquals(expected, actual)
+
+  def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self):
+    test_name = 'A.B'
+    actual = gtest_test_instance \
+        .TestNameWithoutDisabledPrefix(test_name)
+    expected = 'A.B'
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
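
ParseGTestXML is only smoke-tested with None above; a hedged sample of the
input shape it expects (testsuites > testsuite > testcase, attribute names
taken from the parser itself):

from pylib.gtest import gtest_test_instance

xml_content = (
    '<testsuites>'
    '<testsuite name="FooTest">'
    '<testcase name="Bar" time="0.007"/>'
    '</testsuite>'
    '</testsuites>')
results = gtest_test_instance.ParseGTestXML(xml_content)
# results[0]: name 'FooTest.Bar', type PASS, duration 7 ms.
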
diff --git a/src/build/android/pylib/instrumentation/__init__.py b/src/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/instrumentation/instrumentation_parser.py b/src/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000..dd9f9cc
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,111 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# AndroidJUnitRunner uses status code -3 to indicate a skipped test.
+STATUS_CODE_SKIP = -3
+
+# AndroidJUnitRunner outputs status code -4 to indicate a failed assumption:
+# "A test for which an assumption fails should not generate a test
+# case failure"
+# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html
+STATUS_CODE_ASSUMPTION_FAILURE = -4
+
+STATUS_CODE_TEST_DURATION = 1337
+
+# When a test batch fails due to post-test assertion failures (e.g.
+# LifetimeAssert).
+STATUS_CODE_BATCH_FAILURE = 1338
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
+
+
+class InstrumentationParser(object):
+
+  def __init__(self, stream):
+    """An incremental parser for the output of Android instrumentation tests.
+
+    Example:
+
+      stream = adb.IterShell('am instrument -r ...')
+      parser = InstrumentationParser(stream)
+
+      for code, bundle in parser.IterStatus():
+        # do something with each instrumentation status
+        print('status:', code, bundle)
+
+      # do something with the final instrumentation result
+      code, bundle = parser.GetResult()
+      print('result:', code, bundle)
+
+    Args:
+      stream: a sequence of lines as produced by the raw output of an
+        instrumentation test (e.g. by |am instrument -r|).
+    """
+    self._stream = stream
+    self._code = None
+    self._bundle = None
+
+  def IterStatus(self):
+    """Iterate over statuses as they are produced by the instrumentation test.
+
+    Yields:
+      A tuple (code, bundle) for each instrumentation status found in the
+      output.
+    """
+    def join_bundle_values(bundle):
+      for key in bundle:
+        bundle[key] = '\n'.join(bundle[key])
+      return bundle
+
+    bundle = {'STATUS': {}, 'RESULT': {}}
+    header = None
+    key = None
+    for line in self._stream:
+      m = _INSTR_LINE_RE.match(line)
+      if m:
+        header, value = m.groups()
+        key = None
+        if header in ['STATUS', 'RESULT'] and '=' in value:
+          key, value = value.split('=', 1)
+          bundle[header][key] = [value]
+        elif header == 'STATUS_CODE':
+          yield int(value), join_bundle_values(bundle['STATUS'])
+          bundle['STATUS'] = {}
+        elif header == 'CODE':
+          self._code = int(value)
+        else:
+          logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+      elif key is not None:
+        bundle[header][key].append(line)
+
+    self._bundle = join_bundle_values(bundle['RESULT'])
+
+  def GetResult(self):
+    """Return the final instrumentation result.
+
+    Returns:
+      A pair (code, bundle) with the final instrumentation result. The |code|
+      may be None if no instrumentation result was found in the output.
+
+    Raises:
+      AssertionError if attempting to get the instrumentation result before
+      exhausting |IterStatus| first.
+    """
+    assert self._bundle is not None, (
+        'The IterStatus generator must be exhausted before reading the final'
+        ' instrumentation result.')
+    return self._code, self._bundle
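
A self-contained sketch of the parser on canned |am instrument -r| output (the
lines are illustrative; no device is needed):

from pylib.instrumentation import instrumentation_parser

raw_output = [
    'INSTRUMENTATION_STATUS: class=FooTest',
    'INSTRUMENTATION_STATUS: test=bar',
    'INSTRUMENTATION_STATUS_CODE: 0',
    'INSTRUMENTATION_CODE: -1',
]
parser = instrumentation_parser.InstrumentationParser(raw_output)
for code, bundle in parser.IterStatus():
  print('%d %r' % (code, bundle))  # -> 0 {'class': 'FooTest', 'test': 'bar'}
# GetResult() may only be called once IterStatus() has been exhausted.
code, bundle = parser.GetResult()  # -> (-1, {})
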
diff --git a/src/build/android/pylib/instrumentation/instrumentation_parser_test.py b/src/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000..d664455
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+  def testInstrumentationParser_nothing(self):
+    parser = instrumentation_parser.InstrumentationParser([''])
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_noMatchingStarts(self):
+    raw_output = [
+      '',
+      'this.is.a.test.package.TestClass:.',
+      'Test result for =.',
+      'Time: 1.234',
+      '',
+      'OK (1 test)',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_resultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+      'INSTRUMENTATION_RESULT: longMsg=a foo',
+      'walked into',
+      'a bar',
+      'INSTRUMENTATION_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(-1, code)
+    self.assertEqual(
+        {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_oneStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: foo=1',
+      'INSTRUMENTATION_STATUS: bar=hello',
+      'INSTRUMENTATION_STATUS: world=false',
+      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+      'INSTRUMENTATION_STATUS: test=testMethod',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (0, {
+        'foo': '1',
+        'bar': 'hello',
+        'world': 'false',
+        'class': 'this.is.a.test.package.TestClass',
+        'test': 'testMethod',
+      })
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_multiStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_STATUS: test_skipped=true',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+      'INSTRUMENTATION_STATUS: class=hello',
+      'INSTRUMENTATION_STATUS: test=world',
+      'INSTRUMENTATION_STATUS: stack=',
+      'foo/bar.py (27)',
+      'hello/world.py (42)',
+      'test/file.py (1)',
+      'INSTRUMENTATION_STATUS_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (1, {'class': 'foo', 'test': 'bar',}),
+      (0, {'test_skipped': 'true'}),
+      (-1, {
+        'class': 'hello',
+        'test': 'world',
+        'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+      }),
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_statusResultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_RESULT: result=hello',
+      'world',
+      '',
+      '',
+      'INSTRUMENTATION_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+
+    self.assertEqual(0, code)
+    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/instrumentation/instrumentation_test_instance.py b/src/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000..5493c36
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,1039 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import logging
+import os
+import pickle
+import re
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_exception
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.symbols import deobfuscator
+from pylib.symbols import stack_symbolizer
+from pylib.utils import dexdump
+from pylib.utils import gold_utils
+from pylib.utils import instrumentation_tracing
+from pylib.utils import proguard
+from pylib.utils import shared_preference_utils
+from pylib.utils import test_filter
+
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import unittest_util # pylint: disable=import-error
+
+# Ref: http://developer.android.com/reference/android/app/Activity.html
+_ACTIVITY_RESULT_CANCELED = 0
+_ACTIVITY_RESULT_OK = -1
+
+_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter'
+_DEFAULT_ANNOTATIONS = [
+    'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest']
+_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [
+    'DisabledTest', 'FlakyTest', 'Manual']
+_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS +
+                         _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS)
+
+_TEST_LIST_JUNIT4_RUNNERS = [
+    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner']
+
+_SKIP_PARAMETERIZATION = 'SkipCommandLineParameterization'
+_PARAMETERIZED_COMMAND_LINE_FLAGS = 'ParameterizedCommandLineFlags'
+_PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES = (
+    'ParameterizedCommandLineFlags$Switches')
+_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE)
+_PICKLE_FORMAT_VERSION = 12
+
+# The ID of the bundle value Instrumentation uses to report which test index the
+# results are for in a collection of tests. Note that this index is 1-based.
+_BUNDLE_CURRENT_ID = 'current'
+# The ID of the bundle value Instrumentation uses to report the test class.
+_BUNDLE_CLASS_ID = 'class'
+# The ID of the bundle value Instrumentation uses to report the test name.
+_BUNDLE_TEST_ID = 'test'
+# The ID of the bundle value Instrumentation uses to report if a test was
+# skipped.
+_BUNDLE_SKIPPED_ID = 'test_skipped'
+# The ID of the bundle value Instrumentation uses to report the crash stack, if
+# the test crashed.
+_BUNDLE_STACK_ID = 'stack'
+
+# The ID of the bundle value Chrome uses to report the test duration.
+_BUNDLE_DURATION_ID = 'duration_ms'
+
+class MissingSizeAnnotationError(test_exception.TestException):
+  def __init__(self, class_name):
+    super(MissingSizeAnnotationError, self).__init__(class_name +
+        ': Test method is missing required size annotation. Add one of: ' +
+        ', '.join('@' + a for a in _VALID_ANNOTATIONS))
+
+
+class CommandLineParameterizationException(test_exception.TestException):
+
+  def __init__(self, msg):
+    super(CommandLineParameterizationException, self).__init__(msg)
+
+
+class TestListPickleException(test_exception.TestException):
+  pass
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation junit3_runner_class is
+# deprecated.
+def ParseAmInstrumentRawOutput(raw_output):
+  """Parses the output of an |am instrument -r| call.
+
+  Args:
+    raw_output: the output of an |am instrument -r| call as a list of lines
+  Returns:
+    A 3-tuple containing:
+      - the instrumentation code as an integer
+      - the instrumentation result as a list of lines
+      - the instrumentation statuses received as a list of 2-tuples
+        containing:
+        - the status code as an integer
+        - the bundle dump as a dict mapping string keys to a list of
+          strings, one for each line.
+  """
+  parser = instrumentation_parser.InstrumentationParser(raw_output)
+  statuses = list(parser.IterStatus())
+  code, bundle = parser.GetResult()
+  return (code, bundle, statuses)
+
+
+def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+                        device_abi, symbolizer):
+  """Generate test results from |statuses|.
+
+  Args:
+    result_code: The overall status code as an integer.
+    result_bundle: The summary bundle dump as a dict.
+    statuses: A list of 2-tuples containing:
+      - the status code as an integer
+      - the bundle dump as a dict mapping string keys to string values
+      Note that this is the same as the third item in the 3-tuple returned by
+      |ParseAmInstrumentRawOutput|.
+    duration_ms: The duration of the test in milliseconds.
+    device_abi: The device_abi, which is needed for symbolization.
+    symbolizer: The symbolizer used to symbolize stack.
+
+  Returns:
+    A list containing an instance of InstrumentationTestResult for each test
+    parsed.
+  """
+
+  results = []
+
+  current_result = None
+  cumulative_duration = 0
+
+  for status_code, bundle in statuses:
+    # If the last test was a failure already, don't override that failure with
+    # post-test failures that could be caused by the original failure.
+    if (status_code == instrumentation_parser.STATUS_CODE_BATCH_FAILURE
+        and current_result.GetType() != base_test_result.ResultType.FAIL):
+      current_result.SetType(base_test_result.ResultType.FAIL)
+      _MaybeSetLog(bundle, current_result, symbolizer, device_abi)
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_TEST_DURATION:
+      # For the first result, duration will be set below to the difference
+      # between the reported and actual durations to account for overhead like
+      # starting instrumentation.
+      if results:
+        current_duration = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms))
+        current_result.SetDuration(current_duration)
+        cumulative_duration += current_duration
+      continue
+
+    test_class = bundle.get(_BUNDLE_CLASS_ID, '')
+    test_method = bundle.get(_BUNDLE_TEST_ID, '')
+    if test_class and test_method:
+      test_name = '%s#%s' % (test_class, test_method)
+    else:
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_START:
+      if current_result:
+        results.append(current_result)
+      current_result = test_result.InstrumentationTestResult(
+          test_name, base_test_result.ResultType.UNKNOWN, duration_ms)
+    else:
+      if status_code == instrumentation_parser.STATUS_CODE_OK:
+        if bundle.get(_BUNDLE_SKIPPED_ID, '').lower() in ('true', '1', 'yes'):
+          current_result.SetType(base_test_result.ResultType.SKIP)
+        elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+          current_result.SetType(base_test_result.ResultType.PASS)
+      elif status_code == instrumentation_parser.STATUS_CODE_SKIP:
+        current_result.SetType(base_test_result.ResultType.SKIP)
+      elif status_code == instrumentation_parser.STATUS_CODE_ASSUMPTION_FAILURE:
+        current_result.SetType(base_test_result.ResultType.SKIP)
+      else:
+        if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+                               instrumentation_parser.STATUS_CODE_FAILURE):
+          logging.error('Unrecognized status code %d. Handling as an error.',
+                        status_code)
+        current_result.SetType(base_test_result.ResultType.FAIL)
+    _MaybeSetLog(bundle, current_result, symbolizer, device_abi)
+
+  if current_result:
+    if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+      crashed = (result_code == _ACTIVITY_RESULT_CANCELED
+                 and any(_NATIVE_CRASH_RE.search(l)
+                         for l in result_bundle.itervalues()))
+      if crashed:
+        current_result.SetType(base_test_result.ResultType.CRASH)
+
+    results.append(current_result)
+
+  if results:
+    logging.info('Adding cumulative overhead to test %s: %dms',
+                 results[0].GetName(), duration_ms - cumulative_duration)
+    results[0].SetDuration(duration_ms - cumulative_duration)
+
+  return results
+
+
+def _MaybeSetLog(bundle, current_result, symbolizer, device_abi):
+  if _BUNDLE_STACK_ID in bundle:
+    if symbolizer and device_abi:
+      current_result.SetLog('%s\n%s' % (bundle[_BUNDLE_STACK_ID], '\n'.join(
+          symbolizer.ExtractAndResolveNativeStackTraces(
+              bundle[_BUNDLE_STACK_ID], device_abi))))
+    else:
+      current_result.SetLog(bundle[_BUNDLE_STACK_ID])
+
+
+def FilterTests(tests, filter_str=None, annotations=None,
+                excluded_annotations=None):
+  """Filter a list of tests
+
+  Args:
+    tests: a list of tests. e.g. [
+           {'annotations": {}, 'class': 'com.example.TestA', 'method':'test1'},
+           {'annotations": {}, 'class': 'com.example.TestB', 'method':'test2'}]
+    filter_str: googletest-style filter string.
+    annotations: a dict of wanted annotations for test methods.
+    exclude_annotations: a dict of annotations to exclude.
+
+  Return:
+    A list of filtered tests
+  """
+  def gtest_filter(t):
+    if not filter_str:
+      return True
+    # Allow fully-qualified name as well as an omitted package.
+    unqualified_class_test = {
+      'class': t['class'].split('.')[-1],
+      'method': t['method']
+    }
+    names = [
+      GetTestName(t, sep='.'),
+      GetTestName(unqualified_class_test, sep='.'),
+      GetUniqueTestName(t, sep='.')
+    ]
+
+    if t['is_junit4']:
+      names += [
+          GetTestNameWithoutParameterPostfix(t, sep='.'),
+          GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
+      ]
+
+    pattern_groups = filter_str.split('-')
+    if len(pattern_groups) > 1:
+      negative_filter = pattern_groups[1]
+      if unittest_util.FilterTestNames(names, negative_filter):
+        return []
+
+    positive_filter = pattern_groups[0]
+    return unittest_util.FilterTestNames(names, positive_filter)
+
+  def annotation_filter(all_annotations):
+    if not annotations:
+      return True
+    return any_annotation_matches(annotations, all_annotations)
+
+  def excluded_annotation_filter(all_annotations):
+    if not excluded_annotations:
+      return True
+    return not any_annotation_matches(excluded_annotations,
+                                      all_annotations)
+
+  def any_annotation_matches(filter_annotations, all_annotations):
+    return any(
+        ak in all_annotations
+        and annotation_value_matches(av, all_annotations[ak])
+        for ak, av in filter_annotations)
+
+  def annotation_value_matches(filter_av, av):
+    if filter_av is None:
+      return True
+    elif isinstance(av, dict):
+      tav_from_dict = av['value']
+      # If tav_from_dict is an int, the 'in' operator breaks, so convert
+      # filter_av and manually compare. See https://crbug.com/1019707
+      if isinstance(tav_from_dict, int):
+        return int(filter_av) == tav_from_dict
+      else:
+        return filter_av in tav_from_dict
+    elif isinstance(av, list):
+      return filter_av in av
+    return filter_av == av
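+  # For example, the filter entry ('Feature', 'Bar') matches a test annotated
+  # {'Feature': {'value': ['Bar', 'Baz']}}: 'Feature' is present and 'Bar'
+  # appears in its 'value' entry.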
+
+  filtered_tests = []
+  for t in tests:
+    # Gtest filtering
+    if not gtest_filter(t):
+      continue
+
+    # Enforce that all tests declare their size.
+    if not any(a in _VALID_ANNOTATIONS for a in t['annotations']):
+      raise MissingSizeAnnotationError(GetTestName(t))
+
+    if (not annotation_filter(t['annotations'])
+        or not excluded_annotation_filter(t['annotations'])):
+      continue
+
+    filtered_tests.append(t)
+
+  return filtered_tests
+
+
+# TODO(yolandyan): remove this once the tests are converted to junit4
+def GetAllTestsFromJar(test_jar):
+  pickle_path = '%s-proguard.pickle' % test_jar
+  try:
+    tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_jar))
+  except TestListPickleException as e:
+    logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests from JAR via proguard.')
+    tests = _GetTestsFromProguard(test_jar)
+    SaveTestsToPickle(pickle_path, tests)
+  return tests
+
+
+def GetAllTestsFromApk(test_apk):
+  pickle_path = '%s-dexdump.pickle' % test_apk
+  try:
+    tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_apk))
+  except TestListPickleException as e:
+    logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests from dex via dexdump.')
+    tests = _GetTestsFromDexdump(test_apk)
+    SaveTestsToPickle(pickle_path, tests)
+  return tests
+
+
+def GetTestsFromPickle(pickle_path, test_mtime):
+  if not os.path.exists(pickle_path):
+    raise TestListPickleException('%s does not exist.' % pickle_path)
+  if os.path.getmtime(pickle_path) <= test_mtime:
+    raise TestListPickleException('File is stale: %s' % pickle_path)
+
+  with open(pickle_path, 'r') as f:
+    pickle_data = pickle.load(f)
+  if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+    raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.')
+  return pickle_data['TEST_METHODS']
+
+
+# TODO(yolandyan): remove this once the test listing from java runner lands
+@instrumentation_tracing.no_tracing
+def _GetTestsFromProguard(jar_path):
+  p = proguard.Dump(jar_path)
+  class_lookup = dict((c['class'], c) for c in p['classes'])
+
+  def is_test_class(c):
+    return c['class'].endswith('Test')
+
+  def is_test_method(m):
+    return m['method'].startswith('test')
+
+  def recursive_class_annotations(c):
+    s = c['superclass']
+    if s in class_lookup:
+      a = recursive_class_annotations(class_lookup[s])
+    else:
+      a = {}
+    a.update(c['annotations'])
+    return a
+
+  def stripped_test_class(c):
+    return {
+      'class': c['class'],
+      'annotations': recursive_class_annotations(c),
+      'methods': [m for m in c['methods'] if is_test_method(m)],
+      'superclass': c['superclass'],
+    }
+
+  return [stripped_test_class(c) for c in p['classes']
+          if is_test_class(c)]
+
+
+def _GetTestsFromDexdump(test_apk):
+  dex_dumps = dexdump.Dump(test_apk)
+  tests = []
+
+  def get_test_methods(methods):
+    return [
+        {
+          'method': m,
+          # No annotation info is available from dexdump, so default each
+          # test method to the MediumTest annotation.
+          'annotations': {'MediumTest': None},
+        } for m in methods if m.startswith('test')]
+
+  for dump in dex_dumps:
+    for package_name, package_info in dump.iteritems():
+      for class_name, class_info in package_info['classes'].iteritems():
+        if class_name.endswith('Test'):
+          tests.append({
+              'class': '%s.%s' % (package_name, class_name),
+              'annotations': {},
+              'methods': get_test_methods(class_info['methods']),
+              'superclass': class_info['superclass'],
+          })
+  return tests
+
+
+def SaveTestsToPickle(pickle_path, tests):
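+  # The pickle payload is a plain dict keyed by 'VERSION' and 'TEST_METHODS';
+  # GetTestsFromPickle rejects it when the format version changes or when the
+  # source jar/apk is newer than the pickle file.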
+  pickle_data = {
+    'VERSION': _PICKLE_FORMAT_VERSION,
+    'TEST_METHODS': tests,
+  }
+  with open(pickle_path, 'w') as pickle_file:
+    pickle.dump(pickle_data, pickle_file)
+
+
+class MissingJUnit4RunnerException(test_exception.TestException):
+  """Raised when JUnit4 runner is not provided or specified in apk manifest"""
+
+  def __init__(self):
+    super(MissingJUnit4RunnerException, self).__init__(
+        'JUnit4 runner is not provided or specified in the test apk manifest.')
+
+
+def GetTestName(test, sep='#'):
+  """Gets the name of the given test.
+
+  Note that this may return the same name for more than one test, e.g. if a
+  test is being run multiple times with different parameters.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+  Returns:
+    The test name as a string.
+  """
+  test_name = '%s%s%s' % (test['class'], sep, test['method'])
+  assert not set(' *-:') & set(test_name), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+  return test_name
+
+
+def GetTestNameWithoutParameterPostfix(
+      test, sep='#', parameterization_sep='__'):
+  """Gets the name of the given JUnit4 test without parameter postfix.
+
+  For most WebView JUnit4 javatests, each test is parameterized with
+  "__sandboxed_mode" to run in both non-sandboxed mode and sandboxed mode.
+
+  This function returns the name of the test without parameterization
+  so test filters can match both parameterized and non-parameterized tests.
+
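+  For example, with the default separators,
+  'org.chromium.Foo#testBar__sandboxed_mode' maps to
+  'org.chromium.Foo#testBar'.
+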
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+    parameterization_sep: the character(s) that separate the method name and
+                          the method parameterization postfix.
+  Returns:
+    The test name without parameter postfix as a string.
+  """
+  name = GetTestName(test, sep=sep)
+  return name.split(parameterization_sep)[0]
+
+
+def GetUniqueTestName(test, sep='#'):
+  """Gets the unique name of the given test.
+
+  This will include text to disambiguate between tests for which GetTestName
+  would return the same name.
+
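+  For example, a test carrying flags=['enable_features=abc'] is reported as
+  'org.chromium.TestA#testSimple_with_enable_features=abc'.
+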
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+  Returns:
+    The unique test name as a string.
+  """
+  display_name = GetTestName(test, sep=sep)
+  if test.get('flags', [None])[0]:
+    sanitized_flags = [x.replace('-', '_') for x in test['flags']]
+    display_name = '%s_with_%s' % (display_name, '_'.join(sanitized_flags))
+
+  assert not set(' *-:') & set(display_name), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+
+  return display_name
+
+
+class InstrumentationTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, data_deps_delegate, error_func):
+    super(InstrumentationTestInstance, self).__init__()
+
+    self._additional_apks = []
+    self._apk_under_test = None
+    self._apk_under_test_incremental_install_json = None
+    self._modules = None
+    self._fake_modules = None
+    self._additional_locales = None
+    self._package_info = None
+    self._suite = None
+    self._test_apk = None
+    self._test_apk_incremental_install_json = None
+    self._test_jar = None
+    self._test_package = None
+    self._junit3_runner_class = None
+    self._junit4_runner_class = None
+    self._junit4_runner_supports_listing = None
+    self._test_support_apk = None
+    self._initializeApkAttributes(args, error_func)
+
+    self._data_deps = None
+    self._data_deps_delegate = None
+    self._runtime_deps_path = None
+    self._initializeDataDependencyAttributes(args, data_deps_delegate)
+
+    self._annotations = None
+    self._excluded_annotations = None
+    self._test_filter = None
+    self._initializeTestFilterAttributes(args)
+
+    self._flags = None
+    self._use_apk_under_test_flags_file = False
+    self._initializeFlagAttributes(args)
+
+    self._screenshot_dir = None
+    self._timeout_scale = None
+    self._wait_for_java_debugger = None
+    self._initializeTestControlAttributes(args)
+
+    self._coverage_directory = None
+    self._jacoco_coverage_type = None
+    self._initializeTestCoverageAttributes(args)
+
+    self._store_tombstones = False
+    self._symbolizer = None
+    self._enable_java_deobfuscation = False
+    self._deobfuscator = None
+    self._initializeLogAttributes(args)
+
+    self._edit_shared_prefs = []
+    self._initializeEditPrefsAttributes(args)
+
+    self._replace_system_package = None
+    self._initializeReplaceSystemPackageAttributes(args)
+
+    self._system_packages_to_remove = None
+    self._initializeSystemPackagesToRemoveAttributes(args)
+
+    self._use_webview_provider = None
+    self._initializeUseWebviewProviderAttributes(args)
+
+    self._skia_gold_properties = None
+    self._initializeSkiaGoldAttributes(args)
+
+    self._wpr_enable_record = args.wpr_enable_record
+
+    self._external_shard_index = args.test_launcher_shard_index
+    self._total_external_shards = args.test_launcher_total_shards
+
+  def _initializeApkAttributes(self, args, error_func):
+    if args.apk_under_test:
+      apk_under_test_path = args.apk_under_test
+      if (not args.apk_under_test.endswith('.apk')
+          and not args.apk_under_test.endswith('.apks')):
+        apk_under_test_path = os.path.join(
+            constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+            '%s.apk' % args.apk_under_test)
+
+      # TODO(jbudorick): Move the realpath up to the argument parser once
+      # APK-by-name is no longer supported.
+      apk_under_test_path = os.path.realpath(apk_under_test_path)
+
+      if not os.path.exists(apk_under_test_path):
+        error_func('Unable to find APK under test: %s' % apk_under_test_path)
+
+      self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)
+
+    test_apk_path = args.test_apk
+    if not os.path.exists(test_apk_path):
+      test_apk_path = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.test_apk)
+      # TODO(jbudorick): Move the realpath up to the argument parser once
+      # APK-by-name is no longer supported.
+      test_apk_path = os.path.realpath(test_apk_path)
+
+    if not os.path.exists(test_apk_path):
+      error_func('Unable to find test APK: %s' % test_apk_path)
+
+    self._test_apk = apk_helper.ToHelper(test_apk_path)
+    self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
+
+    self._apk_under_test_incremental_install_json = (
+        args.apk_under_test_incremental_install_json)
+    self._test_apk_incremental_install_json = (
+        args.test_apk_incremental_install_json)
+
+    if self._test_apk_incremental_install_json:
+      assert self._suite.endswith('_incremental')
+      self._suite = self._suite[:-len('_incremental')]
+
+    self._modules = args.modules
+    self._fake_modules = args.fake_modules
+    self._additional_locales = args.additional_locales
+
+    self._test_jar = args.test_jar
+    self._test_support_apk = apk_helper.ToHelper(os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%sSupport.apk' % self._suite))
+
+    if not self._test_jar:
+      logging.warning('Test jar not specified. Test runner will not have '
+                      'Java annotation info available. May not handle test '
+                      'timeouts correctly.')
+    elif not os.path.exists(self._test_jar):
+      error_func('Unable to find test JAR: %s' % self._test_jar)
+
+    self._test_package = self._test_apk.GetPackageName()
+    all_instrumentations = self._test_apk.GetAllInstrumentations()
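+    # A truthy 'chromium-junit3' metadata entry (rendered as '0xffffffff' in
+    # the dumped manifest) marks an instrumentation as the JUnit3 runner; all
+    # others are treated as JUnit4 runners.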
+    all_junit3_runner_classes = [
+        x for x in all_instrumentations if ('0xffffffff' in x.get(
+            'chromium-junit3', ''))]
+    all_junit4_runner_classes = [
+        x for x in all_instrumentations if ('0xffffffff' not in x.get(
+            'chromium-junit3', ''))]
+
+    if len(all_junit3_runner_classes) > 1:
+      logging.warning('This test apk has more than one JUnit3 instrumentation')
+    if len(all_junit4_runner_classes) > 1:
+      logging.warning('This test apk has more than one JUnit4 instrumentation')
+
+    self._junit3_runner_class = (
+      all_junit3_runner_classes[0]['android:name']
+      if all_junit3_runner_classes else self.test_apk.GetInstrumentationName())
+
+    self._junit4_runner_class = (
+      all_junit4_runner_classes[0]['android:name']
+      if all_junit4_runner_classes else None)
+
+    if self._junit4_runner_class:
+      if self._test_apk_incremental_install_json:
+        self._junit4_runner_supports_listing = next(
+            (True for x in self._test_apk.GetAllMetadata()
+             if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS),
+            False)
+      else:
+        self._junit4_runner_supports_listing = (
+            self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS)
+
+    self._package_info = None
+    if self._apk_under_test:
+      package_under_test = self._apk_under_test.GetPackageName()
+      for package_info in constants.PACKAGE_INFO.itervalues():
+        if package_under_test == package_info.package:
+          self._package_info = package_info
+          break
+    if not self._package_info:
+      logging.warning(("Unable to find package info for %s. " +
+                       "(This may just mean that the test package is " +
+                       "currently being installed.)"),
+                       self._test_package)
+
+    for apk in args.additional_apks:
+      if not os.path.exists(apk):
+        error_func('Unable to find additional APK: %s' % apk)
+    self._additional_apks = (
+        [apk_helper.ToHelper(x) for x in args.additional_apks])
+
+  def _initializeDataDependencyAttributes(self, args, data_deps_delegate):
+    self._data_deps = []
+    self._data_deps_delegate = data_deps_delegate
+    self._runtime_deps_path = args.runtime_deps_path
+
+    if not self._runtime_deps_path:
+      logging.warning('No data dependencies will be pushed.')
+
+  def _initializeTestFilterAttributes(self, args):
+    self._test_filter = test_filter.InitializeFilterFromArgs(args)
+
+    def annotation_element(a):
+      a = a.split('=', 1)
+      return (a[0], a[1] if len(a) == 2 else None)
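+    # e.g. 'Feature=Bar' -> ('Feature', 'Bar'); a bare 'SmallTest' maps to
+    # ('SmallTest', None).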
+
+    if args.annotation_str:
+      self._annotations = [
+          annotation_element(a) for a in args.annotation_str.split(',')]
+    elif not self._test_filter:
+      self._annotations = [
+          annotation_element(a) for a in _DEFAULT_ANNOTATIONS]
+    else:
+      self._annotations = []
+
+    if args.exclude_annotation_str:
+      self._excluded_annotations = [
+          annotation_element(a) for a in args.exclude_annotation_str.split(',')]
+    else:
+      self._excluded_annotations = []
+
+    requested_annotations = set(a[0] for a in self._annotations)
+    if not args.run_disabled:
+      self._excluded_annotations.extend(
+          annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS
+          if a not in requested_annotations)
+
+  def _initializeFlagAttributes(self, args):
+    self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file
+    self._flags = ['--enable-test-intents']
+    if args.command_line_flags:
+      self._flags.extend(args.command_line_flags)
+    if args.device_flags_file:
+      with open(args.device_flags_file) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend(flag for flag in stripped_lines if flag)
+    if args.strict_mode and args.strict_mode != 'off' and (
+        # TODO(yliuyliu): Turn on strict mode for coverage once
+        # crbug/1006397 is fixed.
+        not args.coverage_dir):
+      self._flags.append('--strict-mode=' + args.strict_mode)
+
+  def _initializeTestControlAttributes(self, args):
+    self._screenshot_dir = args.screenshot_dir
+    self._timeout_scale = args.timeout_scale or 1
+    self._wait_for_java_debugger = args.wait_for_java_debugger
+
+  def _initializeTestCoverageAttributes(self, args):
+    self._coverage_directory = args.coverage_dir
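+    # Derive the JaCoCo coverage split from the ("Batch", "UnitTests")
+    # annotation filters: only unit tests vs. unit tests excluded.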
+    if ("Batch", "UnitTests") in self._annotations and (
+        "Batch", "UnitTests") not in self._excluded_annotations:
+      self._jacoco_coverage_type = "unit_tests_only"
+    elif ("Batch", "UnitTests") not in self._annotations and (
+        "Batch", "UnitTests") in self._excluded_annotations:
+      self._jacoco_coverage_type = "unit_tests_excluded"
+
+  def _initializeLogAttributes(self, args):
+    self._enable_java_deobfuscation = args.enable_java_deobfuscation
+    self._store_tombstones = args.store_tombstones
+    self._symbolizer = stack_symbolizer.Symbolizer(
+        self.apk_under_test.path if self.apk_under_test else None)
+
+  def _initializeEditPrefsAttributes(self, args):
+    if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file:
+      return
+    if not isinstance(args.shared_prefs_file, str):
+      logging.warning("Given non-string for a filepath")
+      return
+    self._edit_shared_prefs = shared_preference_utils.ExtractSettingsFromJson(
+        args.shared_prefs_file)
+
+  def _initializeReplaceSystemPackageAttributes(self, args):
+    if (not hasattr(args, 'replace_system_package')
+        or not args.replace_system_package):
+      return
+    self._replace_system_package = args.replace_system_package
+
+  def _initializeSystemPackagesToRemoveAttributes(self, args):
+    if (not hasattr(args, 'system_packages_to_remove')
+        or not args.system_packages_to_remove):
+      return
+    self._system_packages_to_remove = args.system_packages_to_remove
+
+  def _initializeUseWebviewProviderAttributes(self, args):
+    if (not hasattr(args, 'use_webview_provider')
+        or not args.use_webview_provider):
+      return
+    self._use_webview_provider = args.use_webview_provider
+
+  def _initializeSkiaGoldAttributes(self, args):
+    self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args)
+
+  @property
+  def additional_apks(self):
+    return self._additional_apks
+
+  @property
+  def apk_under_test(self):
+    return self._apk_under_test
+
+  @property
+  def apk_under_test_incremental_install_json(self):
+    return self._apk_under_test_incremental_install_json
+
+  @property
+  def modules(self):
+    return self._modules
+
+  @property
+  def fake_modules(self):
+    return self._fake_modules
+
+  @property
+  def additional_locales(self):
+    return self._additional_locales
+
+  @property
+  def coverage_directory(self):
+    return self._coverage_directory
+
+  @property
+  def edit_shared_prefs(self):
+    return self._edit_shared_prefs
+
+  @property
+  def external_shard_index(self):
+    return self._external_shard_index
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def jacoco_coverage_type(self):
+    return self._jacoco_coverage_type
+
+  @property
+  def junit3_runner_class(self):
+    return self._junit3_runner_class
+
+  @property
+  def junit4_runner_class(self):
+    return self._junit4_runner_class
+
+  @property
+  def junit4_runner_supports_listing(self):
+    return self._junit4_runner_supports_listing
+
+  @property
+  def package_info(self):
+    return self._package_info
+
+  @property
+  def replace_system_package(self):
+    return self._replace_system_package
+
+  @property
+  def use_webview_provider(self):
+    return self._use_webview_provider
+
+  @property
+  def screenshot_dir(self):
+    return self._screenshot_dir
+
+  @property
+  def skia_gold_properties(self):
+    return self._skia_gold_properties
+
+  @property
+  def store_tombstones(self):
+    return self._store_tombstones
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def symbolizer(self):
+    return self._symbolizer
+
+  @property
+  def system_packages_to_remove(self):
+    return self._system_packages_to_remove
+
+  @property
+  def test_apk(self):
+    return self._test_apk
+
+  @property
+  def test_apk_incremental_install_json(self):
+    return self._test_apk_incremental_install_json
+
+  @property
+  def test_jar(self):
+    return self._test_jar
+
+  @property
+  def test_support_apk(self):
+    return self._test_support_apk
+
+  @property
+  def test_package(self):
+    return self._test_package
+
+  @property
+  def timeout_scale(self):
+    return self._timeout_scale
+
+  @property
+  def total_external_shards(self):
+    return self._total_external_shards
+
+  @property
+  def use_apk_under_test_flags_file(self):
+    return self._use_apk_under_test_flags_file
+
+  @property
+  def wait_for_java_debugger(self):
+    return self._wait_for_java_debugger
+
+  @property
+  def wpr_record_mode(self):
+    return self._wpr_enable_record
+
+  @property
+  def wpr_replay_mode(self):
+    return not self._wpr_enable_record
+
+  #override
+  def TestType(self):
+    return 'instrumentation'
+
+  #override
+  def GetPreferredAbis(self):
+    # We could alternatively take the intersection of what they all support,
+    # but it should never be the case that they support different things.
+    apks = [self._test_apk, self._apk_under_test] + self._additional_apks
+    for apk in apks:
+      if apk:
+        ret = apk.GetAbis()
+        if ret:
+          return ret
+    return []
+
+  #override
+  def SetUp(self):
+    self._data_deps.extend(
+        self._data_deps_delegate(self._runtime_deps_path))
+    if self._enable_java_deobfuscation:
+      self._deobfuscator = deobfuscator.DeobfuscatorPool(
+          self.test_apk.path + '.mapping')
+
+  def GetDataDependencies(self):
+    return self._data_deps
+
+  def GetTests(self):
+    if self.test_jar:
+      raw_tests = GetAllTestsFromJar(self.test_jar)
+    else:
+      raw_tests = GetAllTestsFromApk(self.test_apk.path)
+    return self.ProcessRawTests(raw_tests)
+
+  def MaybeDeobfuscateLines(self, lines):
+    if not self._deobfuscator:
+      return lines
+    return self._deobfuscator.TransformLines(lines)
+
+  def ProcessRawTests(self, raw_tests):
+    inflated_tests = self._ParameterizeTestsWithFlags(
+        self._InflateTests(raw_tests))
+    if self._junit4_runner_class is None and any(
+        t['is_junit4'] for t in inflated_tests):
+      raise MissingJUnit4RunnerException()
+    filtered_tests = FilterTests(
+        inflated_tests, self._test_filter, self._annotations,
+        self._excluded_annotations)
+    if self._test_filter and not filtered_tests:
+      for t in inflated_tests:
+        logging.debug('  %s', GetUniqueTestName(t))
+      logging.warning('Unmatched Filter: %s', self._test_filter)
+    return filtered_tests
+
+  # pylint: disable=no-self-use
+  def _InflateTests(self, tests):
+    inflated_tests = []
+    for c in tests:
+      for m in c['methods']:
+        a = dict(c['annotations'])
+        a.update(m['annotations'])
+        inflated_tests.append({
+            'class': c['class'],
+            'method': m['method'],
+            'annotations': a,
+            # TODO(https://crbug.com/1084729): Remove is_junit4.
+            'is_junit4': True
+        })
+    return inflated_tests
+
+  def _ParameterizeTestsWithFlags(self, tests):
+
+    def _checkParameterization(annotations):
+      types = [
+          _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES,
+          _PARAMETERIZED_COMMAND_LINE_FLAGS,
+      ]
+      if types[0] in annotations and types[1] in annotations:
+        raise CommandLineParameterizationException(
+            'Multiple command-line parameterization types: {}.'.format(
+                ', '.join(types)))
+
+    def _switchesToFlags(switches):
+      return ['--{}'.format(s) for s in switches if s]
+
+    def _annotationToSwitches(clazz, methods):
+      if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES:
+        return [methods['value']]
+      elif clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS:
+        list_of_switches = []
+        for annotation in methods['value']:
+          for clazz, methods in annotation.iteritems():
+            list_of_switches += _annotationToSwitches(clazz, methods)
+        return list_of_switches
+      else:
+        return []
+
+    def _setTestFlags(test, flags):
+      if flags:
+        test['flags'] = flags
+      elif 'flags' in test:
+        del test['flags']
+
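+    # Each switch list yields one test run: the first list rewrites the
+    # original test's flags in place, and every additional list appends a
+    # copy of the test carrying its own flags.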
+    new_tests = []
+    for t in tests:
+      annotations = t['annotations']
+      list_of_switches = []
+      _checkParameterization(annotations)
+      if _SKIP_PARAMETERIZATION not in annotations:
+        for clazz, methods in annotations.iteritems():
+          list_of_switches += _annotationToSwitches(clazz, methods)
+      if list_of_switches:
+        _setTestFlags(t, _switchesToFlags(list_of_switches[0]))
+        for p in list_of_switches[1:]:
+          parameterized_t = copy.copy(t)
+          _setTestFlags(parameterized_t, _switchesToFlags(p))
+          new_tests.append(parameterized_t)
+    return tests + new_tests
+
+  @staticmethod
+  def ParseAmInstrumentRawOutput(raw_output):
+    return ParseAmInstrumentRawOutput(raw_output)
+
+  @staticmethod
+  def GenerateTestResults(result_code, result_bundle, statuses, duration_ms,
+                          device_abi, symbolizer):
+    return GenerateTestResults(result_code, result_bundle, statuses,
+                               duration_ms, device_abi, symbolizer)
+
+  #override
+  def TearDown(self):
+    self.symbolizer.CleanUp()
+    if self._deobfuscator:
+      self._deobfuscator.Close()
+      self._deobfuscator = None
diff --git a/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000..77918bb
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,1187 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for instrumentation_test_instance."""
+
+# pylint: disable=protected-access
+
+import collections
+import tempfile
+import unittest
+
+from pylib.base import base_test_result
+from pylib.instrumentation import instrumentation_test_instance
+
+import mock  # pylint: disable=import-error
+
+_INSTRUMENTATION_TEST_INSTANCE_PATH = (
+    'pylib.instrumentation.instrumentation_test_instance.%s')
+
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+  def setUp(self):
+    options = mock.Mock()
+    options.tool = ''
+
+  @staticmethod
+  def createTestInstance():
+    c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance'
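+    # Patch out the _initialize* helpers so constructing the instance does not
+    # touch the filesystem; each test then sets the attributes it needs.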
+    # yapf: disable
+    with mock.patch('%s._initializeApkAttributes' % c), (
+         mock.patch('%s._initializeDataDependencyAttributes' % c)), (
+         mock.patch('%s._initializeTestFilterAttributes' % c)), (
+         mock.patch('%s._initializeFlagAttributes' % c)), (
+         mock.patch('%s._initializeTestControlAttributes' % c)), (
+         mock.patch('%s._initializeTestCoverageAttributes' % c)), (
+         mock.patch('%s._initializeSkiaGoldAttributes' % c)):
+      # yapf: enable
+      return instrumentation_test_instance.InstrumentationTestInstance(
+          mock.MagicMock(), mock.MagicMock(), lambda s: None)
+
+  _FlagAttributesArgs = collections.namedtuple('_FlagAttributesArgs', [
+      'command_line_flags', 'device_flags_file', 'strict_mode',
+      'use_apk_under_test_flags_file', 'coverage_dir'
+  ])
+
+  def createFlagAttributesArgs(self,
+                               command_line_flags=None,
+                               device_flags_file=None,
+                               strict_mode=None,
+                               use_apk_under_test_flags_file=False,
+                               coverage_dir=None):
+    return self._FlagAttributesArgs(command_line_flags, device_flags_file,
+                                    strict_mode, use_apk_under_test_flags_file,
+                                    coverage_dir)
+
+  def test_initializeFlagAttributes_commandLineFlags(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar'])
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+  def test_initializeFlagAttributes_deviceFlagsFile(self):
+    o = self.createTestInstance()
+    with tempfile.NamedTemporaryFile() as flags_file:
+      flags_file.write('\n'.join(['--foo', '--bar']))
+      flags_file.flush()
+
+      args = self.createFlagAttributesArgs(device_flags_file=flags_file.name)
+      o._initializeFlagAttributes(args)
+      self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+  def test_initializeFlagAttributes_strictModeOn(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(strict_mode='on')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+
+  def test_initializeFlagAttributes_strictModeOn_coverageOn(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(
+        strict_mode='on', coverage_dir='/coverage/dir')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents'])
+
+  def test_initializeFlagAttributes_strictModeOff(self):
+    o = self.createTestInstance()
+    args = self.createFlagAttributesArgs(strict_mode='off')
+    o._initializeFlagAttributes(args)
+    self.assertEquals(o._flags, ['--enable-test-intents'])
+
+  def testGetTests_noFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'MediumTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod2',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_simpleGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_simpleGtestUnqualifiedNameFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_parameterizedTestGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1__sandboxed_mode',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1',
+        'is_junit4': True,
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'method': 'testMethod1__sandboxed_mode',
+        'is_junit4': True,
+      },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_wildcardGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = 'org.chromium.test.SampleTest2.*'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_negativeGtestFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'MediumTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod2',
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._test_filter = '*-org.chromium.test.SampleTest.testMethod1'
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Foo']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._annotations = [('SmallTest', None)]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_excludedAnnotationFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Foo']
+                },
+                'MediumTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod2',
+        },
+    ]
+
+    o._excluded_annotations = [('SmallTest', None)]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationSimpleValueFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {
+              'SmallTest': None,
+              'TestValue': '1',
+            },
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {
+              'MediumTest': None,
+              'TestValue': '2',
+            },
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {
+              'SmallTest': None,
+              'TestValue': '3',
+            },
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Foo']
+                },
+                'SmallTest': None,
+                'TestValue': '1',
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod1',
+        },
+    ]
+
+    o._annotations = [('TestValue', '1')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTests_annotationDictValueFilter(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {'MediumTest': None},
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'java.lang.Object',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+      {
+        'annotations': {
+          'Feature': {'value': ['Bar']},
+          'SmallTest': None,
+        },
+        'class': 'org.chromium.test.SampleTest2',
+        'is_junit4': True,
+        'method': 'testMethod1',
+      },
+    ]
+
+    o._annotations = [('Feature', 'Bar')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGetTestName(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None',
+                 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA',
+      'is_junit4': True,
+      'method': 'testSimple'}
+    unqualified_class_test = {
+      'class': test['class'].split('.')[-1],
+      'method': test['method']
+    }
+
+    self.assertEquals(
+        instrumentation_test_instance.GetTestName(test, sep='.'),
+        'org.chromium.TestA.testSimple')
+    self.assertEquals(
+        instrumentation_test_instance.GetTestName(
+            unqualified_class_test, sep='.'),
+        'TestA.testSimple')
+
+  def testGetUniqueTestName(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA',
+      'flags': ['enable_features=abc'],
+      'is_junit4': True,
+      'method': 'testSimple'}
+    self.assertEquals(
+        instrumentation_test_instance.GetUniqueTestName(
+            test, sep='.'),
+        'org.chromium.TestA.testSimple_with_enable_features=abc')
+
+  def testGetTestNameWithoutParameterPostfix(self):
+    test = {
+      'annotations': {
+        'RunWith': {'value': 'class J4Runner'},
+        'SmallTest': {},
+        'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+        'UiThreadTest': {}},
+      'class': 'org.chromium.TestA__sandbox_mode',
+      'flags': 'enable_features=abc',
+      'is_junit4': True,
+      'method': 'testSimple'}
+    unqualified_class_test = {
+      'class': test['class'].split('.')[-1],
+      'method': test['method']
+    }
+    self.assertEquals(
+        instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+            test, sep='.'),
+        'org.chromium.TestA')
+    self.assertEquals(
+        instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+            unqualified_class_test, sep='.'),
+        'TestA')
+
+  def testGetTests_multipleAnnotationValuesRequested(self):
+    o = self.createTestInstance()
+    raw_tests = [
+      {
+        'annotations': {'Feature': {'value': ['Foo']}},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+          {
+            'annotations': {
+              'Feature': {'value': ['Baz']},
+              'MediumTest': None,
+            },
+            'method': 'testMethod2',
+          },
+        ],
+      },
+      {
+        'annotations': {'Feature': {'value': ['Bar']}},
+        'class': 'org.chromium.test.SampleTest2',
+        'superclass': 'junit.framework.TestCase',
+        'methods': [
+          {
+            'annotations': {'SmallTest': None},
+            'method': 'testMethod1',
+          },
+        ],
+      }
+    ]
+
+    expected_tests = [
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Baz']
+                },
+                'MediumTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod2',
+        },
+        {
+            'annotations': {
+                'Feature': {
+                    'value': ['Bar']
+                },
+                'SmallTest': None,
+            },
+            'class': 'org.chromium.test.SampleTest2',
+            'is_junit4': True,
+            'method': 'testMethod1',
+        },
+    ]
+
+    o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')]
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testGenerateTestResults_noStatus(self):
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, [], 1000, None, None)
+    self.assertEqual([], results)
+
+  def testGenerateTestResults_testPassed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'true',
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_false(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'false',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testFailed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-2, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+  def testGenerateTestResults_testUnknownException(self):
+    stacktrace = 'long\nstacktrace'
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+        'stack': stacktrace,
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+    self.assertEqual(stacktrace, results[0].GetLog())
+
+  def testGenerateJUnitTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-3, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 1000, None, None)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testParameterizedCommandLineFlagsSwitches(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {
+            'ParameterizedCommandLineFlags$Switches': {
+                'value': ['enable-features=abc', 'enable-features=def']
+            }
+        },
+        'class':
+        'org.chromium.test.SampleTest',
+        'superclass':
+        'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None
+                },
+                'method': 'testMethod1',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': ['enable-features=ghi', 'enable-features=jkl']
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': []
+                    },
+                },
+                'method': 'testMethod3',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'SkipCommandLineParameterization': None,
+                },
+                'method': 'testMethod4',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=abc', '--enable-features=def'],
+            'is_junit4': True,
+            'method': 'testMethod1'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=ghi', '--enable-features=jkl'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod4'
+        },
+    ]
+    for i in range(4):
+      expected_tests[i]['annotations'].update(raw_tests[0]['annotations'])
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEquals(actual_tests, expected_tests)
+
+  def testParameterizedCommandLineFlags(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {
+            'ParameterizedCommandLineFlags': {
+                'value': [
+                    {
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc',
+                                'force-fieldtrials=trial/group'
+                            ],
+                        }
+                    },
+                    {
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc2',
+                                'force-fieldtrials=trial/group2'
+                            ],
+                        }
+                    },
+                ],
+            },
+        },
+        'class':
+        'org.chromium.test.SampleTest',
+        'superclass':
+        'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None
+                },
+                'method': 'testMethod1',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [{
+                            'ParameterizedCommandLineFlags$Switches': {
+                                'value': ['enable-features=def']
+                            }
+                        }],
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [],
+                    },
+                },
+                'method': 'testMethod3',
+            },
+            {
+                'annotations': {
+                    'MediumTest': None,
+                    'SkipCommandLineParameterization': None,
+                },
+                'method': 'testMethod4',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=abc',
+                      '--force-fieldtrials=trial/group'],
+            'is_junit4': True,
+            'method': 'testMethod1'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--enable-features=def'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'is_junit4': True,
+            'method': 'testMethod4'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': [
+                '--enable-features=abc2',
+                '--force-fieldtrials=trial/group2',
+            ],
+            'is_junit4': True,
+            'method': 'testMethod1'
+        },
+    ]
+    for i in range(4):
+      expected_tests[i]['annotations'].update(raw_tests[0]['annotations'])
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+    expected_tests[4]['annotations'].update(raw_tests[0]['annotations'])
+    expected_tests[4]['annotations'].update(
+        raw_tests[0]['methods'][0]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEqual(actual_tests, expected_tests)
+
+  def testDifferentCommandLineParameterizations(self):
+    o = self.createTestInstance()
+    raw_tests = [{
+        'annotations': {},
+        'class': 'org.chromium.test.SampleTest',
+        'superclass': 'java.lang.Object',
+        'methods': [
+            {
+                'annotations': {
+                    'SmallTest': None,
+                    'ParameterizedCommandLineFlags': {
+                        'value': [
+                            {
+                                'ParameterizedCommandLineFlags$Switches': {
+                                    'value': ['a1', 'a2'],
+                                }
+                            },
+                        ],
+                    },
+                },
+                'method': 'testMethod2',
+            },
+            {
+                'annotations': {
+                    'SmallTest': None,
+                    'ParameterizedCommandLineFlags$Switches': {
+                        'value': ['b1', 'b2'],
+                    },
+                },
+                'method': 'testMethod3',
+            },
+        ],
+    }]
+
+    expected_tests = [
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--a1', '--a2'],
+            'is_junit4': True,
+            'method': 'testMethod2'
+        },
+        {
+            'annotations': {},
+            'class': 'org.chromium.test.SampleTest',
+            'flags': ['--b1', '--b2'],
+            'is_junit4': True,
+            'method': 'testMethod3'
+        },
+    ]
+    for i in range(2):
+      expected_tests[i]['annotations'].update(
+          raw_tests[0]['methods'][i]['annotations'])
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    actual_tests = o.ProcessRawTests(raw_tests)
+    self.assertEqual(actual_tests, expected_tests)
+
+  def testMultipleCommandLineParameterizations_raises(self):
+    o = self.createTestInstance()
+    raw_tests = [
+        {
+            'annotations': {
+                'ParameterizedCommandLineFlags': {
+                    'value': [
+                        {
+                            'ParameterizedCommandLineFlags$Switches': {
+                                'value': [
+                                    'enable-features=abc',
+                                    'force-fieldtrials=trial/group',
+                                ],
+                            }
+                        },
+                    ],
+                },
+            },
+            'class': 'org.chromium.test.SampleTest',
+            'superclass': 'java.lang.Object',
+            'methods': [
+                {
+                    'annotations': {
+                        'SmallTest': None,
+                        'ParameterizedCommandLineFlags$Switches': {
+                            'value': [
+                                'enable-features=abc',
+                                'force-fieldtrials=trial/group',
+                            ],
+                        },
+                    },
+                    'method': 'testMethod1',
+                },
+            ],
+        },
+    ]
+
+    o._test_jar = 'path/to/test.jar'
+    o._junit4_runner_class = 'J4Runner'
+    self.assertRaises(
+        instrumentation_test_instance.CommandLineParameterizationException,
+        o.ProcessRawTests, [raw_tests[0]])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/instrumentation/json_perf_parser.py b/src/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000..c647890
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictionaries each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags).
+      category: The passed in category filter.
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = [j for j in json_data if EntryFilter(j)]
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0:
+    result['average'] = total_sum / result['count']
+
+  return result
+
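+# Example (derived from the span events in the GetAverageRunInfo docstring
+# above): the two 'S'/'F' events form a single 4 ms span.
+#
+#   events = [
+#       {'cat': 'Java', 'ts': 10000000000, 'ph': 'S', 'name': 'TestTrace'},
+#       {'cat': 'Java', 'ts': 10000004000, 'ph': 'F', 'name': 'TestTrace'},
+#   ]
+#   info = GetAverageRunInfo(events, 'TestTrace')
+#   # info['type'] == 'Span', info['count'] == 1,
+#   # info['average'] == 4.0, info['units'] == 'ms'
+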
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
diff --git a/src/build/android/pylib/instrumentation/render_test.html.jinja b/src/build/android/pylib/instrumentation/render_test.html.jinja
new file mode 100644
index 0000000..81b85b7
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/render_test.html.jinja
@@ -0,0 +1,40 @@
+<html>
+<head>
+  <title>{{ test_name }}</title>
+  <script>
+  function toggleZoom() {
+    for (const img of document.getElementsByTagName("img")) {
+      if (img.hasAttribute('style')) {
+        img.removeAttribute('style');
+      } else {
+        img.style.width = '100%';
+      }
+    }
+  }
+  </script>
+</head>
+<body>
+  <a href="https://cs.chromium.org/search/?q={{ test_name }}&m=100&type=cs">Link to Golden (in repo)</a><br />
+  <a download="{{ test_name }}" href="{{ failure_link }}">Download Failure Image (right click and 'Save link as')</a>
+  <table>
+    <thead>
+      <tr>
+        <th>Failure</th>
+        <th>Golden</th>
+        <th>Diff</th>
+      </tr>
+    </thead>
+    <tbody style="vertical-align: top">
+      <tr onclick="toggleZoom()">
+        <td><img src="{{ failure_link }}" style="width: 100%" /></td>
+        {% if golden_link %}
+        <td><img src="{{ golden_link }}" style="width: 100%" /></td>
+        <td><img src="{{ diff_link }}" style="width: 100%" /></td>
+        {% else %}
+        <td>No Golden Image.</td>
+        {% endif %}
+      </tr>
+    </tbody>
+  </table>
+</body>
+</html>
diff --git a/src/build/android/pylib/instrumentation/test_result.py b/src/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000..a1c7307
--- /dev/null
+++ b/src/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+  """Result information for a single instrumentation test."""
+
+  def __init__(self, full_name, test_type, dur, log=''):
+    """Construct an InstrumentationTestResult object.
+
+    Args:
+      full_name: Full name of the test.
+      test_type: Type of the test result as defined in ResultType.
+      dur: Duration of the test run in milliseconds.
+      log: A string listing any errors.
+    """
+    super(InstrumentationTestResult, self).__init__(
+        full_name, test_type, dur, log)
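+    # Full instrumentation test names look like 'package.Class#testMethod';
+    # split on '#' to recover the class and method names, falling back to
+    # the full name for both when there is no '#'.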
+    name_pieces = full_name.rsplit('#')
+    if len(name_pieces) > 1:
+      self._test_name = name_pieces[1]
+      self._class_name = name_pieces[0]
+    else:
+      self._class_name = full_name
+      self._test_name = full_name
+
+  def SetDuration(self, duration):
+    """Set the test duration."""
+    self._duration = duration
diff --git a/src/build/android/pylib/junit/__init__.py b/src/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/junit/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/junit/junit_test_instance.py b/src/build/android/pylib/junit/junit_test_instance.py
new file mode 100644
index 0000000..a3d18e0
--- /dev/null
+++ b/src/build/android/pylib/junit/junit_test_instance.py
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.utils import test_filter
+
+
+class JunitTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, _):
+    super(JunitTestInstance, self).__init__()
+
+    self._coverage_dir = args.coverage_dir
+    self._debug_socket = args.debug_socket
+    self._coverage_on_the_fly = args.coverage_on_the_fly
+    self._package_filter = args.package_filter
+    self._resource_apk = args.resource_apk
+    self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir
+    self._runner_filter = args.runner_filter
+    self._shards = args.shards
+    self._test_filter = test_filter.InitializeFilterFromArgs(args)
+    self._test_suite = args.test_suite
+
+  #override
+  def TestType(self):
+    return 'junit'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
+
+  @property
+  def coverage_dir(self):
+    return self._coverage_dir
+
+  @property
+  def coverage_on_the_fly(self):
+    return self._coverage_on_the_fly
+
+  @property
+  def debug_socket(self):
+    return self._debug_socket
+
+  @property
+  def package_filter(self):
+    return self._package_filter
+
+  @property
+  def resource_apk(self):
+    return self._resource_apk
+
+  @property
+  def robolectric_runtime_deps_dir(self):
+    return self._robolectric_runtime_deps_dir
+
+  @property
+  def runner_filter(self):
+    return self._runner_filter
+
+  @property
+  def test_filter(self):
+    return self._test_filter
+
+  @property
+  def shards(self):
+    return self._shards
+
+  @property
+  def suite(self):
+    return self._test_suite
diff --git a/src/build/android/pylib/local/__init__.py b/src/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/device/__init__.py b/src/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/device/local_device_environment.py b/src/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000..d2a9077a
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,328 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import datetime
+import functools
+import logging
+import os
+import shutil
+import tempfile
+import threading
+
+import devil_chromium
+from devil import base_error
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import logcat_monitor
+from devil.android.sdk import adb_wrapper
+from devil.utils import file_utils
+from devil.utils import parallelizer
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import environment
+from pylib.utils import instrumentation_tracing
+from py_trace_event import trace_event
+
+
+LOGCAT_FILTERS = [
+  'chromium:v',
+  'cr_*:v',
+  'DEBUG:I',
+  'StrictMode:D',
+]
+
+
+def _DeviceCachePath(device):
+  file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+  return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def handle_shard_failures(f):
+  """A decorator that handles device failures for per-device functions.
+
+  Args:
+    f: the function being decorated. The function must take at least one
+      argument, and that argument must be the device.
+  """
+  return handle_shard_failures_with(None)(f)
+
+
+# TODO(jbudorick): Refactor this to work as a decorator or context manager.
+def handle_shard_failures_with(on_failure):
+  """A decorator that handles device failures for per-device functions.
+
+  This calls on_failure in the event of a failure.
+
+  Args:
+    on_failure: A binary function called on failure with the device and the
+      name of the failed function.
+
+  Returns:
+    A decorator for functions that take the device as their first argument.
+  """
+  def decorator(f):
+    @functools.wraps(f)
+    def wrapper(dev, *args, **kwargs):
+      try:
+        return f(dev, *args, **kwargs)
+      except device_errors.CommandTimeoutError:
+        logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev))
+      except device_errors.DeviceUnreachableError:
+        logging.exception('Shard died: %s(%s)', f.__name__, str(dev))
+      except base_error.BaseError:
+        logging.exception('Shard failed: %s(%s)', f.__name__, str(dev))
+      except SystemExit:
+        logging.exception('Shard killed: %s(%s)', f.__name__, str(dev))
+        raise
+      if on_failure:
+        on_failure(dev, f.__name__)
+      return None
+
+    return wrapper
+
+  return decorator
+
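+# A minimal usage sketch (env is a hypothetical LocalDeviceEnvironment; see
+# prepare_device below for a real use): the wrapper logs and swallows device
+# errors, returns None, and then calls on_failure(device, function_name).
+#
+#   @handle_shard_failures_with(on_failure=env.DenylistDevice)
+#   def check_device(device):
+#     device.WaitUntilFullyBooted()
+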
+
+def place_nomedia_on_device(dev, device_root):
+  """Places .nomedia file in test data root.
+
+  This helps to prevent system from scanning media files inside test data.
+
+  Args:
+    dev: Device to place .nomedia file.
+    device_root: Base path on device to place .nomedia file.
+  """
+
+  dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+  dev.WriteFile('%s/.nomedia' % device_root, 'https://crbug.com/796640')
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+  def __init__(self, args, output_manager, _error_func):
+    super(LocalDeviceEnvironment, self).__init__(output_manager)
+    self._current_try = 0
+    self._denylist = (device_denylist.Denylist(args.denylist_file)
+                      if args.denylist_file else None)
+    self._device_serials = args.test_devices
+    self._devices_lock = threading.Lock()
+    self._devices = None
+    self._concurrent_adb = args.enable_concurrent_adb
+    self._enable_device_cache = args.enable_device_cache
+    self._logcat_monitors = []
+    self._logcat_output_dir = args.logcat_output_dir
+    self._logcat_output_file = args.logcat_output_file
+    self._max_tries = 1 + args.num_retries
+    self._preferred_abis = None
+    self._recover_devices = args.recover_devices
+    self._skip_clear_data = args.skip_clear_data
+    self._tool_name = args.tool
+    self._trace_output = None
+    if hasattr(args, 'trace_output'):
+      self._trace_output = args.trace_output
+    self._trace_all = None
+    if hasattr(args, 'trace_all'):
+      self._trace_all = args.trace_all
+
+    devil_chromium.Initialize(
+        output_directory=constants.GetOutDirectory(),
+        adb_path=args.adb_path)
+
+    # Some things such as Forwarder require ADB to be in the environment path,
+    # while others like Devil's bundletool.py require Java on the path.
+    adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath())
+    if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+      os.environ['PATH'] = os.pathsep.join(
+          [adb_dir, host_paths.JAVA_PATH, os.environ['PATH']])
+
+  #override
+  def SetUp(self):
+    if self.trace_output and self._trace_all:
+      to_include = [r"pylib\..*", r"devil\..*", "__main__"]
+      to_exclude = ["logging"]
+      instrumentation_tracing.start_instrumenting(self.trace_output, to_include,
+                                                  to_exclude)
+    elif self.trace_output:
+      self.EnableTracing()
+
+  # Must be called before accessing |devices|.
+  def SetPreferredAbis(self, abis):
+    assert self._devices is None
+    self._preferred_abis = abis
+
+  def _InitDevices(self):
+    device_arg = []
+    if self._device_serials:
+      device_arg = self._device_serials
+
+    self._devices = device_utils.DeviceUtils.HealthyDevices(
+        self._denylist,
+        retries=5,
+        enable_usb_resets=True,
+        enable_device_files_cache=self._enable_device_cache,
+        default_retries=self._max_tries - 1,
+        device_arg=device_arg,
+        abis=self._preferred_abis)
+
+    if self._logcat_output_file:
+      self._logcat_output_dir = tempfile.mkdtemp()
+
+    @handle_shard_failures_with(on_failure=self.DenylistDevice)
+    def prepare_device(d):
+      d.WaitUntilFullyBooted()
+
+      if self._enable_device_cache:
+        cache_path = _DeviceCachePath(d)
+        if os.path.exists(cache_path):
+          logging.info('Using device cache: %s', cache_path)
+          with open(cache_path) as f:
+            d.LoadCacheData(f.read())
+          # Delete cached file so that any exceptions cause it to be cleared.
+          os.unlink(cache_path)
+
+      if self._logcat_output_dir:
+        logcat_file = os.path.join(
+            self._logcat_output_dir,
+            '%s_%s' % (d.adb.GetDeviceSerial(),
+                       datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
+        monitor = logcat_monitor.LogcatMonitor(
+            d.adb, clear=True, output_file=logcat_file)
+        self._logcat_monitors.append(monitor)
+        monitor.Start()
+
+    self.parallel_devices.pMap(prepare_device)
+
+  @property
+  def current_try(self):
+    return self._current_try
+
+  def IncrementCurrentTry(self):
+    self._current_try += 1
+
+  def ResetCurrentTry(self):
+    self._current_try = 0
+
+  @property
+  def denylist(self):
+    return self._denylist
+
+  @property
+  def concurrent_adb(self):
+    return self._concurrent_adb
+
+  @property
+  def devices(self):
+    # Initialize lazily so that host-only tests do not fail when no devices are
+    # attached.
+    if self._devices is None:
+      self._InitDevices()
+    return self._devices
+
+  @property
+  def max_tries(self):
+    return self._max_tries
+
+  @property
+  def parallel_devices(self):
+    return parallelizer.SyncParallelizer(self.devices)
+
+  @property
+  def recover_devices(self):
+    return self._recover_devices
+
+  @property
+  def skip_clear_data(self):
+    return self._skip_clear_data
+
+  @property
+  def tool(self):
+    return self._tool_name
+
+  @property
+  def trace_output(self):
+    return self._trace_output
+
+  #override
+  def TearDown(self):
+    if self.trace_output and self._trace_all:
+      instrumentation_tracing.stop_instrumenting()
+    elif self.trace_output:
+      self.DisableTracing()
+
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    if not self._devices:
+      return
+
+    @handle_shard_failures_with(on_failure=self.DenylistDevice)
+    def tear_down_device(d):
+      # Write the cache even when not using it so that it will be ready the
+      # first time that it is enabled. Writing it every time is also necessary
+      # so that an invalid cache can be flushed just by disabling it for one
+      # run.
+      cache_path = _DeviceCachePath(d)
+      if os.path.exists(os.path.dirname(cache_path)):
+        with open(cache_path, 'w') as f:
+          f.write(d.DumpCacheData())
+          logging.info('Wrote device cache: %s', cache_path)
+      else:
+        logging.warning(
+            'Unable to write device cache as %s directory does not exist',
+            os.path.dirname(cache_path))
+
+    self.parallel_devices.pMap(tear_down_device)
+
+    for m in self._logcat_monitors:
+      try:
+        m.Stop()
+        m.Close()
+        _, temp_path = tempfile.mkstemp()
+        with open(m.output_file, 'r') as infile:
+          with open(temp_path, 'w') as outfile:
+            for line in infile:
+              outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line))
+        shutil.move(temp_path, m.output_file)
+      except base_error.BaseError:
+        logging.exception('Failed to stop logcat monitor for %s',
+                          m.adb.GetDeviceSerial())
+      except IOError:
+        logging.exception('Failed to locate logcat for device %s',
+                          m.adb.GetDeviceSerial())
+
+    if self._logcat_output_file:
+      file_utils.MergeFiles(
+          self._logcat_output_file,
+          [m.output_file for m in self._logcat_monitors
+           if os.path.exists(m.output_file)])
+      shutil.rmtree(self._logcat_output_dir)
+
+  def DenylistDevice(self, device, reason='local_device_failure'):
+    device_serial = device.adb.GetDeviceSerial()
+    if self._denylist:
+      self._denylist.Extend([device_serial], reason=reason)
+    with self._devices_lock:
+      self._devices = [d for d in self._devices if str(d) != device_serial]
+    logging.error('Device %s denylisted: %s', device_serial, reason)
+    if not self._devices:
+      raise device_errors.NoDevicesError(
+          'All devices were denylisted due to errors')
+
+  @staticmethod
+  def DisableTracing():
+    if not trace_event.trace_is_enabled():
+      logging.warning('Tracing is not running.')
+    else:
+      trace_event.trace_disable()
+
+  def EnableTracing(self):
+    if trace_event.trace_is_enabled():
+      logging.warning('Tracing is already running.')
+    else:
+      trace_event.trace_enable(self._trace_output)
diff --git a/src/build/android/pylib/local/device/local_device_gtest_run.py b/src/build/android/pylib/local/device/local_device_gtest_run.py
new file mode 100644
index 0000000..753556d
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_gtest_run.py
@@ -0,0 +1,891 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import collections
+import itertools
+import logging
+import math
+import os
+import posixpath
+import subprocess
+import shutil
+import time
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import logcat_monitor
+from devil.android import ports
+from devil.android.sdk import version_codes
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import google_storage_helper
+from pylib.utils import logdog_helper
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+_MAX_INLINE_FLAGS_LENGTH = 50  # Arbitrarily chosen.
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTest.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+    'org.chromium.native_test.NativeTest.CommandLineFlags')
+_EXTRA_COVERAGE_DEVICE_FILE = (
+    'org.chromium.native_test.NativeTest.CoverageDeviceFile')
+_EXTRA_STDOUT_FILE = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.StdoutFile')
+_EXTRA_TEST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.Test')
+_EXTRA_TEST_LIST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.TestList')
+
+_SECONDS_TO_NANOS = int(1e9)
+
+# Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the
+# host machine.
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+  'components_browsertests', 'content_unittests', 'content_browsertests',
+  'net_unittests', 'services_unittests', 'unit_tests'
+]
+
+# These are used for code coverage.
+_LLVM_PROFDATA_PATH = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                                   'llvm-build', 'Release+Asserts', 'bin',
+                                   'llvm-profdata')
+# Name of the file extension for profraw data files.
+_PROFRAW_FILE_EXTENSION = 'profraw'
+# Name of the file where profraw data files are merged.
+_MERGE_PROFDATA_FILE_NAME = 'coverage_merged.' + _PROFRAW_FILE_EXTENSION
+
+# No-op context manager. If we used Python 3, we could change this to
+# contextlib.ExitStack()
+class _NullContextManager(object):
+  def __enter__(self):
+    pass
+  def __exit__(self, *args):
+    pass
+
+
+def _GenerateSequentialFileNames(filename):
+  """Infinite generator of names: 'name.ext', 'name_1.ext', 'name_2.ext', ..."""
+  yield filename
+  base, ext = os.path.splitext(filename)
+  for i in itertools.count(1):
+    yield '%s_%d%s' % (base, i, ext)
+
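+# e.g. the first three names generated for 'perf_results.json' are
+# 'perf_results.json', 'perf_results_1.json', and 'perf_results_2.json'.
+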
+
+def _ExtractTestsFromFilter(gtest_filter):
+  """Returns the list of tests specified by the given filter.
+
+  Returns:
+    None if the device should be queried for the test list instead.
+  """
+  # An empty filter means all tests; '-' indicates an exclude filter.
+  if not gtest_filter or '-' in gtest_filter:
+    return None
+
+  patterns = gtest_filter.split(':')
+  # For a single pattern, allow it even if it has a wildcard so long as the
+  # wildcard comes at the end and there is at least one '.' to show the scope
+  # is not too large.
+  # This heuristic is not guaranteed to be faster, but it normally is.
+  if len(patterns) == 1 and patterns[0].endswith('*'):
+    no_suffix = patterns[0].rstrip('*')
+    if '*' not in no_suffix and '.' in no_suffix:
+      return patterns
+
+  if '*' in gtest_filter:
+    return None
+  return patterns
+
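+# Illustrative behavior of the heuristic above (filters are hypothetical):
+#   _ExtractTestsFromFilter('Foo.Bar:Foo.Baz')  -> ['Foo.Bar', 'Foo.Baz']
+#   _ExtractTestsFromFilter('FooTest.*')        -> ['FooTest.*']
+#   _ExtractTestsFromFilter('Foo*')             -> None (wildcard, no '.')
+#   _ExtractTestsFromFilter('Foo.Bar-Foo.Baz')  -> None (exclude filter)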
+
+def _GetDeviceTimeoutMultiplier():
+  # Emulated devices typically run 20-150x slower than real-time.
+  # Give a way to control this through the DEVICE_TIMEOUT_MULTIPLIER
+  # environment variable.
+  multiplier = os.getenv("DEVICE_TIMEOUT_MULTIPLIER")
+  if multiplier:
+    return int(multiplier)
+  return 1
+
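+# e.g. running the test runner with DEVICE_TIMEOUT_MULTIPLIER=20 in the
+# environment scales the shard and shell-command timeouts in this file by 20x.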
+
+def _MergeCoverageFiles(coverage_dir, profdata_dir):
+  """Merge coverage data files.
+
+  Each instrumentation activity generates a separate profraw data file. This
+  merges all profraw files in profdata_dir into a single file in
+  coverage_dir. This happens after each test, rather than waiting until all
+  tests have run, to reduce the footprint of the accumulated profraw files.
+
+  Args:
+    coverage_dir: The path to the coverage directory.
+    profdata_dir: The directory where the profraw data file(s) are located.
+
+  Returns:
+    None
+  """
+  # profdata_dir may not exist if pulling coverage files failed.
+  if not os.path.exists(profdata_dir):
+    logging.debug('Profraw directory does not exist.')
+    return
+
+  merge_file = os.path.join(coverage_dir, _MERGE_PROFDATA_FILE_NAME)
+  profraw_files = [
+      os.path.join(profdata_dir, f) for f in os.listdir(profdata_dir)
+      if f.endswith(_PROFRAW_FILE_EXTENSION)
+  ]
+
+  try:
+    logging.debug('Merging target profraw files into merged profraw file.')
+    subprocess_cmd = [
+        _LLVM_PROFDATA_PATH,
+        'merge',
+        '-o',
+        merge_file,
+        '-sparse=true',
+    ]
+    # Grow the merge file by merging it with itself and the new files.
+    if os.path.exists(merge_file):
+      subprocess_cmd.append(merge_file)
+    subprocess_cmd.extend(profraw_files)
+    output = subprocess.check_output(subprocess_cmd)
+    logging.debug('Merge output: %s', output)
+  except subprocess.CalledProcessError:
+    # Don't raise error as that will kill the test run. When code coverage
+    # generates a report, that will raise the error in the report generation.
+    logging.error(
+        'Failed to merge target profdata files to create merged profraw file.')
+
+  # Free up disk space on the bot, as all the data is now in the merge file.
+  for f in profraw_files:
+    os.remove(f)
+
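+# The merge above is roughly equivalent to running (file names illustrative):
+#   llvm-profdata merge -o coverage_merged.profraw -sparse=true \
+#       coverage_merged.profraw shard_0.profraw shard_1.profraw
+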
+
+def _PullCoverageFiles(device, device_coverage_dir, output_dir):
+  """Pulls coverage files on device to host directory.
+
+  Args:
+    device: The working device.
+    device_coverage_dir: The directory to store coverage data on device.
+    output_dir: The output directory on host.
+  """
+  try:
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    device.PullFile(device_coverage_dir, output_dir)
+    if not os.listdir(os.path.join(output_dir, 'profraw')):
+      logging.warning('No coverage data was generated for this run')
+  except (OSError, base_error.BaseError) as e:
+    logging.warning('Failed to handle coverage data after tests: %s', e)
+  finally:
+    device.RemovePath(device_coverage_dir, force=True, recursive=True)
+
+
+def _GetDeviceCoverageDir(device):
+  """Gets the directory to generate coverage data on device.
+
+  Args:
+    device: The working device.
+
+  Returns:
+    The directory path on the device.
+  """
+  return posixpath.join(device.GetExternalStoragePath(), 'chrome', 'test',
+                        'coverage', 'profraw')
+
+
+def _GetLLVMProfilePath(device_coverage_dir, suite, coverage_index):
+  """Gets 'LLVM_PROFILE_FILE' environment variable path.
+
+  Dumping data to only one file may cause warnings and data overwrites in
+  browser tests, so the pattern "%2m" is used to expand to two raw profiles
+  at runtime.
+
+  Args:
+    device_coverage_dir: The directory to generate data on device.
+    suite: Test suite name.
+    coverage_index: The incremental index for this test suite.
+
+  Returns:
+    The path pattern for environment variable 'LLVM_PROFILE_FILE'.
+  """
+  return posixpath.join(device_coverage_dir,
+                        '_'.join([suite,
+                                  str(coverage_index), '%2m.profraw']))
+
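+# For example (values are hypothetical), suite 'base_unittests' with
+# coverage_index 3 yields:
+#   <device_coverage_dir>/base_unittests_3_%2m.profraw
+# which the LLVM runtime expands into a pool of two profraw files at runtime.
+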
+
+class _ApkDelegate(object):
+  def __init__(self, test_instance, tool):
+    self._activity = test_instance.activity
+    self._apk_helper = test_instance.apk_helper
+    self._test_apk_incremental_install_json = (
+        test_instance.test_apk_incremental_install_json)
+    self._package = test_instance.package
+    self._runner = test_instance.runner
+    self._permissions = test_instance.permissions
+    self._suite = test_instance.suite
+    self._component = '%s/%s' % (self._package, self._runner)
+    self._extras = test_instance.extras
+    self._wait_for_java_debugger = test_instance.wait_for_java_debugger
+    self._tool = tool
+    self._coverage_dir = test_instance.coverage_dir
+    self._coverage_index = 0
+
+  def GetTestDataRoot(self, device):
+    # pylint: disable=no-self-use
+    return posixpath.join(device.GetExternalStoragePath(),
+                          'chromium_tests_root')
+
+  def Install(self, device):
+    if self._test_apk_incremental_install_json:
+      installer.Install(device, self._test_apk_incremental_install_json,
+                        apk=self._apk_helper, permissions=self._permissions)
+    else:
+      device.Install(
+          self._apk_helper,
+          allow_downgrade=True,
+          reinstall=True,
+          permissions=self._permissions)
+
+  def ResultsDirectory(self, device):
+    return device.GetApplicationDataDirectory(self._package)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    extras = dict(self._extras)
+    device_api = device.build_version_sdk
+
+    if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+      device_coverage_dir = _GetDeviceCoverageDir(device)
+      extras[_EXTRA_COVERAGE_DEVICE_FILE] = _GetLLVMProfilePath(
+          device_coverage_dir, self._suite, self._coverage_index)
+      self._coverage_index += 1
+
+    if ('timeout' in kwargs
+        and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras):
+      # Make sure the instrumentation doesn't kill the test before the
+      # scripts do. The provided timeout value is in seconds, but the
+      # instrumentation deals with nanoseconds because that's how Android
+      # handles time.
+      extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int(
+          kwargs['timeout'] * _SECONDS_TO_NANOS)
+
+    # pylint: disable=redefined-variable-type
+    command_line_file = _NullContextManager()
+    if flags:
+      if len(flags) > _MAX_INLINE_FLAGS_LENGTH:
+        command_line_file = device_temp_file.DeviceTempFile(device.adb)
+        device.WriteFile(command_line_file.name, '_ %s' % flags)
+        extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+      else:
+        extras[_EXTRA_COMMAND_LINE_FLAGS] = flags
+
+    test_list_file = _NullContextManager()
+    if test:
+      if len(test) > 1:
+        test_list_file = device_temp_file.DeviceTempFile(device.adb)
+        device.WriteFile(test_list_file.name, '\n'.join(test))
+        extras[_EXTRA_TEST_LIST] = test_list_file.name
+      else:
+        extras[_EXTRA_TEST] = test[0]
+    # pylint: enable=redefined-variable-type
+
+    # We need to use GetAppWritablePath here instead of GetExternalStoragePath
+    # since we will not have yet applied legacy storage permission workarounds
+    # on R+.
+    stdout_file = device_temp_file.DeviceTempFile(
+        device.adb, dir=device.GetAppWritablePath(), suffix='.gtest_out')
+    extras[_EXTRA_STDOUT_FILE] = stdout_file.name
+
+    if self._wait_for_java_debugger:
+      cmd = ['am', 'set-debug-app', '-w', self._package]
+      device.RunShellCommand(cmd, check_return=True)
+      logging.warning('*' * 80)
+      logging.warning('Waiting for debugger to attach to process: %s',
+                      self._package)
+      logging.warning('*' * 80)
+
+    with command_line_file, test_list_file, stdout_file:
+      try:
+        device.StartInstrumentation(
+            self._component, extras=extras, raw=False, **kwargs)
+      except device_errors.CommandFailedError:
+        logging.exception('gtest shard failed.')
+      except device_errors.CommandTimeoutError:
+        logging.exception('gtest shard timed out.')
+      except device_errors.DeviceUnreachableError:
+        logging.exception('gtest shard device unreachable.')
+      except Exception:
+        device.ForceStop(self._package)
+        raise
+      finally:
+        if self._coverage_dir and device_api >= version_codes.LOLLIPOP:
+          if not os.path.isdir(self._coverage_dir):
+            os.makedirs(self._coverage_dir)
+          # TODO(crbug.com/1179004) Use _MergeCoverageFiles when llvm-profdata
+          # not found is fixed.
+          _PullCoverageFiles(
+              device, device_coverage_dir,
+              os.path.join(self._coverage_dir, str(self._coverage_index)))
+
+      return device.ReadFile(stdout_file.name).splitlines()
+
+  def PullAppFiles(self, device, files, directory):
+    device_dir = device.GetApplicationDataDirectory(self._package)
+    host_dir = os.path.join(directory, str(device))
+    for f in files:
+      device_file = posixpath.join(device_dir, f)
+      host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+      for host_file in _GenerateSequentialFileNames(host_file):
+        if not os.path.exists(host_file):
+          break
+      device.PullFile(device_file, host_file)
+
+  def Clear(self, device):
+    device.ClearApplicationState(self._package, permissions=self._permissions)
+
+
+class _ExeDelegate(object):
+
+  def __init__(self, tr, test_instance, tool):
+    self._host_dist_dir = test_instance.exe_dist_dir
+    self._exe_file_name = os.path.basename(
+        test_instance.exe_dist_dir)[:-len('__dist')]
+    self._device_dist_dir = posixpath.join(
+        constants.TEST_EXECUTABLE_DIR,
+        os.path.basename(test_instance.exe_dist_dir))
+    self._test_run = tr
+    self._tool = tool
+    self._suite = test_instance.suite
+    self._coverage_dir = test_instance.coverage_dir
+    self._coverage_index = 0
+
+  def GetTestDataRoot(self, device):
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return posixpath.join(constants.TEST_EXECUTABLE_DIR, 'chromium_tests_root')
+
+  def Install(self, device):
+    # TODO(jbudorick): Look into merging this with normal data deps pushing if
+    # executables become supported on nonlocal environments.
+    device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)],
+                            delete_device_stale=True)
+
+  def ResultsDirectory(self, device):
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return constants.TEST_EXECUTABLE_DIR
+
+  def Run(self, test, device, flags=None, **kwargs):
+    tool = self._test_run.GetTool(device).GetTestWrapper()
+    if tool:
+      cmd = [tool]
+    else:
+      cmd = []
+    cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name))
+
+    if test:
+      cmd.append('--gtest_filter=%s' % ':'.join(test))
+    if flags:
+      # TODO(agrieve): This won't work if multiple flags are passed.
+      cmd.append(flags)
+    cwd = constants.TEST_EXECUTABLE_DIR
+
+    env = {
+      'LD_LIBRARY_PATH': self._device_dist_dir
+    }
+
+    if self._coverage_dir:
+      device_coverage_dir = _GetDeviceCoverageDir(device)
+      env['LLVM_PROFILE_FILE'] = _GetLLVMProfilePath(
+          device_coverage_dir, self._suite, self._coverage_index)
+      self._coverage_index += 1
+
+    if self._tool != 'asan':
+      env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS
+
+    try:
+      gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      external = device.GetExternalStoragePath()
+      env['GCOV_PREFIX'] = '%s/gcov' % external
+      env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+    except (device_errors.CommandFailedError, KeyError):
+      pass
+
+    # Executable tests return a nonzero exit code on test failure, which is
+    # fine from the test runner's perspective; thus check_return=False.
+    output = device.RunShellCommand(
+        cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
+
+    if self._coverage_dir:
+      _PullCoverageFiles(
+          device, device_coverage_dir,
+          os.path.join(self._coverage_dir, str(self._coverage_index)))
+
+    return output
+
+  def PullAppFiles(self, device, files, directory):
+    pass
+
+  def Clear(self, device):
+    device.KillAll(self._exe_file_name,
+                   blocking=True,
+                   timeout=30 * _GetDeviceTimeoutMultiplier(),
+                   quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+
+  def __init__(self, env, test_instance):
+    assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+    assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+    super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+    if self._test_instance.apk_helper:
+      self._installed_packages = [
+          self._test_instance.apk_helper.GetPackageName()
+      ]
+
+    # pylint: disable=redefined-variable-type
+    if self._test_instance.apk:
+      self._delegate = _ApkDelegate(self._test_instance, env.tool)
+    elif self._test_instance.exe_dist_dir:
+      self._delegate = _ExeDelegate(self, self._test_instance, self._env.tool)
+    if self._test_instance.isolated_script_test_perf_output:
+      self._test_perf_output_filenames = _GenerateSequentialFileNames(
+          self._test_instance.isolated_script_test_perf_output)
+    else:
+      self._test_perf_output_filenames = itertools.repeat(None)
+    # pylint: enable=redefined-variable-type
+    self._crashes = set()
+    self._servers = collections.defaultdict(list)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      def install_apk(dev):
+        # Install test APK.
+        self._delegate.Install(dev)
+
+      def push_test_data(dev):
+        # Push data dependencies.
+        device_root = self._delegate.GetTestDataRoot(dev)
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        local_device_environment.place_nomedia_on_device(dev, device_root)
+        dev.PushChangedFiles(
+            host_device_tuples_substituted,
+            delete_device_stale=True,
+            # Some gtest suites, e.g. unit_tests, have data dependencies that
+            # can take longer than the default timeout to push. See
+            # crbug.com/791632 for context.
+            timeout=600 * math.ceil(_GetDeviceTimeoutMultiplier() / 10))
+        if not host_device_tuples:
+          dev.RemovePath(device_root, force=True, recursive=True, rename=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      def init_tool_and_start_servers(dev):
+        tool = self.GetTool(dev)
+        tool.CopyFiles(dev)
+        tool.SetupEnvironment()
+
+        try:
+          # See https://crbug.com/1030827.
+          # This is a hack that may break in the future. We're relying on the
+          # fact that adb doesn't use ipv6 for its server, and so doesn't
+          # listen on ipv6, but ssh remote forwarding does. 5037 is the port
+          # number adb uses for its server.
+          if "[::1]:5037" in subprocess.check_output(
+              "ss -o state listening 'sport = 5037'", shell=True):
+            logging.error(
+                'Test Server cannot be started with a remote-forwarded adb '
+                'server. Continuing anyway, but some tests may fail.')
+            return
+        except subprocess.CalledProcessError:
+          pass
+
+        self._servers[str(dev)] = []
+        if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+          self._servers[str(dev)].append(
+              local_test_server_spawner.LocalTestServerSpawner(
+                  ports.AllocateTestServerPort(), dev, tool))
+
+        for s in self._servers[str(dev)]:
+          s.SetUp()
+
+      def bind_crash_handler(step, dev):
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      # Explicitly enable root to ensure that tests run under deterministic
+      # conditions. Without this explicit call, EnableRoot() is called from
+      # push_test_data() when PushChangedFiles() determines that it should use
+      # _PushChangedFilesZipped(), which it does only most of the time.
+      # Root is required (amongst maybe other reasons) to pull the results file
+      # from the device, since it lives within the application's data directory
+      # (via GetApplicationDataDirectory()).
+      device.EnableRoot()
+
+      steps = [
+          bind_crash_handler(s, device)
+          for s in (install_apk, push_test_data, init_tool_and_start_servers)]
+      if self._env.concurrent_adb:
+        reraiser_thread.RunAsync(steps)
+      else:
+        for step in steps:
+          step()
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  #override
+  def _CreateShards(self, tests):
+    # _crashes contains tests that may crash and prevent the tests that
+    # follow them in the same shard from running. Create a separate shard
+    # for each such testcase so that the other tests can still run.
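+    # e.g. with _crashes = {'A.b'} and tests = ['A.b', 'C.d', 'E.f'], this
+    # yields [['A.b']] followed by partitions of ['C.d', 'E.f'].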
+    device_count = len(self._env.devices)
+    shards = []
+
+    # Add shards with only one suspect testcase.
+    shards += [[crash] for crash in self._crashes if crash in tests]
+
+    # Delete suspect testcase from tests.
+    tests = [test for test in tests if not test in self._crashes]
+
+    max_shard_size = self._test_instance.test_launcher_batch_limit
+
+    shards.extend(self._PartitionTests(tests, device_count, max_shard_size))
+    return shards
+
+  #override
+  def _GetTests(self):
+    if self._test_instance.extract_test_list_from_filter:
+      # When the exact list of tests to run is given via command-line (e.g. when
+      # locally iterating on a specific test), skip querying the device (which
+      # takes ~3 seconds).
+      tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter)
+      if tests:
+        return tests
+
+    # Even when there's only one device, it still makes sense to retrieve the
+    # test list so that tests can be split up and run in batches rather than all
+    # at once (since test output is not streamed).
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.DenylistDevice)
+    def list_tests(dev):
+      timeout = 30 * _GetDeviceTimeoutMultiplier()
+      retries = 1
+      if self._test_instance.wait_for_java_debugger:
+        timeout = None
+
+      flags = [
+          f for f in self._test_instance.flags
+          if f not in ['--wait-for-debugger', '--wait-for-java-debugger']
+      ]
+      flags.append('--gtest_list_tests')
+
+      # TODO(crbug.com/726880): Remove retries when no longer necessary.
+      for i in range(0, retries+1):
+        logging.info('flags:')
+        for f in flags:
+          logging.info('  %s', f)
+
+        with self._ArchiveLogcat(dev, 'list_tests'):
+          raw_test_list = crash_handler.RetryOnSystemCrash(
+              lambda d: self._delegate.Run(
+                  None, d, flags=' '.join(flags), timeout=timeout),
+              device=dev)
+
+        tests = gtest_test_instance.ParseGTestListTests(raw_test_list)
+        if not tests:
+          logging.info('No tests found. Output:')
+          for l in raw_test_list:
+            logging.info('  %s', l)
+          if i < retries:
+            logging.info('Retrying...')
+        else:
+          break
+      return tests
+
+    # Query all devices in case one fails.
+    test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+    # If all devices failed to list tests, raise an exception.
+    # Check that tl is not None and is not empty.
+    if all(not tl for tl in test_lists):
+      raise device_errors.CommandFailedError(
+          'Failed to list tests on any device')
+    tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+    tests = self._test_instance.FilterTests(tests)
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  def _UploadTestArtifacts(self, device, test_artifacts_dir):
+    # TODO(jbudorick): Reconcile this with the output manager once
+    # https://codereview.chromium.org/2933993002/ lands.
+    if test_artifacts_dir:
+      with tempfile_ext.NamedTemporaryDirectory() as test_artifacts_host_dir:
+        device.PullFile(test_artifacts_dir.name, test_artifacts_host_dir)
+        with tempfile_ext.NamedTemporaryDirectory() as temp_zip_dir:
+          zip_base_name = os.path.join(temp_zip_dir, 'test_artifacts')
+          test_artifacts_zip = shutil.make_archive(
+              zip_base_name, 'zip', test_artifacts_host_dir)
+          link = google_storage_helper.upload(
+              google_storage_helper.unique_name(
+                  'test_artifacts', device=device),
+              test_artifacts_zip,
+              bucket='%s/test_artifacts' % (
+                  self._test_instance.gs_test_artifacts_bucket))
+          logging.info('Uploading test artifacts to %s.', link)
+          return link
+    return None
+
+  def _PullRenderTestOutput(self, device, render_test_output_device_dir):
+    # We pull the render tests into a temp directory then copy them over
+    # individually. Otherwise we end up with a temporary directory name
+    # in the host output directory.
+    with tempfile_ext.NamedTemporaryDirectory() as tmp_host_dir:
+      try:
+        device.PullFile(render_test_output_device_dir, tmp_host_dir)
+      except device_errors.CommandFailedError:
+        logging.exception('Failed to pull render test output dir %s',
+                          render_test_output_device_dir)
+      temp_host_dir = os.path.join(
+          tmp_host_dir, os.path.basename(render_test_output_device_dir))
+      for output_file in os.listdir(temp_host_dir):
+        src_path = os.path.join(temp_host_dir, output_file)
+        dst_path = os.path.join(self._test_instance.render_test_output_dir,
+                                output_file)
+        shutil.move(src_path, dst_path)
+
+  @contextlib.contextmanager
+  def _ArchiveLogcat(self, device, test):
+    if isinstance(test, str):
+      desc = test
+    else:
+      desc = hash(tuple(test))
+
+    stream_name = 'logcat_%s_shard%s_%s_%s' % (
+        desc, self._test_instance.external_shard_index,
+        time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+
+    logcat_file = None
+    logmon = None
+    try:
+      with self._env.output_manager.ArchivedTempfile(stream_name,
+                                                     'logcat') as logcat_file:
+        with logcat_monitor.LogcatMonitor(
+            device.adb,
+            filter_specs=local_device_environment.LOGCAT_FILTERS,
+            output_file=logcat_file.name,
+            check_error=False) as logmon:
+          with contextlib_ext.Optional(trace_event.trace(str(test)),
+                                       self._env.trace_output):
+            yield logcat_file
+    finally:
+      if logmon:
+        logmon.Close()
+      if logcat_file and logcat_file.Link():
+        logging.info('Logcat saved to %s', logcat_file.Link())
+
+  #override
+  def _RunTest(self, device, test):
+    # Run the test.
+    timeout = (self._test_instance.shard_timeout *
+               self.GetTool(device).GetTimeoutScale() *
+               _GetDeviceTimeoutMultiplier())
+    if self._test_instance.wait_for_java_debugger:
+      timeout = None
+    if self._test_instance.store_tombstones:
+      tombstones.ClearAllTombstones(device)
+    test_perf_output_filename = next(self._test_perf_output_filenames)
+
+    if self._test_instance.isolated_script_test_output:
+      suffix = '.json'
+    else:
+      suffix = '.xml'
+
+    with device_temp_file.DeviceTempFile(
+        adb=device.adb,
+        dir=self._delegate.ResultsDirectory(device),
+        suffix=suffix) as device_tmp_results_file:
+      with contextlib_ext.Optional(
+          device_temp_file.NamedDeviceTemporaryDirectory(
+              adb=device.adb, dir='/sdcard/'),
+          self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir:
+        with (contextlib_ext.Optional(
+            device_temp_file.DeviceTempFile(
+                adb=device.adb, dir=self._delegate.ResultsDirectory(device)),
+            test_perf_output_filename)) as isolated_script_test_perf_output:
+          with contextlib_ext.Optional(
+              device_temp_file.NamedDeviceTemporaryDirectory(adb=device.adb,
+                                                             dir='/sdcard/'),
+              self._test_instance.render_test_output_dir
+          ) as render_test_output_dir:
+
+            flags = list(self._test_instance.flags)
+            if self._test_instance.enable_xml_result_parsing:
+              flags.append('--gtest_output=xml:%s' %
+                           device_tmp_results_file.name)
+
+            if self._test_instance.gs_test_artifacts_bucket:
+              flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name)
+
+            if self._test_instance.isolated_script_test_output:
+              flags.append('--isolated-script-test-output=%s' %
+                           device_tmp_results_file.name)
+
+            if test_perf_output_filename:
+              flags.append('--isolated_script_test_perf_output=%s' %
+                           isolated_script_test_perf_output.name)
+
+            if self._test_instance.render_test_output_dir:
+              flags.append('--render-test-output-dir=%s' %
+                           render_test_output_dir.name)
+
+            logging.info('flags:')
+            for f in flags:
+              logging.info('  %s', f)
+
+            with self._ArchiveLogcat(device, test) as logcat_file:
+              output = self._delegate.Run(test,
+                                          device,
+                                          flags=' '.join(flags),
+                                          timeout=timeout,
+                                          retries=0)
+
+            if self._test_instance.enable_xml_result_parsing:
+              try:
+                gtest_xml = device.ReadFile(device_tmp_results_file.name)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull gtest results XML file %s',
+                                  device_tmp_results_file.name)
+                gtest_xml = None
+
+            if self._test_instance.isolated_script_test_output:
+              try:
+                gtest_json = device.ReadFile(device_tmp_results_file.name)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull gtest results JSON file %s',
+                                  device_tmp_results_file.name)
+                gtest_json = None
+
+            if test_perf_output_filename:
+              try:
+                device.PullFile(isolated_script_test_perf_output.name,
+                                test_perf_output_filename)
+              except device_errors.CommandFailedError:
+                logging.exception('Failed to pull chartjson results %s',
+                                  isolated_script_test_perf_output.name)
+
+            test_artifacts_url = self._UploadTestArtifacts(
+                device, test_artifacts_dir)
+
+            if render_test_output_dir:
+              self._PullRenderTestOutput(device, render_test_output_dir.name)
+
+    for s in self._servers[str(device)]:
+      s.Reset()
+    if self._test_instance.app_files:
+      self._delegate.PullAppFiles(device, self._test_instance.app_files,
+                                  self._test_instance.app_file_dir)
+    if not self._env.skip_clear_data:
+      self._delegate.Clear(device)
+
+    for l in output:
+      logging.info(l)
+
+    # Parse the output.
+    # TODO(jbudorick): Transition test scripts away from parsing stdout.
+    if self._test_instance.enable_xml_result_parsing:
+      results = gtest_test_instance.ParseGTestXML(gtest_xml)
+    elif self._test_instance.isolated_script_test_output:
+      results = gtest_test_instance.ParseGTestJSON(gtest_json)
+    else:
+      results = gtest_test_instance.ParseGTestOutput(
+          output, self._test_instance.symbolizer, device.product_cpu_abi)
+
+    tombstones_url = None
+    for r in results:
+      if logcat_file:
+        r.SetLink('logcat', logcat_file.Link())
+
+      if self._test_instance.gs_test_artifacts_bucket:
+        r.SetLink('test_artifacts', test_artifacts_url)
+
+      if r.GetType() == base_test_result.ResultType.CRASH:
+        self._crashes.add(r.GetName())
+        if self._test_instance.store_tombstones:
+          if not tombstones_url:
+            resolved_tombstones = tombstones.ResolveTombstones(
+                device,
+                resolve_all_tombstones=True,
+                include_stack_symbols=False,
+                wipe_tombstones=True)
+            stream_name = 'tombstones_%s_%s' % (
+                time.strftime('%Y%m%dT%H%M%S', time.localtime()),
+                device.serial)
+            tombstones_url = logdog_helper.text(
+                stream_name, '\n'.join(resolved_tombstones))
+          r.SetLink('tombstones', tombstones_url)
+
+    tests_stripped_disabled_prefix = set()
+    for t in test:
+      tests_stripped_disabled_prefix.add(
+          gtest_test_instance.TestNameWithoutDisabledPrefix(t))
+    not_run_tests = tests_stripped_disabled_prefix.difference(
+        set(r.GetName() for r in results))
+    return results, list(not_run_tests) if results else None
+
+  #override
+  def TearDown(self):
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      for s in self._servers.get(str(dev), []):
+        s.TearDown()
+
+      tool = self.GetTool(dev)
+      tool.CleanUpEnvironment()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
diff --git a/src/build/android/pylib/local/device/local_device_gtest_run_test.py b/src/build/android/pylib/local/device/local_device_gtest_run_test.py
new file mode 100755
index 0000000..b08b24b
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_gtest_run_test.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env vpython
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for local_device_gtest_test_run."""
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import os
+import tempfile
+import unittest
+
+from pylib.gtest import gtest_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from py_utils import tempfile_ext
+
+import mock  # pylint: disable=import-error
+
+
+class LocalDeviceGtestRunTest(unittest.TestCase):
+  def setUp(self):
+    self._obj = local_device_gtest_run.LocalDeviceGtestRun(
+        mock.MagicMock(spec=local_device_environment.LocalDeviceEnvironment),
+        mock.MagicMock(spec=gtest_test_instance.GtestTestInstance))
+
+  def testExtractTestsFromFilter(self):
+    # Checks splitting by colons.
+    self.assertEqual([
+        'b17',
+        'm4e3',
+        'p51',
+    ], local_device_gtest_run._ExtractTestsFromFilter('b17:m4e3:p51'))
+    # Checks the '-' sign.
+    self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('-mk2'))
+    # Checks more than one asterisk.
+    self.assertIsNone(
+        local_device_gtest_run._ExtractTestsFromFilter('.mk2*:.M67*'))
+    # Checks just an asterisk without a period.
+    self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilter('M67*'))
+    # Checks an asterisk at the end with a period.
+    self.assertEqual(['.M67*'],
+                     local_device_gtest_run._ExtractTestsFromFilter('.M67*'))
+
+  def testGetLLVMProfilePath(self):
+    path = local_device_gtest_run._GetLLVMProfilePath('test_dir', 'sr71', '5')
+    self.assertEqual(path, os.path.join('test_dir', 'sr71_5_%2m.profraw'))
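+    # ('%2m' is expanded by the LLVM profiling runtime into a pool of two
+    # merged .profraw files that concurrent processes can share.)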
+
+  @mock.patch('subprocess.check_output')
+  def testMergeCoverageFiles(self, mock_sub):
+    with tempfile_ext.NamedTemporaryDirectory() as cov_tempd:
+      pro_tempd = os.path.join(cov_tempd, 'profraw')
+      os.mkdir(pro_tempd)
+      profdata = tempfile.NamedTemporaryFile(
+          dir=pro_tempd,
+          delete=False,
+          suffix=local_device_gtest_run._PROFRAW_FILE_EXTENSION)
+      local_device_gtest_run._MergeCoverageFiles(cov_tempd, pro_tempd)
+      # Merged file should be deleted.
+      self.assertFalse(os.path.exists(profdata.name))
+      self.assertTrue(mock_sub.called)
+
+  @mock.patch('pylib.utils.google_storage_helper.upload')
+  def testUploadTestArtifacts(self, mock_gsh):
+    link = self._obj._UploadTestArtifacts(mock.MagicMock(), None)
+    self.assertFalse(mock_gsh.called)
+    self.assertIsNone(link)
+
+    result = 'A/10/warthog/path'
+    mock_gsh.return_value = result
+    with tempfile_ext.NamedTemporaryFile() as temp_f:
+      link = self._obj._UploadTestArtifacts(mock.MagicMock(), temp_f)
+    self.assertTrue(mock_gsh.called)
+    self.assertEqual(result, link)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000..7f16d6a
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,1471 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import copy
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import time
+
+from devil import base_error
+from devil.android import apk_helper
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import flag_changer
+from devil.android.sdk import shared_prefs
+from devil.android import logcat_monitor
+from devil.android.tools import system_app
+from devil.android.tools import webview_app
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import output_manager
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.output import remote_output_manager
+from pylib.utils import chrome_proxy_utils
+from pylib.utils import gold_utils
+from pylib.utils import instrumentation_tracing
+from pylib.utils import shared_preference_utils
+from py_trace_event import trace_event
+from py_trace_event import trace_time
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+with host_paths.SysPath(
+    os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0):
+  import jinja2  # pylint: disable=import-error
+  import markupsafe  # pylint: disable=import-error,unused-import
+
+
+_JINJA_TEMPLATE_DIR = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation')
+_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja'
+
+_WPR_GO_LINUX_X86_64_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                         'third_party', 'webpagereplay', 'bin',
+                                         'linux', 'x86_64', 'wpr')
+
+_TAG = 'test_runner_py'
+
+TIMEOUT_ANNOTATIONS = [
+    ('Manual', 10 * 60 * 60),
+    ('IntegrationTest', 10 * 60),
+    ('External', 10 * 60),
+    ('EnormousTest', 5 * 60),
+    ('LargeTest', 2 * 60),
+    ('MediumTest', 30),
+    ('SmallTest', 10),
+]
+
+# Account for Instrumentation and process init overhead.
+FIXED_TEST_TIMEOUT_OVERHEAD = 60
+
+# 30 minute max timeout for an instrumentation invocation to avoid shard
+# timeouts when tests never finish. The shard timeout is currently 60 minutes,
+# so this needs to be less than that.
+MAX_BATCH_TEST_TIMEOUT = 30 * 60
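+# Worked example of the batch timeout computed in _RunTest below: a batch of
+# three 'MediumTest' cases gets min(MAX_BATCH_TEST_TIMEOUT,
+# FIXED_TEST_TIMEOUT_OVERHEAD + 30 + 30 + 30) = min(1800, 150) = 150 seconds,
+# while twenty 'EnormousTest' cases (60 + 20 * 300 = 6060) would be capped at
+# the 1800-second batch maximum.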
+
+LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v', 'DEBUG:I',
+                  'StrictMode:D', '%s:I' % _TAG]
+
+EXTRA_SCREENSHOT_FILE = (
+    'org.chromium.base.test.ScreenshotOnFailureStatement.ScreenshotFile')
+
+EXTRA_UI_CAPTURE_DIR = (
+    'org.chromium.base.test.util.Screenshooter.ScreenshotDir')
+
+EXTRA_TRACE_FILE = ('org.chromium.base.test.BaseJUnit4ClassRunner.TraceFile')
+
+_EXTRA_TEST_LIST = (
+    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList')
+
+_EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.'
+                             'ChromeUiApplicationTestRule.PackageUnderTest')
+
+FEATURE_ANNOTATION = 'Feature'
+RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest'
+WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory'
+WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest'
+
+_DEVICE_GOLD_DIR = 'skia_gold'
+# A map of Android product models to SDK ints.
+RENDER_TEST_MODEL_SDK_CONFIGS = {
+    # Android x86 emulator.
+    'Android SDK built for x86': [23],
+}
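+# The model/SDK combinations above are the ones with maintained Gold
+# baselines; _ProcessSkiaGoldRenderTestResults below treats any other
+# combination as unsupported and uploads its comparisons with 'ignore': '1'
+# unless the test sets fail_on_unsupported_configs.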
+
+_BATCH_SUFFIX = '_batch'
+_TEST_BATCH_MAX_GROUP_SIZE = 256
+
+
+@contextlib.contextmanager
+def _LogTestEndpoints(device, test_name):
+  device.RunShellCommand(
+      ['log', '-p', 'i', '-t', _TAG, 'START %s' % test_name],
+      check_return=True)
+  try:
+    yield
+  finally:
+    device.RunShellCommand(
+        ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name],
+        check_return=True)
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner
+# is deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in xrange(10):
+      package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1)
+      if not package:
+        return False
+      # Assume the test package follows the ".test" suffix convention.
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+def _GetTargetPackageName(test_apk):
+  # apk_under_test does not work for smoke tests, where it is set to an
+  # apk that is not listed as the targetPackage in the test apk's manifest.
+  return test_apk.GetAllInstrumentations()[0]['android:targetPackage']
+
+
+class LocalDeviceInstrumentationTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceInstrumentationTestRun, self).__init__(
+        env, test_instance)
+    self._chrome_proxy = None
+    self._context_managers = collections.defaultdict(list)
+    self._flag_changers = {}
+    self._installed_packages = []
+    self._render_tests_device_output_dir = None
+    self._shared_prefs_to_restore = []
+    self._skia_gold_session_manager = None
+    self._skia_gold_work_dir = None
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    target_package = _GetTargetPackageName(self._test_instance.test_apk)
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      steps = []
+
+      if self._test_instance.replace_system_package:
+        @trace_event.traced
+        def replace_package(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          system_app_context = system_app.ReplaceSystemApp(
+              dev, self._test_instance.replace_system_package.package,
+              self._test_instance.replace_system_package.replacement_apk)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          system_app_context.__enter__()
+          # pylint: enable=no-member
+          self._context_managers[str(dev)].append(system_app_context)
+
+        steps.append(replace_package)
+
+      if self._test_instance.system_packages_to_remove:
+
+        @trace_event.traced
+        def remove_packages(dev):
+          logging.info('Attempting to remove system packages %s',
+                       self._test_instance.system_packages_to_remove)
+          system_app.RemoveSystemApps(
+              dev, self._test_instance.system_packages_to_remove)
+          logging.info('Done removing system packages')
+
+        # This should be at the front in case we're removing the package to make
+        # room for another APK installation later on. Since we disallow
+        # concurrent adb with this option specified, this should be safe.
+        steps.insert(0, remove_packages)
+
+      if self._test_instance.use_webview_provider:
+        @trace_event.traced
+        def use_webview_provider(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          webview_context = webview_app.UseWebViewProvider(
+              dev, self._test_instance.use_webview_provider)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          webview_context.__enter__()
+          # pylint: enable=no-member
+          self._context_managers[str(dev)].append(webview_context)
+
+        steps.append(use_webview_provider)
+
+      def install_helper(apk,
+                         modules=None,
+                         fake_modules=None,
+                         permissions=None,
+                         additional_locales=None):
+
+        @instrumentation_tracing.no_tracing
+        @trace_event.traced
+        def install_helper_internal(d, apk_path=None):
+          # pylint: disable=unused-argument
+          d.Install(apk,
+                    modules=modules,
+                    fake_modules=fake_modules,
+                    permissions=permissions,
+                    additional_locales=additional_locales)
+
+        return install_helper_internal
+
+      def incremental_install_helper(apk, json_path, permissions):
+
+        @trace_event.traced
+        def incremental_install_helper_internal(d, apk_path=None):
+          # pylint: disable=unused-argument
+          installer.Install(d, json_path, apk=apk, permissions=permissions)
+        return incremental_install_helper_internal
+
+      permissions = self._test_instance.test_apk.GetPermissions()
+      if self._test_instance.test_apk_incremental_install_json:
+        steps.append(
+            incremental_install_helper(
+                self._test_instance.test_apk,
+                self._test_instance.test_apk_incremental_install_json,
+                permissions))
+      else:
+        steps.append(
+            install_helper(
+                self._test_instance.test_apk, permissions=permissions))
+
+      steps.extend(
+          install_helper(apk) for apk in self._test_instance.additional_apks)
+
+      # We'll potentially need the package names later for setting app
+      # compatibility workarounds.
+      for apk in (self._test_instance.additional_apks +
+                  [self._test_instance.test_apk]):
+        self._installed_packages.append(apk_helper.GetPackageName(apk))
+
+      # The apk under test needs to be installed last since installing other
+      # apks after will unintentionally clear the fake module directory.
+      # TODO(wnwen): Make this more robust, fix crbug.com/1010954.
+      if self._test_instance.apk_under_test:
+        self._installed_packages.append(
+            apk_helper.GetPackageName(self._test_instance.apk_under_test))
+        permissions = self._test_instance.apk_under_test.GetPermissions()
+        if self._test_instance.apk_under_test_incremental_install_json:
+          steps.append(
+              incremental_install_helper(
+                  self._test_instance.apk_under_test,
+                  self._test_instance.apk_under_test_incremental_install_json,
+                  permissions))
+        else:
+          steps.append(
+              install_helper(self._test_instance.apk_under_test,
+                             self._test_instance.modules,
+                             self._test_instance.fake_modules, permissions,
+                             self._test_instance.additional_locales))
+
+      @trace_event.traced
+      def set_debug_app(dev):
+        # Set debug app in order to enable reading command line flags on user
+        # builds
+        cmd = ['am', 'set-debug-app', '--persistent']
+        if self._test_instance.wait_for_java_debugger:
+          cmd.append('-w')
+        cmd.append(target_package)
+        dev.RunShellCommand(cmd, check_return=True)
+
+      @trace_event.traced
+      def edit_shared_prefs(dev):
+        for setting in self._test_instance.edit_shared_prefs:
+          shared_pref = shared_prefs.SharedPrefs(
+              dev, setting['package'], setting['filename'],
+              use_encrypted_path=setting.get('supports_encrypted_path', False))
+          pref_to_restore = copy.copy(shared_pref)
+          pref_to_restore.Load()
+          self._shared_prefs_to_restore.append(pref_to_restore)
+
+          shared_preference_utils.ApplySharedPreferenceSetting(
+              shared_pref, setting)
+
+      @trace_event.traced
+      def set_vega_permissions(dev):
+        # Normally, installation of VrCore automatically grants storage
+        # permissions. However, since VrCore is part of the system image on
+        # the Vega standalone headset, we don't install the APK as part of test
+        # setup. Instead, grant the permissions here so that it can take
+        # screenshots.
+        if dev.product_name == 'vega':
+          dev.GrantPermissions('com.google.vr.vrcore', [
+              'android.permission.WRITE_EXTERNAL_STORAGE',
+              'android.permission.READ_EXTERNAL_STORAGE'
+          ])
+
+      @instrumentation_tracing.no_tracing
+      def push_test_data(dev):
+        device_root = posixpath.join(dev.GetExternalStoragePath(),
+                                     'chromium_tests_root')
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        logging.info('Pushing data dependencies.')
+        for h, d in host_device_tuples_substituted:
+          logging.debug('  %r -> %r', h, d)
+        local_device_environment.place_nomedia_on_device(dev, device_root)
+        dev.PushChangedFiles(host_device_tuples_substituted,
+                             delete_device_stale=True)
+        if not host_device_tuples_substituted:
+          dev.RunShellCommand(['rm', '-rf', device_root], check_return=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      @trace_event.traced
+      def create_flag_changer(dev):
+        if self._test_instance.flags:
+          self._CreateFlagChangerIfNeeded(dev)
+          logging.debug('Attempting to set flags: %r',
+                        self._test_instance.flags)
+          self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+        valgrind_tools.SetChromeTimeoutScale(
+            dev, self._test_instance.timeout_scale)
+
+      steps += [
+          set_debug_app, edit_shared_prefs, push_test_data, create_flag_changer,
+          set_vega_permissions
+      ]
+
+      def bind_crash_handler(step, dev):
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      steps = [bind_crash_handler(s, device) for s in steps]
+
+      try:
+        if self._env.concurrent_adb:
+          reraiser_thread.RunAsync(steps)
+        else:
+          for step in steps:
+            step()
+        if self._test_instance.store_tombstones:
+          tombstones.ClearAllTombstones(device)
+      except device_errors.CommandFailedError:
+        if not device.IsOnline():
+          raise
+
+        # A bugreport can be large and take a while to generate, so only capture
+        # one if we're using a remote manager.
+        if isinstance(
+            self._env.output_manager,
+            remote_output_manager.RemoteOutputManager):
+          logging.error(
+              'Error when setting up device for tests. Taking a bugreport for '
+              'investigation. This may take a while...')
+          report_name = '%s.bugreport' % device.serial
+          with self._env.output_manager.ArchivedTempfile(
+              report_name, 'bug_reports') as report_file:
+            device.TakeBugReport(report_file.name)
+          logging.error('Bug report saved to %s', report_file.Link())
+        raise
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+    # Created here instead of on a per-test basis so that the downloaded
+    # expectations can be re-used between tests, saving a significant amount
+    # of time.
+    self._skia_gold_work_dir = tempfile.mkdtemp()
+    self._skia_gold_session_manager = gold_utils.AndroidSkiaGoldSessionManager(
+        self._skia_gold_work_dir, self._test_instance.skia_gold_properties)
+    if self._test_instance.wait_for_java_debugger:
+      logging.warning('*' * 80)
+      logging.warning('Waiting for debugger to attach to process: %s',
+                      target_package)
+      logging.warning('*' * 80)
+
+  #override
+  def TearDown(self):
+    shutil.rmtree(self._skia_gold_work_dir)
+    self._skia_gold_work_dir = None
+    self._skia_gold_session_manager = None
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.DenylistDevice)
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      if str(dev) in self._flag_changers:
+        self._flag_changers[str(dev)].Restore()
+
+      # Remove package-specific configuration
+      dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True)
+
+      valgrind_tools.SetChromeTimeoutScale(dev, None)
+
+      # Restore any shared preference files that we stored during setup.
+      # This should be run sometime before the replace package contextmanager
+      # gets exited so we don't have to special case restoring files of
+      # replaced system apps.
+      for pref_to_restore in self._shared_prefs_to_restore:
+        pref_to_restore.Commit(force_commit=True)
+
+      # Context manager exit handlers are applied in reverse order
+      # of the enter handlers.
+      for context in reversed(self._context_managers[str(dev)]):
+        # See pylint-related comment above with __enter__()
+        # pylint: disable=no-member
+        context.__exit__(*sys.exc_info())
+        # pylint: enable=no-member
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
+  def _CreateFlagChangerIfNeeded(self, device):
+    if str(device) not in self._flag_changers:
+      cmdline_file = 'test-cmdline-file'
+      if self._test_instance.use_apk_under_test_flags_file:
+        if self._test_instance.package_info:
+          cmdline_file = self._test_instance.package_info.cmdline_file
+        else:
+          raise Exception('No PackageInfo found but'
+                          '--use-apk-under-test-flags-file is specified.')
+      self._flag_changers[str(device)] = flag_changer.FlagChanger(
+          device, cmdline_file)
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _GetTests(self):
+    if self._test_instance.junit4_runner_supports_listing:
+      raw_tests = self._GetTestsFromRunner()
+      tests = self._test_instance.ProcessRawTests(raw_tests)
+    else:
+      tests = self._test_instance.GetTests()
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  #override
+  def _GroupTests(self, tests):
+    batched_tests = dict()
+    other_tests = []
+    for test in tests:
+      annotations = test['annotations']
+      if 'Batch' in annotations and 'RequiresRestart' not in annotations:
+        batch_name = annotations['Batch']['value']
+        if not batch_name:
+          batch_name = test['class']
+
+        # Feature flags won't work in instrumentation tests unless the activity
+        # is restarted.
+        # Tests with identical features are grouped to minimize restarts.
+        if 'Batch$SplitByFeature' in annotations:
+          if 'Features$EnableFeatures' in annotations:
+            batch_name += '|enabled:' + ','.join(
+                sorted(annotations['Features$EnableFeatures']['value']))
+          if 'Features$DisableFeatures' in annotations:
+            batch_name += '|disabled:' + ','.join(
+                sorted(annotations['Features$DisableFeatures']['value']))
+
+        if batch_name not in batched_tests:
+          batched_tests[batch_name] = []
+        batched_tests[batch_name].append(test)
+      else:
+        other_tests.append(test)
+
+    all_tests = []
+    for _, tests in batched_tests.items():
+      tests.sort()  # Ensure a consistent ordering across external shards.
+      all_tests.extend([
+          tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE]
+          for i in range(0, len(tests), _TEST_BATCH_MAX_GROUP_SIZE)
+      ])
+    all_tests.extend(other_tests)
+    return all_tests
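+  # Illustrative example (hypothetical test): a test in class FooTest
+  # annotated with Batch('') and Batch$SplitByFeature plus
+  # Features$EnableFeatures(['A', 'B']) falls back to the class name for its
+  # batch and is grouped under 'FooTest|enabled:A,B', so it only shares a
+  # batch (and avoids activity restarts) with tests enabling the same
+  # feature set.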
+
+  #override
+  def _GetUniqueTestName(self, test):
+    return instrumentation_test_instance.GetUniqueTestName(test)
+
+  #override
+  def _RunTest(self, device, test):
+    extras = {}
+
+    # Provide package name under test for apk_under_test.
+    if self._test_instance.apk_under_test:
+      package_name = self._test_instance.apk_under_test.GetPackageName()
+      extras[_EXTRA_PACKAGE_UNDER_TEST] = package_name
+
+    flags_to_add = []
+    test_timeout_scale = None
+    if self._test_instance.coverage_directory:
+      if isinstance(test, list):
+        coverage_basename = '%s_%s_group' % (test[0]['class'],
+                                             test[0]['method'])
+      else:
+        coverage_basename = '%s_%s' % (test['class'], test['method'])
+      if self._test_instance.jacoco_coverage_type:
+        coverage_basename += "_" + self._test_instance.jacoco_coverage_type
+      extras['coverage'] = 'true'
+      coverage_directory = os.path.join(
+          device.GetExternalStoragePath(), 'chrome', 'test', 'coverage')
+      if not device.PathExists(coverage_directory):
+        device.RunShellCommand(['mkdir', '-p', coverage_directory],
+                               check_return=True)
+      coverage_device_file = os.path.join(coverage_directory, coverage_basename)
+      coverage_device_file += '.exec'
+      extras['coverageFile'] = coverage_device_file
+    # Save screenshot if screenshot dir is specified (save locally) or if
+    # a GS bucket is passed (save in cloud).
+    screenshot_device_file = device_temp_file.DeviceTempFile(
+        device.adb, suffix='.png', dir=device.GetExternalStoragePath())
+    extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
+
+    # Set up the screenshot directory. This needs to be done for each test so
+    # that we only get screenshots created by that test. It has to be on
+    # external storage since the default location doesn't allow file creation
+    # from the instrumentation test app on Android L and M.
+    ui_capture_dir = device_temp_file.NamedDeviceTemporaryDirectory(
+        device.adb,
+        dir=device.GetExternalStoragePath())
+    extras[EXTRA_UI_CAPTURE_DIR] = ui_capture_dir.name
+
+    if self._env.trace_output:
+      trace_device_file = device_temp_file.DeviceTempFile(
+          device.adb, suffix='.json', dir=device.GetExternalStoragePath())
+      extras[EXTRA_TRACE_FILE] = trace_device_file.name
+
+    target = '%s/%s' % (self._test_instance.test_package,
+                        self._test_instance.junit4_runner_class)
+    if isinstance(test, list):
+
+      def name_and_timeout(t):
+        n = instrumentation_test_instance.GetTestName(t)
+        i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+        return (n, i)
+
+      test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+      test_name = instrumentation_test_instance.GetTestName(
+          test[0]) + _BATCH_SUFFIX
+      extras['class'] = ','.join(test_names)
+      test_display_name = test_name
+      timeout = min(MAX_BATCH_TEST_TIMEOUT,
+                    FIXED_TEST_TIMEOUT_OVERHEAD + sum(timeouts))
+    else:
+      assert test['is_junit4']
+      test_name = instrumentation_test_instance.GetTestName(test)
+      test_display_name = self._GetUniqueTestName(test)
+
+      extras['class'] = test_name
+      if 'flags' in test and test['flags']:
+        flags_to_add.extend(test['flags'])
+      timeout = FIXED_TEST_TIMEOUT_OVERHEAD + self._GetTimeoutFromAnnotations(
+          test['annotations'], test_display_name)
+
+      test_timeout_scale = self._GetTimeoutScaleFromAnnotations(
+          test['annotations'])
+      if test_timeout_scale and test_timeout_scale != 1:
+        valgrind_tools.SetChromeTimeoutScale(
+            device, test_timeout_scale * self._test_instance.timeout_scale)
+
+    if self._test_instance.wait_for_java_debugger:
+      timeout = None
+    logging.info('preparing to run %s: %s', test_display_name, test)
+
+    if _IsRenderTest(test):
+      # TODO(mikecase): Add DeviceTempDirectory class and use that instead.
+      self._render_tests_device_output_dir = posixpath.join(
+          device.GetExternalStoragePath(), 'render_test_output_dir')
+      flags_to_add.append('--render-test-output-dir=%s' %
+                          self._render_tests_device_output_dir)
+
+    if _IsWPRRecordReplayTest(test):
+      wpr_archive_relative_path = _GetWPRArchivePath(test)
+      if not wpr_archive_relative_path:
+        raise RuntimeError('Could not find the WPR archive file path '
+                           'from annotation.')
+      wpr_archive_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                      wpr_archive_relative_path)
+      if not os.path.isdir(wpr_archive_path):
+        raise RuntimeError('WPRArchiveDirectory annotation should point '
+                           'to a directory only. '
+                           '{0} exists: {1}'.format(
+                               wpr_archive_path,
+                               os.path.exists(wpr_archive_path)))
+
+      # Some Linux versions do not like '#' in the name, so replace it
+      # with '__'.
+      archive_path = os.path.join(
+          wpr_archive_path,
+          _ReplaceUncommonChars(self._GetUniqueTestName(test)) + '.wprgo')
+
+      if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH):
+        # If we got to this stage, then we should have
+        # checkout_android set.
+        raise RuntimeError(
+            'WPR Go binary not found at {}'.format(_WPR_GO_LINUX_X86_64_PATH))
+      # Tells the server to use the binaries retrieved from CIPD.
+      chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary(
+          _WPR_GO_LINUX_X86_64_PATH)
+      self._chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+      self._chrome_proxy.wpr_record_mode = self._test_instance.wpr_record_mode
+      self._chrome_proxy.Start(device, archive_path)
+      flags_to_add.extend(self._chrome_proxy.GetFlags())
+
+    if flags_to_add:
+      self._CreateFlagChangerIfNeeded(device)
+      self._flag_changers[str(device)].PushFlags(add=flags_to_add)
+
+    time_ms = lambda: int(time.time() * 1e3)
+    start_ms = time_ms()
+
+    with ui_capture_dir:
+      with self._ArchiveLogcat(device, test_name) as logcat_file:
+        output = device.StartInstrumentation(
+            target, raw=True, extras=extras, timeout=timeout, retries=0)
+
+      duration_ms = time_ms() - start_ms
+
+      with contextlib_ext.Optional(
+          trace_event.trace('ProcessResults'),
+          self._env.trace_output):
+        output = self._test_instance.MaybeDeobfuscateLines(output)
+        # TODO(jbudorick): Make instrumentation tests output a JSON so this
+        # doesn't have to parse the output.
+        result_code, result_bundle, statuses = (
+            self._test_instance.ParseAmInstrumentRawOutput(output))
+        results = self._test_instance.GenerateTestResults(
+            result_code, result_bundle, statuses, duration_ms,
+            device.product_cpu_abi, self._test_instance.symbolizer)
+
+      if self._env.trace_output:
+        self._SaveTraceData(trace_device_file, device, test['class'])
+
+
+      def restore_flags():
+        if flags_to_add:
+          self._flag_changers[str(device)].Restore()
+
+      def restore_timeout_scale():
+        if test_timeout_scale:
+          valgrind_tools.SetChromeTimeoutScale(
+              device, self._test_instance.timeout_scale)
+
+      def handle_coverage_data():
+        if self._test_instance.coverage_directory:
+          try:
+            if not os.path.exists(self._test_instance.coverage_directory):
+              os.makedirs(self._test_instance.coverage_directory)
+            device.PullFile(coverage_device_file,
+                            self._test_instance.coverage_directory)
+            device.RemovePath(coverage_device_file, True)
+          except (OSError, base_error.BaseError) as e:
+            logging.warning('Failed to handle coverage data after tests: %s', e)
+
+      def handle_render_test_data():
+        if _IsRenderTest(test):
+          # Render tests do not cause test failure by default, so we have to
+          # check whether any failure images were generated even if the test
+          # does not fail.
+          try:
+            self._ProcessRenderTestResults(device, results)
+          finally:
+            device.RemovePath(self._render_tests_device_output_dir,
+                              recursive=True,
+                              force=True)
+            self._render_tests_device_output_dir = None
+
+      def pull_ui_screen_captures():
+        screenshots = []
+        for filename in device.ListDirectory(ui_capture_dir.name):
+          if filename.endswith('.json'):
+            screenshots.append(pull_ui_screenshot(filename))
+        if screenshots:
+          json_archive_name = 'ui_capture_%s_%s.json' % (
+              test_name.replace('#', '.'),
+              time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+          with self._env.output_manager.ArchivedTempfile(
+              json_archive_name, 'ui_capture', output_manager.Datatype.JSON
+              ) as json_archive:
+            json.dump(screenshots, json_archive)
+          _SetLinkOnResults(results, test_name, 'ui screenshot',
+                            json_archive.Link())
+
+      def pull_ui_screenshot(filename):
+        source_dir = ui_capture_dir.name
+        json_path = posixpath.join(source_dir, filename)
+        json_data = json.loads(device.ReadFile(json_path))
+        image_file_path = posixpath.join(source_dir, json_data['location'])
+        with self._env.output_manager.ArchivedTempfile(
+            json_data['location'], 'ui_capture', output_manager.Datatype.PNG
+            ) as image_archive:
+          device.PullFile(image_file_path, image_archive.name)
+        json_data['image_link'] = image_archive.Link()
+        return json_data
+
+      def stop_chrome_proxy():
+        # Removes the port forwarding
+        if self._chrome_proxy:
+          self._chrome_proxy.Stop(device)
+          if not self._chrome_proxy.wpr_replay_mode:
+            logging.info('WPR Record test generated archive file %s',
+                         self._chrome_proxy.wpr_archive_path)
+          self._chrome_proxy = None
+
+
+      # While constructing the TestResult objects, we can parallelize several
+      # steps that involve ADB. These steps should NOT depend on any info in
+      # the results! Things such as whether the test CRASHED have not yet been
+      # determined.
+      post_test_steps = [
+          restore_flags, restore_timeout_scale, stop_chrome_proxy,
+          handle_coverage_data, handle_render_test_data, pull_ui_screen_captures
+      ]
+      if self._env.concurrent_adb:
+        reraiser_thread.RunAsync(post_test_steps)
+      else:
+        for step in post_test_steps:
+          step()
+
+    if logcat_file:
+      _SetLinkOnResults(results, test_name, 'logcat', logcat_file.Link())
+
+    # Update the result name if the test used flags.
+    if flags_to_add:
+      for r in results:
+        if r.GetName() == test_name:
+          r.SetName(test_display_name)
+
+    # Add UNKNOWN results for any missing tests.
+    iterable_test = test if isinstance(test, list) else [test]
+    test_names = set(self._GetUniqueTestName(t) for t in iterable_test)
+    results_names = set(r.GetName() for r in results)
+    results.extend(
+        base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN)
+        for u in test_names.difference(results_names))
+
+    # Update the result type if we detect a crash.
+    try:
+      if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+        for r in results:
+          if r.GetType() == base_test_result.ResultType.UNKNOWN:
+            r.SetType(base_test_result.ResultType.CRASH)
+    except device_errors.CommandTimeoutError:
+      logging.warning('timed out when detecting/dismissing error dialogs')
+      # Attach screenshot to the test to help with debugging the dialog boxes.
+      self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+                           results, 'dialog_box_screenshot')
+
+    # The crash result can be set above or in
+    # InstrumentationTestRun.GenerateTestResults. If a test crashes,
+    # subprocesses such as the one used by EmbeddedTestServerRule can be left
+    # alive in a bad state, so kill them now.
+    for r in results:
+      if r.GetType() == base_test_result.ResultType.CRASH:
+        for apk in self._test_instance.additional_apks:
+          device.ForceStop(apk.GetPackageName())
+
+    # Handle failures by:
+    #   - optionally taking a screenshot
+    #   - logging the raw output at INFO level
+    #   - clearing the application state while persisting permissions
+    if any(r.GetType() not in (base_test_result.ResultType.PASS,
+                               base_test_result.ResultType.SKIP)
+           for r in results):
+      self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+                           results, 'post_test_screenshot')
+
+      logging.info('detected failure in %s. raw output:', test_display_name)
+      for l in output:
+        logging.info('  %s', l)
+      if (not self._env.skip_clear_data
+          and self._test_instance.package_info):
+        permissions = (
+            self._test_instance.apk_under_test.GetPermissions()
+            if self._test_instance.apk_under_test
+            else None)
+        device.ClearApplicationState(self._test_instance.package_info.package,
+                                     permissions=permissions)
+    else:
+      logging.debug('raw output from %s:', test_display_name)
+      for l in output:
+        logging.debug('  %s', l)
+
+    if self._test_instance.store_tombstones:
+      resolved_tombstones = tombstones.ResolveTombstones(
+          device,
+          resolve_all_tombstones=True,
+          include_stack_symbols=False,
+          wipe_tombstones=True,
+          tombstone_symbolizer=self._test_instance.symbolizer)
+      if resolved_tombstones:
+        tombstone_filename = 'tombstones_%s_%s' % (time.strftime(
+            '%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+        with self._env.output_manager.ArchivedTempfile(
+            tombstone_filename, 'tombstones') as tombstone_file:
+          tombstone_file.write('\n'.join(resolved_tombstones))
+
+        # Associate tombstones with first crashing test.
+        for result in results:
+          if result.GetType() == base_test_result.ResultType.CRASH:
+            result.SetLink('tombstones', tombstone_file.Link())
+            break
+        else:
+          # We don't always detect crashes correctly. In this case,
+          # associate with the first test.
+          results[0].SetLink('tombstones', tombstone_file.Link())
+
+    unknown_tests = set(r.GetName() for r in results
+                        if r.GetType() == base_test_result.ResultType.UNKNOWN)
+
+    # If a batched test crashes, the rest of the tests in that batch won't be
+    # run and will have their status left as unknown in results, so rerun
+    # them. (see crbug/1127935)
+    # The tests need to be "unbatched" so that on subsequent tries they can
+    # be run individually. This prevents an unrecognized crash from blocking
+    # the other tests in the batch. Running the tests as unbatched does not
+    # happen until a retry happens at the local_device_test_run/environment
+    # level.
+    tests_to_rerun = []
+    for t in iterable_test:
+      if self._GetUniqueTestName(t) in unknown_tests:
+        prior_attempts = t.get('run_attempts', 0)
+        t['run_attempts'] = prior_attempts + 1
+        # It's possible every test in the batch could crash, so we need to
+        # try up to as many times as there are tests.
+        if prior_attempts < len(results):
+          if t['annotations']:
+            t['annotations'].pop('Batch', None)
+          tests_to_rerun.append(t)
+
+    # If we have a crash that isn't recognized as a crash in a batch, the
+    # tests will be marked as unknown. Sometimes a test failure causes a
+    # crash, but the crash isn't recorded because the failure was detected
+    # first. If the UNKNOWN tests were rerun while unbatched and passed,
+    # they'd have an UNKNOWN, PASS status and be improperly marked as flaky,
+    # so change their status to NOTRUN and don't try rerunning them here.
+    # They will get rerun individually at the
+    # local_device_test_run/environment level, as the "Batch" annotation was
+    # removed.
+    found_crash_or_fail = False
+    for r in results:
+      if (r.GetType() == base_test_result.ResultType.CRASH
+          or r.GetType() == base_test_result.ResultType.FAIL):
+        found_crash_or_fail = True
+        break
+    if not found_crash_or_fail:
+      # Don't bother rerunning since the unrecognized crashes in
+      # the batch will keep failing.
+      tests_to_rerun = None
+      for r in results:
+        if r.GetType() == base_test_result.ResultType.UNKNOWN:
+          r.SetType(base_test_result.ResultType.NOTRUN)
+
+    return results, tests_to_rerun if tests_to_rerun else None
+
+  def _GetTestsFromRunner(self):
+    test_apk_path = self._test_instance.test_apk.path
+    pickle_path = '%s-runner.pickle' % test_apk_path
+    # For incremental APKs, the code doesn't live in the apk, so instead check
+    # the timestamp of the target's .stamp file.
+    if self._test_instance.test_apk_incremental_install_json:
+      with open(self._test_instance.test_apk_incremental_install_json) as f:
+        data = json.load(f)
+      out_dir = constants.GetOutDirectory()
+      test_mtime = max(
+          os.path.getmtime(os.path.join(out_dir, p)) for p in data['dex_files'])
+    else:
+      test_mtime = os.path.getmtime(test_apk_path)
+
+    try:
+      return instrumentation_test_instance.GetTestsFromPickle(
+          pickle_path, test_mtime)
+    except instrumentation_test_instance.TestListPickleException as e:
+      logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests by having %s list them.',
+                 self._test_instance.junit4_runner_class)
+    def list_tests(d):
+      def _run(dev):
+        # We need to use GetAppWritablePath instead of GetExternalStoragePath
+        # here because we will not have applied legacy storage workarounds on R+
+        # yet.
+        with device_temp_file.DeviceTempFile(
+            dev.adb, suffix='.json',
+            dir=dev.GetAppWritablePath()) as dev_test_list_json:
+          junit4_runner_class = self._test_instance.junit4_runner_class
+          test_package = self._test_instance.test_package
+          extras = {
+            'log': 'true',
+            # Workaround for https://github.com/mockito/mockito/issues/922
+            'notPackage': 'net.bytebuddy',
+          }
+          extras[_EXTRA_TEST_LIST] = dev_test_list_json.name
+          target = '%s/%s' % (test_package, junit4_runner_class)
+          timeout = 240
+          if self._test_instance.wait_for_java_debugger:
+            timeout = None
+          with self._ArchiveLogcat(dev, 'list_tests'):
+            test_list_run_output = dev.StartInstrumentation(
+                target, extras=extras, retries=0, timeout=timeout)
+          if any(test_list_run_output):
+            logging.error('Unexpected output while listing tests:')
+            for line in test_list_run_output:
+              logging.error('  %s', line)
+          with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+            host_file = os.path.join(host_dir, 'list_tests.json')
+            dev.PullFile(dev_test_list_json.name, host_file)
+            with open(host_file, 'r') as host_file:
+              return json.load(host_file)
+
+      return crash_handler.RetryOnSystemCrash(_run, d)
+
+    raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+    # If all devices failed to list tests, raise an exception.
+    # Check that tl is not None and is not empty.
+    if all(not tl for tl in raw_test_lists):
+      raise device_errors.CommandFailedError(
+          'Failed to list tests on any device')
+
+    # Get the first viable list of raw tests
+    raw_tests = [tl for tl in raw_test_lists if tl][0]
+
+    instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests)
+    return raw_tests
+
+  @contextlib.contextmanager
+  def _ArchiveLogcat(self, device, test_name):
+    stream_name = 'logcat_%s_shard%s_%s_%s' % (
+        test_name.replace('#', '.'), self._test_instance.external_shard_index,
+        time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial)
+
+    logcat_file = None
+    logmon = None
+    try:
+      with self._env.output_manager.ArchivedTempfile(
+          stream_name, 'logcat') as logcat_file:
+        with logcat_monitor.LogcatMonitor(
+            device.adb,
+            filter_specs=local_device_environment.LOGCAT_FILTERS,
+            output_file=logcat_file.name,
+            transform_func=self._test_instance.MaybeDeobfuscateLines,
+            check_error=False) as logmon:
+          with _LogTestEndpoints(device, test_name):
+            with contextlib_ext.Optional(
+                trace_event.trace(test_name),
+                self._env.trace_output):
+              yield logcat_file
+    finally:
+      if logmon:
+        logmon.Close()
+      if logcat_file and logcat_file.Link():
+        logging.info('Logcat saved to %s', logcat_file.Link())
+
+  def _SaveTraceData(self, trace_device_file, device, test_class):
+    trace_host_file = self._env.trace_output
+
+    if device.FileExists(trace_device_file.name):
+      try:
+        java_trace_json = device.ReadFile(trace_device_file.name)
+      except IOError:
+        raise Exception('error pulling trace file from device')
+      finally:
+        trace_device_file.close()
+
+      process_name = '%s (device %s)' % (test_class, device.serial)
+      process_hash = int(hashlib.md5(process_name).hexdigest()[:6], 16)
+
+      java_trace = json.loads(java_trace_json)
+      java_trace.sort(key=lambda event: event['ts'])
+
+      get_date_command = 'echo $EPOCHREALTIME'
+      device_time = device.RunShellCommand(get_date_command, single_line=True)
+      device_time = float(device_time) * 1e6
+      system_time = trace_time.Now()
+      time_difference = system_time - device_time
+
+      threads_to_add = set()
+      for event in java_trace:
+        # Ensure thread ID and thread name will be linked in the metadata.
+        threads_to_add.add((event['tid'], event['name']))
+
+        event['pid'] = process_hash
+
+        # Adjust time stamp to align with Python trace times (from
+        # trace_time.Now()).
+        event['ts'] += time_difference
+
+      for tid, thread_name in threads_to_add:
+        thread_name_metadata = {'pid': process_hash, 'tid': tid,
+                                'ts': 0, 'ph': 'M', 'cat': '__metadata',
+                                'name': 'thread_name',
+                                'args': {'name': thread_name}}
+        java_trace.append(thread_name_metadata)
+
+      process_name_metadata = {'pid': process_hash, 'tid': 0, 'ts': 0,
+                               'ph': 'M', 'cat': '__metadata',
+                               'name': 'process_name',
+                               'args': {'name': process_name}}
+      java_trace.append(process_name_metadata)
+
+      java_trace_json = json.dumps(java_trace)
+      java_trace_json = java_trace_json.rstrip(' ]')
+
+      with open(trace_host_file, 'r') as host_handle:
+        host_contents = host_handle.readline()
+
+      if host_contents:
+        java_trace_json = ',%s' % java_trace_json.lstrip(' [')
+
+      with open(trace_host_file, 'a') as host_handle:
+        host_handle.write(java_trace_json)
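+    # Worked example of the clock alignment above (assumed values): if the
+    # device's EPOCHREALTIME is 1000.000000 s (1.0e9 us) when the host's
+    # trace_time.Now() reads 1.5e9 us, then time_difference is 0.5e9 us and
+    # every Java event's 'ts' is shifted by +0.5e9 us, putting both traces on
+    # the host clock before the JSON is appended to self._env.trace_output.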
+
+  def _SaveScreenshot(self, device, screenshot_device_file, test_name, results,
+                      link_name):
+    screenshot_filename = '%s-%s.png' % (
+        test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+    if device.FileExists(screenshot_device_file.name):
+      with self._env.output_manager.ArchivedTempfile(
+          screenshot_filename, 'screenshot',
+          output_manager.Datatype.PNG) as screenshot_host_file:
+        try:
+          device.PullFile(screenshot_device_file.name,
+                          screenshot_host_file.name)
+        finally:
+          screenshot_device_file.close()
+      _SetLinkOnResults(results, test_name, link_name,
+                        screenshot_host_file.Link())
+
+  def _ProcessRenderTestResults(self, device, results):
+    if not self._render_tests_device_output_dir:
+      return
+    self._ProcessSkiaGoldRenderTestResults(device, results)
+
+  def _ProcessSkiaGoldRenderTestResults(self, device, results):
+    gold_dir = posixpath.join(self._render_tests_device_output_dir,
+                              _DEVICE_GOLD_DIR)
+    if not device.FileExists(gold_dir):
+      return
+
+    gold_properties = self._test_instance.skia_gold_properties
+    with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+      use_luci = not (gold_properties.local_pixel_tests
+                      or gold_properties.no_luci_auth)
+
+      # Pull everything at once instead of pulling individually, as it's
+      # slightly faster since each command over adb has some overhead compared
+      # to doing the same thing locally.
+      host_dir = os.path.join(host_dir, _DEVICE_GOLD_DIR)
+      device.PullFile(gold_dir, host_dir)
+      for image_name in os.listdir(host_dir):
+        if not image_name.endswith('.png'):
+          continue
+
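+        # Each image is paired with a '<render_name>.json' keys file; strip
+        # the '.png' suffix to recover the render name.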
+        render_name = image_name[:-4]
+        json_name = render_name + '.json'
+        json_path = os.path.join(host_dir, json_name)
+        image_path = os.path.join(host_dir, image_name)
+        full_test_name = None
+        if not os.path.exists(json_path):
+          _FailTestIfNecessary(results, full_test_name)
+          _AppendToLog(
+              results, full_test_name,
+              'Unable to find corresponding JSON file for image %s '
+              'when doing Skia Gold comparison.' % image_name)
+          continue
+
+        # Add 'ignore': '1' if a comparison failure would not be surfaced, as
+        # that implies that we aren't actively maintaining baselines for the
+        # test. This helps prevent unrelated CLs from getting comments posted to
+        # them.
+        # Additionally, add the ignore if we're running on a trybot and this is
+        # not our final retry attempt in order to prevent unrelated CLs from
+        # getting spammed if a test is flaky.
+        should_rewrite = False
+        with open(json_path) as infile:
+          # All the key/value pairs in the JSON file are strings, so convert
+          # to a bool.
+          json_dict = json.load(infile)
+          fail_on_unsupported = json_dict.get('fail_on_unsupported_configs',
+                                              'false')
+          fail_on_unsupported = fail_on_unsupported.lower() == 'true'
+          # Grab the full test name so we can associate the comparison with a
+          # particular test, which is necessary if tests are batched together.
+          # Remove the key/value pair from the JSON since we don't need/want to
+          # upload it to Gold.
+          full_test_name = json_dict.get('full_test_name')
+          if 'full_test_name' in json_dict:
+            should_rewrite = True
+            del json_dict['full_test_name']
+
+        running_on_unsupported = (
+            device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get(
+                device.product_model, []) and not fail_on_unsupported)
+        # TODO(skbug.com/10787): Remove the ignore on non-final retry once we
+        # fully switch over to using the Gerrit plugin for surfacing Gold
+        # information since it does not spam people with emails due to automated
+        # comments.
+        not_final_retry = self._env.current_try + 1 != self._env.max_tries
+        tryjob_but_not_final_retry =\
+            not_final_retry and gold_properties.IsTryjobRun()
+        should_ignore_in_gold =\
+            running_on_unsupported or tryjob_but_not_final_retry
+        # We still want to fail the test even if we're ignoring the image in
+        # Gold if we're running on a supported configuration, so
+        # should_ignore_in_gold != should_hide_failure.
+        should_hide_failure = running_on_unsupported
+        if should_ignore_in_gold:
+          should_rewrite = True
+          json_dict['ignore'] = '1'
+        if should_rewrite:
+          with open(json_path, 'w') as outfile:
+            json.dump(json_dict, outfile)
+
+        gold_session = self._skia_gold_session_manager.GetSkiaGoldSession(
+            keys_input=json_path)
+
+        try:
+          status, error = gold_session.RunComparison(
+              name=render_name,
+              png_file=image_path,
+              output_manager=self._env.output_manager,
+              use_luci=use_luci)
+        except Exception as e:  # pylint: disable=broad-except
+          _FailTestIfNecessary(results, full_test_name)
+          _AppendToLog(results, full_test_name,
+                       'Skia Gold comparison raised exception: %s' % e)
+          continue
+
+        if not status:
+          continue
+
+        # Don't fail the test if we ran on an unsupported configuration unless
+        # the test has explicitly opted in, as it's likely that baselines
+        # aren't maintained for that configuration.
+        if should_hide_failure:
+          if self._test_instance.skia_gold_properties.local_pixel_tests:
+            _AppendToLog(
+                results, full_test_name,
+                'Gold comparison for %s failed, but model %s with SDK '
+                '%d is not a supported configuration. This failure would be '
+                'ignored on the bots, but failing since tests are being run '
+                'locally.' %
+                (render_name, device.product_model, device.build_version_sdk))
+          else:
+            _AppendToLog(
+                results, full_test_name,
+                'Gold comparison for %s failed, but model %s with SDK '
+                '%d is not a supported configuration, so ignoring failure.' %
+                (render_name, device.product_model, device.build_version_sdk))
+            continue
+
+        _FailTestIfNecessary(results, full_test_name)
+        failure_log = (
+            'Skia Gold reported failure for RenderTest %s. See '
+            'RENDER_TESTS.md for how to fix this failure.' % render_name)
+        status_codes =\
+            self._skia_gold_session_manager.GetSessionClass().StatusCodes
+        if status == status_codes.AUTH_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Gold authentication failed with output %s' % error)
+        elif status == status_codes.INIT_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Gold initialization failed with output %s' % error)
+        elif status == status_codes.COMPARISON_FAILURE_REMOTE:
+          public_triage_link, internal_triage_link =\
+              gold_session.GetTriageLinks(render_name)
+          if not public_triage_link:
+            _AppendToLog(
+                results, full_test_name,
+                'Failed to get triage link for %s, raw output: %s' %
+                (render_name, error))
+            _AppendToLog(
+                results, full_test_name, 'Reason for no triage link: %s' %
+                gold_session.GetTriageLinkOmissionReason(render_name))
+            continue
+          if gold_properties.IsTryjobRun():
+            _SetLinkOnResults(results, full_test_name,
+                              'Public Skia Gold triage link for entire CL',
+                              public_triage_link)
+            _SetLinkOnResults(results, full_test_name,
+                              'Internal Skia Gold triage link for entire CL',
+                              internal_triage_link)
+          else:
+            _SetLinkOnResults(
+                results, full_test_name,
+                'Public Skia Gold triage link for %s' % render_name,
+                public_triage_link)
+            _SetLinkOnResults(
+                results, full_test_name,
+                'Internal Skia Gold triage link for %s' % render_name,
+                internal_triage_link)
+          _AppendToLog(results, full_test_name, failure_log)
+
+        elif status == status_codes.COMPARISON_FAILURE_LOCAL:
+          given_link = gold_session.GetGivenImageLink(render_name)
+          closest_link = gold_session.GetClosestImageLink(render_name)
+          diff_link = gold_session.GetDiffImageLink(render_name)
+
+          processed_template_output = _GenerateRenderTestHtml(
+              render_name, given_link, closest_link, diff_link)
+          with self._env.output_manager.ArchivedTempfile(
+              '%s.html' % render_name, 'gold_local_diffs',
+              output_manager.Datatype.HTML) as html_results:
+            html_results.write(processed_template_output)
+          _SetLinkOnResults(results, full_test_name, render_name,
+                            html_results.Link())
+          _AppendToLog(
+              results, full_test_name,
+              'See the %s link for the diff against the closest positive '
+              'image.' % render_name)
+        elif status == status_codes.LOCAL_DIFF_FAILURE:
+          _AppendToLog(results, full_test_name,
+                       'Failed to generate diffs from Gold: %s' % error)
+        else:
+          logging.error(
+              'Given unhandled SkiaGoldSession StatusCode %s with error %s',
+              status, error)
+
+  #override
+  def _ShouldRetry(self, test, result):
+    # We've tried to disable retries in the past with mixed results.
+    # See crbug.com/619055 for historical context and crbug.com/797002
+    # for ongoing efforts.
+    if 'Batch' in test['annotations'] and test['annotations']['Batch'][
+        'value'] == 'UnitTests':
+      return False
+    del test, result
+    return True
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  @classmethod
+  def _GetTimeoutScaleFromAnnotations(cls, annotations):
+    try:
+      return int(annotations.get('TimeoutScale', {}).get('value', 1))
+    except ValueError as e:
+      logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+      return 1
+
+  @classmethod
+  def _GetTimeoutFromAnnotations(cls, annotations, test_name):
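+    # TIMEOUT_ANNOTATIONS is a sequence of (annotation, seconds) pairs checked
+    # in order; the for/else falls through to the 60-second default only when
+    # no annotation matched (i.e. the loop finished without hitting break).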
+    for k, v in TIMEOUT_ANNOTATIONS:
+      if k in annotations:
+        timeout = v
+        break
+    else:
+      logging.warning('Using default 1 minute timeout for %s', test_name)
+      timeout = 60
+
+    timeout *= cls._GetTimeoutScaleFromAnnotations(annotations)
+
+    return timeout
+
+
+def _IsWPRRecordReplayTest(test):
+  """Determines whether a test or a list of tests is a WPR RecordReplay Test."""
+  if not isinstance(test, list):
+    test = [test]
+  return any([
+      WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+          FEATURE_ANNOTATION, {}).get('value', ()) for t in test
+  ])
+
+
+def _GetWPRArchivePath(test):
+  """Retrieves the archive path from the WPRArchiveDirectory annotation."""
+  return test['annotations'].get(WPR_ARCHIVE_FILE_PATH_ANNOTATION,
+                                 {}).get('value', ())
+
+
+def _ReplaceUncommonChars(original):
+  """Replaces uncommon characters with __."""
+  if not original:
+    raise ValueError('parameter should not be empty')
+
+  uncommon_chars = ['#']
+  for char in uncommon_chars:
+    original = original.replace(char, '__')
+  return original
+
+
+def _IsRenderTest(test):
+  """Determines if a test or list of tests has a RenderTest amongst them."""
+  if not isinstance(test, list):
+    test = [test]
+  return any([RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+              FEATURE_ANNOTATION, {}).get('value', ()) for t in test])
+
+
+def _GenerateRenderTestHtml(image_name, failure_link, golden_link, diff_link):
+  """Generates a RenderTest results page.
+
+  Displays the generated (failure) image, the golden image, and the diff
+  between them.
+
+  Args:
+    image_name: The name of the image whose comparison failed.
+    failure_link: The URL to the generated/failure image.
+    golden_link: The URL to the golden image.
+    diff_link: The URL to the diff image between the failure and golden images.
+
+  Returns:
+    A string containing the generated HTML.
+  """
+  jinja2_env = jinja2.Environment(
+      loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR), trim_blocks=True)
+  template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
+  # pylint: disable=no-member
+  return template.render(
+      test_name=image_name,
+      failure_link=failure_link,
+      golden_link=golden_link,
+      diff_link=diff_link)
+
+
+def _FailTestIfNecessary(results, full_test_name):
+  """Marks the given results as failed if it wasn't already.
+
+  Marks the result types as ResultType.FAIL unless they were already some sort
+  of failure type, e.g. ResultType.CRASH.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, failing all tests in the batch.',
+        full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    if result.GetType() not in [
+        base_test_result.ResultType.FAIL, base_test_result.ResultType.CRASH,
+        base_test_result.ResultType.TIMEOUT, base_test_result.ResultType.UNKNOWN
+    ]:
+      result.SetType(base_test_result.ResultType.FAIL)
+
+
+def _AppendToLog(results, full_test_name, line):
+  """Appends the given line to the end of the logs of the given results.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+    line: A string to be appended as a new line to the logs of |results|.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, appending to log of all tests '
+        'in the batch.', full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    result.SetLog(result.GetLog() + '\n' + line)
+
+
+def _SetLinkOnResults(results, full_test_name, link_name, link):
+  """Sets the given link on the given results.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+    link_name: A string containing the name of the link being set.
+    link: A string containing the link being set.
+  """
+  found_matching_test = _MatchingTestInResults(results, full_test_name)
+  if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name):
+    logging.error(
+        'Could not find result specific to %s, adding link to results of all '
+        'tests in the batch.', full_test_name)
+  for result in results:
+    if found_matching_test and result.GetName() != full_test_name:
+      continue
+    result.SetLink(link_name, link)
+
+
+def _MatchingTestInResults(results, full_test_name):
+  """Checks if any tests named |full_test_name| are in |results|.
+
+  Args:
+    results: A list of base_test_result.BaseTestResult objects.
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+
+  Returns:
+    True if one of the results in |results| has the same name as
+    |full_test_name|, otherwise False.
+  """
+  return any([r for r in results if r.GetName() == full_test_name])
+
+
+def _ShouldReportNoMatchingResult(full_test_name):
+  """Determines whether a failure to find a matching result is actually bad.
+
+  Args:
+    full_test_name: A string containing the full name of the test, e.g.
+        org.chromium.chrome.SomeTestClass#someTestMethod.
+
+  Returns:
+    False if the failure to find a matching result is expected and should not
+    be reported, otherwise True.
+  """
+  if full_test_name is not None and full_test_name.endswith(_BATCH_SUFFIX):
+    # Handle batched tests, whose reported name is the first test's name +
+    # "_batch".
+    return False
+  return True
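
A note on the string surgery at the end of _SaveTraceData above: it merges each device's events into one growing JSON array without ever re-parsing the host file. Below is a minimal, self-contained sketch of the same technique; the helper name is hypothetical and only the standard-library json module is used (no devil or output_manager dependencies):

```python
import json

def append_trace_events(host_path, events):
  """Appends a list of trace events to the JSON array file at host_path."""
  # Serialize, then drop the closing ']' so later events can still be added.
  events_json = json.dumps(events).rstrip(' ]')
  with open(host_path, 'r') as f:
    existing = f.readline()
  if existing:
    # The file already opens a JSON array; drop our '[' and join with ','.
    events_json = ',%s' % events_json.lstrip(' [')
  with open(host_path, 'a') as f:
    f.write(events_json)
```

The array is deliberately left unterminated; the Trace Event Format treats the trailing ']' as optional, so trace viewers can still load the merged file.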
diff --git a/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
new file mode 100755
index 0000000..7870cd1
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for local_device_instrumentation_test_run."""
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base import base_test_result
+from pylib.base import mock_environment
+from pylib.base import mock_test_instance
+from pylib.local.device import local_device_instrumentation_test_run
+
+
+class LocalDeviceInstrumentationTestRunTest(unittest.TestCase):
+
+  def setUp(self):
+    super(LocalDeviceInstrumentationTestRunTest, self).setUp()
+    self._env = mock_environment.MockEnvironment()
+    self._ti = mock_test_instance.MockTestInstance()
+    self._obj = (
+        local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun(
+            self._env, self._ti))
+
+  # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth
+  # retaining and remove these tests if not.
+
+  def testShouldRetry_failure(self):
+    test = {
+        'annotations': {},
+        'class': 'SadTest',
+        'method': 'testFailure',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testFailure', base_test_result.ResultType.FAIL)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testShouldRetry_retryOnFailure(self):
+    test = {
+        'annotations': {'RetryOnFailure': None},
+        'class': 'SadTest',
+        'method': 'testRetryOnFailure',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testRetryOnFailure', base_test_result.ResultType.FAIL)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testShouldRetry_notRun(self):
+    test = {
+        'annotations': {},
+        'class': 'SadTest',
+        'method': 'testNotRun',
+        'is_junit4': True,
+    }
+    result = base_test_result.BaseTestResult(
+        'SadTest.testNotRun', base_test_result.ResultType.NOTRUN)
+    self.assertTrue(self._obj._ShouldRetry(test, result))
+
+  def testIsWPRRecordReplayTest_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['WPRRecordReplayTest', 'dummy']
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertTrue(
+        local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+  def testIsWPRRecordReplayTest_noMatchedKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['abc', 'dummy']
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(
+        local_device_instrumentation_test_run._IsWPRRecordReplayTest(test))
+
+  def testGetWPRArchivePath_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'WPRArchiveDirectory': {
+                'value': 'abc'
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertEqual(
+        local_device_instrumentation_test_run._GetWPRArchivePath(test), 'abc')
+
+  def testGetWPRArchivePath_noMatchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': 'abc'
+            }
+        },
+        'class': 'WPRDummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(
+        local_device_instrumentation_test_run._GetWPRArchivePath(test))
+
+  def testIsRenderTest_matchedWithKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['RenderTest', 'dummy']
+            }
+        },
+        'class': 'DummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertTrue(local_device_instrumentation_test_run._IsRenderTest(test))
+
+  def testIsRenderTest_noMatchedKey(self):
+    test = {
+        'annotations': {
+            'Feature': {
+                'value': ['abc', 'dummy']
+            }
+        },
+        'class': 'DummyTest',
+        'method': 'testRun',
+        'is_junit4': True,
+    }
+    self.assertFalse(local_device_instrumentation_test_run._IsRenderTest(test))
+
+  def testReplaceUncommonChars(self):
+    original = 'abc#edf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abc__edf')
+    original = 'abc#edf#hhf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abc__edf__hhf')
+    original = 'abcedfhhf'
+    self.assertEqual(
+        local_device_instrumentation_test_run._ReplaceUncommonChars(original),
+        'abcedfhhf')
+    original = None
+    with self.assertRaises(ValueError):
+      local_device_instrumentation_test_run._ReplaceUncommonChars(original)
+    original = ''
+    with self.assertRaises(ValueError):
+      local_device_instrumentation_test_run._ReplaceUncommonChars(original)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
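
The retry tests above only cover cases where _ShouldRetry returns True. For completeness, here is a sketch of the one shape the override declines to retry, using a hypothetical test dict:

```python
batched_test = {
    'annotations': {
        'Batch': {'value': 'UnitTests'},
    },
    'class': 'HappyTest',
    'method': 'testBatched',
    'is_junit4': True,
}
# LocalDeviceInstrumentationTestRun._ShouldRetry returns False here: tests
# batched as 'UnitTests' are excluded from retries.
```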
diff --git a/src/build/android/pylib/local/device/local_device_monkey_test_run.py b/src/build/android/pylib/local/device/local_device_monkey_test_run.py
new file mode 100644
index 0000000..f0d2339
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_monkey_test_run.py
@@ -0,0 +1,128 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceMonkeyTestRun, self).__init__(env, test_instance)
+
+  def TestPackage(self):
+    return 'monkey'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def _RunTest(self, device, test):
+    device.ClearApplicationState(self._test_instance.package)
+
+    # Chrome crashes are not always caught by Monkey test runner.
+    # Launch Chrome and verify Chrome has the same PID before and after
+    # the test.
+    device.StartActivity(
+        intent.Intent(package=self._test_instance.package,
+                      activity=self._test_instance.activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+    before_pids = device.GetPids(self._test_instance.package)
+
+    output = ''
+    if before_pids:
+      if len(before_pids.get(self._test_instance.package, [])) > 1:
+        raise Exception(
+            'At most one instance of process %s expected but found pids: '
+            '%s' % (self._test_instance.package, before_pids))
+      output = '\n'.join(self._LaunchMonkeyTest(device))
+      after_pids = device.GetPids(self._test_instance.package)
+
+    crashed = True
+    if self._test_instance.package not in before_pids:
+      logging.error('Failed to start the process.')
+    elif self._test_instance.package not in after_pids:
+      logging.error('Process %s has died.',
+                    before_pids[self._test_instance.package])
+    elif (before_pids[self._test_instance.package] !=
+          after_pids[self._test_instance.package]):
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._test_instance.package],
+                    after_pids[self._test_instance.package])
+    else:
+      crashed = False
+
+    success_pattern = 'Events injected: %d' % self._test_instance.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._test_instance.package:
+        logging.warning('Starting MinidumpUploadService...')
+        # TODO(jbudorick): Update this after upstreaming.
+        minidump_intent = intent.Intent(
+            action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+            package=self._test_instance.package,
+            activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+        try:
+          device.RunShellCommand(
+              ['am', 'startservice'] + minidump_intent.am_args,
+              as_root=True, check_return=True)
+        except device_errors.CommandFailedError:
+          logging.exception('Failed to start MinidumpUploadService')
+
+    return result, None
+
+  #override
+  def TearDown(self):
+    pass
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _ShouldShard(self):
+    # TODO(mikecase): Run Monkey test concurrently on each attached device.
+    return False
+
+  #override
+  def _GetTests(self):
+    return ['MonkeyTest']
+
+  def _LaunchMonkeyTest(self, device):
+    try:
+      cmd = ['monkey',
+             '-p', self._test_instance.package,
+             '--throttle', str(self._test_instance.throttle),
+             '-s', str(self._test_instance.seed),
+             '--monitor-native-crashes',
+             '--kill-process-after-error']
+      for category in self._test_instance.categories:
+        cmd.extend(['-c', category])
+      for _ in range(self._test_instance.verbose_count):
+        cmd.append('-v')
+      cmd.append(str(self._test_instance.event_count))
+      return device.RunShellCommand(
+          cmd, timeout=self._test_instance.timeout, check_return=True)
+    finally:
+      try:
+        # Kill the monkey test process on the device. If you manually
+        # interrupt the test run, this will prevent the monkey test from
+        # continuing to run.
+        device.KillAll('com.android.commands.monkey')
+      except device_errors.CommandFailedError:
+        pass
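
When reproducing a monkey run by hand, it helps to know roughly what _LaunchMonkeyTest executes on the device. A sketch with hypothetical values (in the real code, the package, throttle, seed, verbosity, and event count all come from the test instance):

```python
# Roughly equivalent to:
#   adb shell monkey -p org.chromium.chrome --throttle 100 -s 1234 \
#       --monitor-native-crashes --kill-process-after-error -v -v 10000
cmd = [
    'monkey',
    '-p', 'org.chromium.chrome',
    '--throttle', '100',
    '-s', '1234',
    '--monitor-native-crashes',
    '--kill-process-after-error',
    '-v', '-v',
    '10000',
]
```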
diff --git a/src/build/android/pylib/local/device/local_device_test_run.py b/src/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000..6fa0af7
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,394 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import logging
+import posixpath
+import signal
+import thread
+import threading
+
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android.sdk import version_codes
+from devil.android.tools import device_recovery
+from devil.utils import signal_handler
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+from pylib.local.device import local_device_environment
+
+
+_SIGTERM_TEST_LOG = (
+  '  Suite execution terminated, probably due to swarming timeout.\n'
+  '  Your test may not have run.')
+
+
+def SubstituteDeviceRoot(device_path, device_root):
+  if not device_path:
+    return device_root
+  elif isinstance(device_path, list):
+    return posixpath.join(*(p if p else device_root for p in device_path))
+  else:
+    return device_path
+
+
+class TestsTerminated(Exception):
+  pass
+
+
+class InvalidShardingSettings(Exception):
+  def __init__(self, shard_index, total_shards):
+    super(InvalidShardingSettings, self).__init__(
+        'Invalid sharding settings. shard_index: %d total_shards: %d'
+            % (shard_index, total_shards))
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+  def __init__(self, env, test_instance):
+    super(LocalDeviceTestRun, self).__init__(env, test_instance)
+    self._tools = {}
+    # This is intended to be filled by a child class.
+    self._installed_packages = []
+    env.SetPreferredAbis(test_instance.GetPreferredAbis())
+
+  #override
+  def RunTests(self, results):
+    tests = self._GetTests()
+
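+    # Set from the SIGTERM handler below; worker threads check it between
+    # tests and bail out via thread.exit() so a swarming kill stops the run
+    # promptly.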
+    exit_now = threading.Event()
+
+    @local_device_environment.handle_shard_failures
+    def run_tests_on_device(dev, tests, results):
+      # This is performed here instead of during setup because restarting the
+      # device clears app compatibility flags, which will happen if a device
+      # needs to be recovered.
+      SetAppCompatibilityFlagsIfNecessary(self._installed_packages, dev)
+      consecutive_device_errors = 0
+      for test in tests:
+        if not test:
+          logging.warning('No tests in shard. Continuing.')
+          tests.test_completed()
+          continue
+        if exit_now.isSet():
+          thread.exit()
+
+        result = None
+        rerun = None
+        try:
+          result, rerun = crash_handler.RetryOnSystemCrash(
+              lambda d, t=test: self._RunTest(d, t),
+              device=dev)
+          consecutive_device_errors = 0
+          if isinstance(result, base_test_result.BaseTestResult):
+            results.AddResult(result)
+          elif isinstance(result, list):
+            results.AddResults(result)
+          else:
+            raise Exception(
+                'Unexpected result type: %s' % type(result).__name__)
+        except device_errors.CommandTimeoutError:
+          # Test timeouts don't count as device errors for the purpose
+          # of bad device detection.
+          consecutive_device_errors = 0
+
+          if isinstance(test, list):
+            results.AddResults(
+                base_test_result.BaseTestResult(
+                    self._GetUniqueTestName(t),
+                    base_test_result.ResultType.TIMEOUT)
+                for t in test)
+          else:
+            results.AddResult(
+                base_test_result.BaseTestResult(
+                    self._GetUniqueTestName(test),
+                    base_test_result.ResultType.TIMEOUT))
+        except Exception as e:  # pylint: disable=broad-except
+          if isinstance(tests, test_collection.TestCollection):
+            rerun = test
+          if (isinstance(e, device_errors.DeviceUnreachableError)
+              or not isinstance(e, base_error.BaseError)):
+            # If we get a device error but believe the device is still
+            # reachable, attempt to continue using it. Otherwise, raise
+            # the exception and terminate this run_tests_on_device call.
+            raise
+
+          consecutive_device_errors += 1
+          if consecutive_device_errors >= 3:
+            # We believe the device is still reachable and may still be usable,
+            # but if it fails repeatedly, we shouldn't attempt to keep using
+            # it.
+            logging.error('Repeated failures on device %s. Abandoning.',
+                          str(dev))
+            raise
+
+          logging.exception(
+              'Attempting to continue using device %s despite failure (%d/3).',
+              str(dev), consecutive_device_errors)
+
+        finally:
+          if isinstance(tests, test_collection.TestCollection):
+            if rerun:
+              tests.add(rerun)
+            tests.test_completed()
+
+      logging.info('Finished running tests on this device.')
+
+    def stop_tests(_signum, _frame):
+      logging.critical('Received SIGTERM. Stopping test execution.')
+      exit_now.set()
+      raise TestsTerminated()
+
+    try:
+      with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
+        self._env.ResetCurrentTry()
+        while self._env.current_try < self._env.max_tries and tests:
+          tries = self._env.current_try
+          grouped_tests = self._GroupTests(tests)
+          logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
+          if tries > 0 and self._env.recover_devices:
+            if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1
+                   for d in self._env.devices):
+              logging.info(
+                  'Attempting to recover devices due to known issue on L MR1. '
+                  'See crbug.com/787056 for details.')
+              self._env.parallel_devices.pMap(
+                  device_recovery.RecoverDevice, None)
+            elif tries + 1 == self._env.max_tries:
+              logging.info(
+                  'Attempting to recover devices prior to last test attempt.')
+              self._env.parallel_devices.pMap(
+                  device_recovery.RecoverDevice, None)
+          logging.info('Will run %d tests on %d devices: %s',
+                       len(tests), len(self._env.devices),
+                       ', '.join(str(d) for d in self._env.devices))
+          for t in tests:
+            logging.debug('  %s', t)
+
+          try_results = base_test_result.TestRunResults()
+          test_names = (self._GetUniqueTestName(t) for t in tests)
+          try_results.AddResults(
+              base_test_result.BaseTestResult(
+                  t, base_test_result.ResultType.NOTRUN)
+              for t in test_names if not t.endswith('*'))
+
+          # As soon as we know the names of the tests, we populate |results|.
+          # The tests in try_results will have their results updated by
+          # try_results.AddResult() as they are run.
+          results.append(try_results)
+
+          try:
+            if self._ShouldShard():
+              tc = test_collection.TestCollection(
+                  self._CreateShards(grouped_tests))
+              self._env.parallel_devices.pMap(
+                  run_tests_on_device, tc, try_results).pGet(None)
+            else:
+              self._env.parallel_devices.pMap(run_tests_on_device,
+                                              grouped_tests,
+                                              try_results).pGet(None)
+          except TestsTerminated:
+            for unknown_result in try_results.GetUnknown():
+              try_results.AddResult(
+                  base_test_result.BaseTestResult(
+                      unknown_result.GetName(),
+                      base_test_result.ResultType.TIMEOUT,
+                      log=_SIGTERM_TEST_LOG))
+            raise
+
+          self._env.IncrementCurrentTry()
+          tests = self._GetTestsToRetry(tests, try_results)
+
+          logging.info('FINISHED TRY #%d/%d', tries + 1, self._env.max_tries)
+          if tests:
+            logging.info('%d failed tests remain.', len(tests))
+          else:
+            logging.info('All tests completed.')
+    except TestsTerminated:
+      pass
+
+  def _GetTestsToRetry(self, tests, try_results):
+
+    def is_failure_result(test_result):
+      if isinstance(test_result, list):
+        return any(is_failure_result(r) for r in test_result)
+      return (
+          test_result is None
+          or test_result.GetType() not in (
+              base_test_result.ResultType.PASS,
+              base_test_result.ResultType.SKIP))
+
+    all_test_results = {r.GetName(): r for r in try_results.GetAll()}
+
+    tests_and_names = ((t, self._GetUniqueTestName(t)) for t in tests)
+
+    tests_and_results = {}
+    for test, name in tests_and_names:
+      if name.endswith('*'):
+        tests_and_results[name] = (
+            test,
+            [r for n, r in all_test_results.iteritems()
+             if fnmatch.fnmatch(n, name)])
+      else:
+        tests_and_results[name] = (test, all_test_results.get(name))
+
+    failed_tests_and_results = (
+        (test, result) for test, result in tests_and_results.itervalues()
+        if is_failure_result(result)
+    )
+
+    return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)]
+
+  def _ApplyExternalSharding(self, tests, shard_index, total_shards):
+    logging.info('Using external sharding settings. This is shard %d/%d',
+                 shard_index, total_shards)
+
+    if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
+      raise InvalidShardingSettings(shard_index, total_shards)
+
+    sharded_tests = []
+
+    # Group together tests that should run in the same test invocation
+    # (either unit tests or batched tests).
+    grouped_tests = self._GroupTests(tests)
+
+    # Partition grouped tests approximately evenly across shards.
+    partitioned_tests = self._PartitionTests(grouped_tests, total_shards,
+                                             float('inf'))
+    if len(partitioned_tests) <= shard_index:
+      return []
+    for t in partitioned_tests[shard_index]:
+      if isinstance(t, list):
+        sharded_tests.extend(t)
+      else:
+        sharded_tests.append(t)
+    return sharded_tests
+
+  # Partition tests evenly into |num_desired_partitions| partitions where
+  # possible. However, several constraints can make perfectly even
+  # partitioning impossible. If max_partition_size isn't large enough, extra
+  # partitions may be created (an infinite max size should always return
+  # precisely the desired number of partitions). Even if |max_partition_size|
+  # is technically large enough to hold all of the tests in
+  # |num_desired_partitions| partitions, we attempt to keep test order
+  # relatively stable to minimize flakes, so when tests are grouped (e.g.
+  # batched tests), we cannot perfectly fill all partitions, as that would
+  # require breaking up groups.
+  def _PartitionTests(self, tests, num_desired_partitions, max_partition_size):
+    # pylint: disable=no-self-use
+    partitions = []
+
+    # Sort by hash so we don't put all tests in a slow suite in the same
+    # partition.
+    tests = sorted(
+        tests,
+        key=lambda t: hash(
+            self._GetUniqueTestName(t[0] if isinstance(t, list) else t)))
+
+    def CountTestsIndividually(test):
+      if not isinstance(test, list):
+        return False
+      annotations = test[0]['annotations']
+      # UnitTests tests are really fast, so to balance shards better, count
+      # UnitTests Batches as single tests.
+      return ('Batch' not in annotations
+              or annotations['Batch']['value'] != 'UnitTests')
+
+    num_not_yet_allocated = sum(
+        [len(test) - 1 for test in tests if CountTestsIndividually(test)])
+    num_not_yet_allocated += len(tests)
+
+    # Fast linear partition approximation capped by max_partition_size. We
+    # cannot round-robin or otherwise re-order tests dynamically because we want
+    # test order to remain stable.
+    partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                         max_partition_size)
+    partitions.append([])
+    last_partition_size = 0
+    for test in tests:
+      test_count = len(test) if CountTestsIndividually(test) else 1
+      num_not_yet_allocated -= test_count
+      # Make a new shard whenever we would overfill the previous one. However,
+      # if the size of the test group is larger than the max partition size on
+      # its own, just put the group in its own shard instead of splitting up the
+      # group.
+      if (last_partition_size + test_count > partition_size
+          and last_partition_size > 0):
+        num_desired_partitions -= 1
+        partitions.append([])
+        partitions[-1].append(test)
+        last_partition_size = test_count
+        if num_desired_partitions <= 0:
+          # Too many tests for number of partitions, just fill all partitions
+          # beyond num_desired_partitions.
+          partition_size = max_partition_size
+        else:
+          # Re-balance remaining partitions.
+          partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                               max_partition_size)
+      else:
+        partitions[-1].append(test)
+        last_partition_size += test_count
+
+    if not partitions[-1]:
+      partitions.pop()
+    return partitions
+
+  def GetTool(self, device):
+    if str(device) not in self._tools:
+      self._tools[str(device)] = valgrind_tools.CreateTool(
+          self._env.tool, device)
+    return self._tools[str(device)]
+
+  def _CreateShards(self, tests):
+    raise NotImplementedError
+
+  def _GetUniqueTestName(self, test):
+    # pylint: disable=no-self-use
+    return test
+
+  def _ShouldRetry(self, test, result):
+    # pylint: disable=no-self-use,unused-argument
+    return True
+
+  def _GetTests(self):
+    raise NotImplementedError
+
+  def _GroupTests(self, tests):
+    # pylint: disable=no-self-use
+    return tests
+
+  def _RunTest(self, device, test):
+    raise NotImplementedError
+
+  def _ShouldShard(self):
+    raise NotImplementedError
+
+
+def SetAppCompatibilityFlagsIfNecessary(packages, device):
+  """Sets app compatibility flags on the given packages and device.
+
+  Args:
+    packages: A list of strings containing package names to apply flags to.
+    device: A DeviceUtils instance to apply the flags on.
+  """
+
+  def set_flag_for_packages(flag, enable):
+    enable_str = 'enable' if enable else 'disable'
+    for p in packages:
+      cmd = ['am', 'compat', enable_str, flag, p]
+      device.RunShellCommand(cmd)
+
+  sdk_version = device.build_version_sdk
+  if sdk_version >= version_codes.R:
+    # These flags are necessary to use the legacy storage permissions on R+.
+    # See crbug.com/1173699 for more information.
+    set_flag_for_packages('DEFAULT_SCOPED_STORAGE', False)
+    set_flag_for_packages('FORCE_ENABLE_SCOPED_STORAGE', False)
+
+
+class NoTestsError(Exception):
+  """Error for when no tests are found."""
diff --git a/src/build/android/pylib/local/device/local_device_test_run_test.py b/src/build/android/pylib/local/device/local_device_test_run_test.py
new file mode 100755
index 0000000..77bbc2e
--- /dev/null
+++ b/src/build/android/pylib/local/device/local_device_test_run_test.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import unittest
+
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+import mock  # pylint: disable=import-error
+
+
+class SubstituteDeviceRootTest(unittest.TestCase):
+
+  def testNoneDevicePath(self):
+    self.assertEquals(
+        '/fake/device/root',
+        local_device_test_run.SubstituteDeviceRoot(
+            None, '/fake/device/root'))
+
+  def testStringDevicePath(self):
+    self.assertEquals(
+        '/another/fake/device/path',
+        local_device_test_run.SubstituteDeviceRoot(
+            '/another/fake/device/path', '/fake/device/root'))
+
+  def testListWithNoneDevicePath(self):
+    self.assertEquals(
+        '/fake/device/root/subpath',
+        local_device_test_run.SubstituteDeviceRoot(
+            [None, 'subpath'], '/fake/device/root'))
+
+  def testListWithoutNoneDevicePath(self):
+    self.assertEquals(
+        '/another/fake/device/path',
+        local_device_test_run.SubstituteDeviceRoot(
+            ['/', 'another', 'fake', 'device', 'path'],
+            '/fake/device/root'))
+
+
+class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun):
+
+  # pylint: disable=abstract-method
+
+  def __init__(self):
+    super(TestLocalDeviceTestRun, self).__init__(
+        mock.MagicMock(), mock.MagicMock())
+
+
+class TestLocalDeviceNonStringTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+
+  # pylint: disable=abstract-method
+
+  def __init__(self):
+    super(TestLocalDeviceNonStringTestRun, self).__init__(
+        mock.MagicMock(), mock.MagicMock())
+
+  def _GetUniqueTestName(self, test):
+    return test['name']
+
+
+class LocalDeviceTestRunTest(unittest.TestCase):
+
+  def testGetTestsToRetry_allTestsPassed(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(0, len(tests_to_retry))
+
+  def testGetTestsToRetry_testFailed(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test1', base_test_result.ResultType.FAIL),
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('Test1', tests_to_retry)
+
+  def testGetTestsToRetry_testUnknown(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = ['Test1'] + [r.GetName() for r in results]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('Test1', tests_to_retry)
+
+  def testGetTestsToRetry_wildcardFilter_allPass(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.PASS),
+    ]
+
+    tests = ['TestCase.*']
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(0, len(tests_to_retry))
+
+  def testGetTestsToRetry_wildcardFilter_oneFails(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.FAIL),
+    ]
+
+    tests = ['TestCase.*']
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIn('TestCase.*', tests_to_retry)
+
+  def testGetTestsToRetry_nonStringTests(self):
+    results = [
+        base_test_result.BaseTestResult(
+            'TestCase.Test1', base_test_result.ResultType.PASS),
+        base_test_result.BaseTestResult(
+            'TestCase.Test2', base_test_result.ResultType.FAIL),
+    ]
+
+    tests = [
+        {'name': 'TestCase.Test1'},
+        {'name': 'TestCase.Test2'},
+    ]
+    try_results = base_test_result.TestRunResults()
+    try_results.AddResults(results)
+
+    test_run = TestLocalDeviceNonStringTestRun()
+    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+    self.assertEquals(1, len(tests_to_retry))
+    self.assertIsInstance(tests_to_retry[0], dict)
+    self.assertEquals(tests[1], tests_to_retry[0])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
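
The wildcard cases above hinge on fnmatch-style matching of result names against the test filter. In brief:

```python
import fnmatch

names = ['TestCase.Test1', 'TestCase.Test2', 'Other.Test']
matches = [n for n in names if fnmatch.fnmatch(n, 'TestCase.*')]
assert matches == ['TestCase.Test1', 'TestCase.Test2']
# One failing result among the matches is enough for _GetTestsToRetry to keep
# the whole 'TestCase.*' filter in the retry list.
```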
diff --git a/src/build/android/pylib/local/emulator/__init__.py b/src/build/android/pylib/local/emulator/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/emulator/avd.py b/src/build/android/pylib/local/emulator/avd.py
new file mode 100644
index 0000000..51365eb
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/avd.py
@@ -0,0 +1,606 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import contextlib
+import json
+import logging
+import os
+import socket
+import stat
+import subprocess
+import threading
+
+from google.protobuf import text_format  # pylint: disable=import-error
+
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from devil.utils import cmd_helper
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+from pylib import constants
+from pylib.local.emulator import ini
+from pylib.local.emulator.proto import avd_pb2
+
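+# Sentinel presumably used by the package-installation helpers later in this
+# file to mean "all packages in the config".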
+_ALL_PACKAGES = object()
+_DEFAULT_AVDMANAGER_PATH = os.path.join(
+    constants.ANDROID_SDK_ROOT, 'cmdline-tools', 'latest', 'bin', 'avdmanager')
+# Default to a 480dp mdpi screen (a relatively large phone).
+# See https://developer.android.com/training/multiscreen/screensizes
+# and https://developer.android.com/training/multiscreen/screendensities
+# for more information.
+_DEFAULT_SCREEN_DENSITY = 160
+_DEFAULT_SCREEN_HEIGHT = 960
+_DEFAULT_SCREEN_WIDTH = 480
+
+
+class AvdException(Exception):
+  """Raised when this module has a problem interacting with an AVD."""
+
+  def __init__(self, summary, command=None, stdout=None, stderr=None):
+    message_parts = [summary]
+    if command:
+      message_parts.append('  command: %s' % ' '.join(command))
+    if stdout:
+      message_parts.append('  stdout:')
+      message_parts.extend('    %s' % line for line in stdout.splitlines())
+    if stderr:
+      message_parts.append('  stderr:')
+      message_parts.extend('    %s' % line for line in stderr.splitlines())
+
+    super(AvdException, self).__init__('\n'.join(message_parts))
+
+
+def _Load(avd_proto_path):
+  """Loads an Avd proto from a textpb file at the given path.
+
+  Should not be called outside of this module.
+
+  Args:
+    avd_proto_path: path to a textpb file containing an Avd message.
+  """
+  with open(avd_proto_path) as avd_proto_file:
+    return text_format.Merge(avd_proto_file.read(), avd_pb2.Avd())
+
+
+class _AvdManagerAgent(object):
+  """Private utility for interacting with avdmanager."""
+
+  def __init__(self, avd_home, sdk_root):
+    """Create an _AvdManagerAgent.
+
+    Args:
+      avd_home: path to ANDROID_AVD_HOME directory.
+        Typically something like /path/to/dir/.android/avd
+      sdk_root: path to SDK root directory.
+    """
+    self._avd_home = avd_home
+    self._sdk_root = sdk_root
+
+    self._env = dict(os.environ)
+
+    # The avdmanager from cmdline-tools looks two levels up from toolsdir to
+    # find the SDK root. Pass it a fake directory under the directory in
+    # which we install the system images so that avdmanager can find them.
+    fake_tools_dir = os.path.join(self._sdk_root, 'non-existent-tools',
+                                  'non-existent-version')
+    self._env.update({
+        'ANDROID_AVD_HOME':
+        self._avd_home,
+        'AVDMANAGER_OPTS':
+        '-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir,
+    })
+
+  def Create(self, avd_name, system_image, force=False):
+    """Call `avdmanager create`.
+
+    Args:
+      avd_name: name of the AVD to create.
+      system_image: system image to use for the AVD.
+      force: whether to force creation, overwriting any existing
+        AVD with the same name.
+    """
+    create_cmd = [
+        _DEFAULT_AVDMANAGER_PATH,
+        '-v',
+        'create',
+        'avd',
+        '-n',
+        avd_name,
+        '-k',
+        system_image,
+    ]
+    if force:
+      create_cmd += ['--force']
+
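+    # `avdmanager create` can prompt on stdin (e.g. whether to create a
+    # custom hardware profile); writing a bare newline below accepts the
+    # default.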
+    create_proc = cmd_helper.Popen(
+        create_cmd,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        env=self._env)
+    output, error = create_proc.communicate(input='\n')
+    if create_proc.returncode != 0:
+      raise AvdException(
+          'AVD creation failed',
+          command=create_cmd,
+          stdout=output,
+          stderr=error)
+
+    for line in output.splitlines():
+      logging.info('  %s', line)
+
+  def Delete(self, avd_name):
+    """Call `avdmanager delete`.
+
+    Args:
+      avd_name: name of the AVD to delete.
+    """
+    delete_cmd = [
+        _DEFAULT_AVDMANAGER_PATH,
+        '-v',
+        'delete',
+        'avd',
+        '-n',
+        avd_name,
+    ]
+    try:
+      for line in cmd_helper.IterCmdOutputLines(delete_cmd, env=self._env):
+        logging.info('  %s', line)
+    except subprocess.CalledProcessError as e:
+      raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd)
+
+
+class AvdConfig(object):
+  """Represents a particular AVD configuration.
+
+  This class supports creation, installation, and execution of an AVD
+  from a given Avd proto message, as defined in
+  //build/android/pylib/local/emulator/proto/avd.proto.
+  """
+
+  def __init__(self, avd_proto_path):
+    """Create an AvdConfig object.
+
+    Args:
+      avd_proto_path: path to a textpb file containing an Avd message.
+    """
+    self._config = _Load(avd_proto_path)
+
+    self._emulator_home = os.path.join(constants.DIR_SOURCE_ROOT,
+                                       self._config.avd_package.dest_path)
+    self._emulator_sdk_root = os.path.join(
+        constants.DIR_SOURCE_ROOT, self._config.emulator_package.dest_path)
+    self._emulator_path = os.path.join(self._emulator_sdk_root, 'emulator',
+                                       'emulator')
+
+    self._initialized = False
+    self._initializer_lock = threading.Lock()
+
+  @property
+  def avd_settings(self):
+    return self._config.avd_settings
+
+  def Create(self,
+             force=False,
+             snapshot=False,
+             keep=False,
+             cipd_json_output=None,
+             dry_run=False):
+    """Create an instance of the AVD CIPD package.
+
+    This method:
+     - installs the requisite system image
+     - creates the AVD
+     - modifies the AVD's ini files to support running chromium tests
+       in chromium infrastructure
+     - optionally starts & stops the AVD for snapshotting (default no)
+     - creates and uploads an instance of the AVD CIPD package by default
+       (can be turned off with the dry_run flag)
+     - optionally deletes the AVD (default yes)
+
+    Args:
+      force: bool indicating whether to force create the AVD.
+      snapshot: bool indicating whether to snapshot the AVD before creating
+        the CIPD package.
+      keep: bool indicating whether to keep the AVD after creating
+        the CIPD package.
+      cipd_json_output: string path to pass to `cipd create` via -json-output.
+      dry_run: When set to True, it will skip the CIPD package creation
+        after creating the AVD.
+    """
+    logging.info('Installing required packages.')
+    self._InstallCipdPackages(packages=[
+        self._config.emulator_package,
+        self._config.system_image_package,
+    ])
+
+    android_avd_home = os.path.join(self._emulator_home, 'avd')
+
+    if not os.path.exists(android_avd_home):
+      os.makedirs(android_avd_home)
+
+    avd_manager = _AvdManagerAgent(
+        avd_home=android_avd_home, sdk_root=self._emulator_sdk_root)
+
+    logging.info('Creating AVD.')
+    avd_manager.Create(
+        avd_name=self._config.avd_name,
+        system_image=self._config.system_image_name,
+        force=force)
+
+    try:
+      logging.info('Modifying AVD configuration.')
+
+      # Clear out any previous configuration or state from this AVD.
+      root_ini = os.path.join(android_avd_home,
+                              '%s.ini' % self._config.avd_name)
+      features_ini = os.path.join(self._emulator_home, 'advancedFeatures.ini')
+      avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)
+      config_ini = os.path.join(avd_dir, 'config.ini')
+
+      with ini.update_ini_file(root_ini) as root_ini_contents:
+        root_ini_contents['path.rel'] = 'avd/%s.avd' % self._config.avd_name
+
+      with ini.update_ini_file(features_ini) as features_ini_contents:
+        # The features_ini file is not refreshed by avdmanager during
+        # creation, so explicitly clear its contents to drop any leftovers
+        # from a previous creation.
+        features_ini_contents.clear()
+        features_ini_contents.update(self.avd_settings.advanced_features)
+
+      with ini.update_ini_file(config_ini) as config_ini_contents:
+        height = self.avd_settings.screen.height or _DEFAULT_SCREEN_HEIGHT
+        width = self.avd_settings.screen.width or _DEFAULT_SCREEN_WIDTH
+        density = self.avd_settings.screen.density or _DEFAULT_SCREEN_DENSITY
+
+        config_ini_contents.update({
+            'disk.dataPartition.size': '4G',
+            'hw.keyboard': 'yes',
+            'hw.lcd.density': density,
+            'hw.lcd.height': height,
+            'hw.lcd.width': width,
+        })
+
+        if self.avd_settings.ram_size:
+          config_ini_contents['hw.ramSize'] = self.avd_settings.ram_size
+
+      # Start & stop the AVD.
+      self._Initialize()
+      instance = _AvdInstance(self._emulator_path, self._emulator_home,
+                              self._config)
+      # Enable init and snapshot debug logging when snapshotting is requested.
+      debug_tags = 'init,snapshot' if snapshot else None
+      instance.Start(
+          read_only=False, snapshot_save=snapshot, debug_tags=debug_tags)
+      # Android devices with full-disk encryption are encrypted on first boot,
+      # and then get decrypted to continue the boot process (see details in
+      # https://bit.ly/3agmjcM).
+      # Wait for this step to complete, since it can take a while on old OS
+      # versions like M; otherwise the AVD may fail with an "Encryption
+      # Unsuccessful" error.
+      device_utils.DeviceUtils(instance.serial).WaitUntilFullyBooted(
+          decrypt=True, timeout=180, retries=0)
+      instance.Stop()
+
+      # The multiinstance lock file seems to interfere with the emulator's
+      # operation in some circumstances (beyond the obvious -read-only ones),
+      # and there seems to be no mechanism by which it gets closed or deleted.
+      # See https://bit.ly/2pWQTH7 for context.
+      multi_instance_lock_file = os.path.join(avd_dir, 'multiinstance.lock')
+      if os.path.exists(multi_instance_lock_file):
+        os.unlink(multi_instance_lock_file)
+
+      package_def_content = {
+          'package':
+          self._config.avd_package.package_name,
+          'root':
+          self._emulator_home,
+          'install_mode':
+          'copy',
+          'data': [{
+              'dir': os.path.relpath(avd_dir, self._emulator_home)
+          }, {
+              'file': os.path.relpath(root_ini, self._emulator_home)
+          }, {
+              'file': os.path.relpath(features_ini, self._emulator_home)
+          }],
+      }
+
+      logging.info('Creating AVD CIPD package.')
+      logging.debug('ensure file content: %s',
+                    json.dumps(package_def_content, indent=2))
+
+      with tempfile_ext.TemporaryFileName(suffix='.json') as package_def_path:
+        with open(package_def_path, 'w') as package_def_file:
+          json.dump(package_def_content, package_def_file)
+
+        logging.info('  %s', self._config.avd_package.package_name)
+        cipd_create_cmd = [
+            'cipd',
+            'create',
+            '-pkg-def',
+            package_def_path,
+            '-tag',
+            'emulator_version:%s' % self._config.emulator_package.version,
+            '-tag',
+            'system_image_version:%s' %
+            self._config.system_image_package.version,
+        ]
+        if cipd_json_output:
+          cipd_create_cmd.extend([
+              '-json-output',
+              cipd_json_output,
+          ])
+        logging.info('running %r%s', cipd_create_cmd,
+                     ' (dry_run)' if dry_run else '')
+        if not dry_run:
+          try:
+            for line in cmd_helper.IterCmdOutputLines(cipd_create_cmd):
+              logging.info('    %s', line)
+          except subprocess.CalledProcessError as e:
+            raise AvdException(
+                'CIPD package creation failed: %s' % str(e),
+                command=cipd_create_cmd)
+
+    finally:
+      if not keep:
+        logging.info('Deleting AVD.')
+        avd_manager.Delete(avd_name=self._config.avd_name)
+
+  def Install(self, packages=_ALL_PACKAGES):
+    """Installs the requested CIPD packages and prepares them for use.
+
+    This includes making files writable and revising some of the
+    emulator's internal config files.
+
+    Returns: None
+    Raises: AvdException on failure to install.
+    """
+    self._InstallCipdPackages(packages=packages)
+    self._MakeWriteable()
+    self._EditConfigs()
+
+  def _InstallCipdPackages(self, packages):
+    pkgs_by_dir = {}
+    if packages is _ALL_PACKAGES:
+      packages = [
+          self._config.avd_package,
+          self._config.emulator_package,
+          self._config.system_image_package,
+      ]
+    for pkg in packages:
+      if pkg.dest_path not in pkgs_by_dir:
+        pkgs_by_dir[pkg.dest_path] = []
+      pkgs_by_dir[pkg.dest_path].append(pkg)
+
+    for pkg_dir, pkgs in pkgs_by_dir.items():
+      logging.info('Installing packages in %s', pkg_dir)
+      cipd_root = os.path.join(constants.DIR_SOURCE_ROOT, pkg_dir)
+      if not os.path.exists(cipd_root):
+        os.makedirs(cipd_root)
+      ensure_path = os.path.join(cipd_root, '.ensure')
+      with open(ensure_path, 'w') as ensure_file:
+        # Make CIPD ensure that all files are present and correct,
+        # even if it thinks the package is installed.
+        ensure_file.write('$ParanoidMode CheckIntegrity\n\n')
+        for pkg in pkgs:
+          ensure_file.write('%s %s\n' % (pkg.package_name, pkg.version))
+          logging.info('  %s %s', pkg.package_name, pkg.version)
+      ensure_cmd = [
+          'cipd',
+          'ensure',
+          '-ensure-file',
+          ensure_path,
+          '-root',
+          cipd_root,
+      ]
+      try:
+        for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+          logging.info('    %s', line)
+      except subprocess.CalledProcessError as e:
+        raise AvdException(
+            'Failed to install CIPD packages in %s: %s' % (pkg_dir, str(e)),
+            command=ensure_cmd)
+
+  def _MakeWriteable(self):
+    # The emulator requires that some files are writable.
+    for dirname, _, filenames in os.walk(self._emulator_home):
+      for f in filenames:
+        path = os.path.join(dirname, f)
+        mode = os.lstat(path).st_mode
+        if mode & stat.S_IRUSR:
+          mode = mode | stat.S_IWUSR
+        os.chmod(path, mode)
+
+  def _EditConfigs(self):
+    android_avd_home = os.path.join(self._emulator_home, 'avd')
+    avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)
+
+    config_path = os.path.join(avd_dir, 'config.ini')
+    if os.path.exists(config_path):
+      with open(config_path) as config_file:
+        config_contents = ini.load(config_file)
+    else:
+      config_contents = {}
+
+    config_contents['hw.sdCard'] = 'true'
+    if self.avd_settings.sdcard.size:
+      sdcard_path = os.path.join(avd_dir, 'cr-sdcard.img')
+      if not os.path.exists(sdcard_path):
+        mksdcard_path = os.path.join(
+            os.path.dirname(self._emulator_path), 'mksdcard')
+        mksdcard_cmd = [
+            mksdcard_path,
+            self.avd_settings.sdcard.size,
+            sdcard_path,
+        ]
+        cmd_helper.RunCmd(mksdcard_cmd)
+
+      config_contents['hw.sdCard.path'] = sdcard_path
+
+    with open(config_path, 'w') as config_file:
+      ini.dump(config_contents, config_file)
+
+  def _Initialize(self):
+    if self._initialized:
+      return
+
+    with self._initializer_lock:
+      if self._initialized:
+        return
+
+      # Emulator start-up looks for the adb daemon. Make sure it's running.
+      adb_wrapper.AdbWrapper.StartServer()
+
+      # Emulator start-up tries to check for the SDK root by looking for
+      # platforms/ and platform-tools/. Ensure they exist.
+      # See http://bit.ly/2YAkyFE for context.
+      required_dirs = [
+          os.path.join(self._emulator_sdk_root, 'platforms'),
+          os.path.join(self._emulator_sdk_root, 'platform-tools'),
+      ]
+      for d in required_dirs:
+        if not os.path.exists(d):
+          os.makedirs(d)
+
+  def CreateInstance(self):
+    """Creates an AVD instance without starting it.
+
+    Returns:
+      An _AvdInstance.
+    """
+    self._Initialize()
+    return _AvdInstance(self._emulator_path, self._emulator_home, self._config)
+
+  def StartInstance(self):
+    """Starts an AVD instance.
+
+    Returns:
+      An _AvdInstance.
+    """
+    instance = self.CreateInstance()
+    instance.Start()
+    return instance
+
+
+class _AvdInstance(object):
+  """Represents a single running instance of an AVD.
+
+  This class should only be created directly by AvdConfig.CreateInstance
+  or AvdConfig.StartInstance, but its other methods can be freely called.
+  """
+
+  def __init__(self, emulator_path, emulator_home, avd_config):
+    """Create an _AvdInstance object.
+
+    Args:
+      emulator_path: path to the emulator binary.
+      emulator_home: path to the emulator home directory.
+      avd_config: AVD config proto.
+    """
+    self._avd_config = avd_config
+    self._avd_name = avd_config.avd_name
+    self._emulator_home = emulator_home
+    self._emulator_path = emulator_path
+    self._emulator_proc = None
+    self._emulator_serial = None
+    self._sink = None
+
+  def __str__(self):
+    return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self)))
+
+  def Start(self,
+            read_only=True,
+            snapshot_save=False,
+            window=False,
+            writable_system=False,
+            debug_tags=None):
+    """Starts the emulator running an instance of the given AVD."""
+
+    with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing(
+        socket.socket(socket.AF_UNIX))) as sock:
+      sock.bind(socket_path)
+      emulator_cmd = [
+          self._emulator_path,
+          '-avd',
+          self._avd_name,
+          '-report-console',
+          'unix:%s' % socket_path,
+          '-no-boot-anim',
+          # Set the gpu mode to swiftshader_indirect; otherwise the AVD may
+          # exit with a "change of render" error when running in window mode.
+          '-gpu',
+          'swiftshader_indirect',
+      ]
+
+      if read_only:
+        emulator_cmd.append('-read-only')
+      if not snapshot_save:
+        emulator_cmd.append('-no-snapshot-save')
+      if writable_system:
+        emulator_cmd.append('-writable-system')
+      if debug_tags:
+        emulator_cmd.extend(['-debug', debug_tags])
+
+      emulator_env = {}
+      if self._emulator_home:
+        emulator_env['ANDROID_EMULATOR_HOME'] = self._emulator_home
+      if window:
+        if 'DISPLAY' in os.environ:
+          emulator_env['DISPLAY'] = os.environ.get('DISPLAY')
+        else:
+          raise AvdException('Emulator failed to start: DISPLAY not defined')
+      else:
+        emulator_cmd.append('-no-window')
+
+      sock.listen(1)
+
+      logging.info('Starting emulator with command: %s',
+                   ' '.join(emulator_cmd))
+
+      # TODO(jbudorick): Add support for logging emulator stdout & stderr at
+      # higher logging levels.
+      # Route emulator output to /dev/null unless debug_tags is set.
+      if not debug_tags:
+        self._sink = open('/dev/null', 'w')
+      self._emulator_proc = cmd_helper.Popen(
+          emulator_cmd, stdout=self._sink, stderr=self._sink, env=emulator_env)
+
+      # Waits for the emulator to report its serial as requested via
+      # -report-console. See http://bit.ly/2lK3L18 for more.
+      def listen_for_serial(s):
+        logging.info('Waiting for connection from emulator.')
+        with contextlib.closing(s.accept()[0]) as conn:
+          val = conn.recv(1024)
+          return 'emulator-%d' % int(val)
+
+      try:
+        self._emulator_serial = timeout_retry.Run(
+            listen_for_serial, timeout=30, retries=0, args=[sock])
+        logging.info('%s started', self._emulator_serial)
+      except Exception as e:
+        self.Stop()
+        raise AvdException('Emulator failed to start: %s' % str(e))
+
+  def Stop(self):
+    """Stops the emulator process."""
+    if self._emulator_proc:
+      if self._emulator_proc.poll() is None:
+        if self._emulator_serial:
+          device_utils.DeviceUtils(self._emulator_serial).adb.Emu('kill')
+        else:
+          self._emulator_proc.terminate()
+        self._emulator_proc.wait()
+      self._emulator_proc = None
+
+    if self._sink:
+      self._sink.close()
+      self._sink = None
+
+  @property
+  def serial(self):
+    return self._emulator_serial
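
As a brief orientation before the supporting modules below: the three classes in this file compose into a simple lifecycle. A minimal usage sketch, assuming a hypothetical textpb path (the method names are the ones defined above):

    from pylib.local.emulator import avd

    # Hypothetical textpb path; any Avd message as defined in
    # //build/android/pylib/local/emulator/proto/avd.proto would work.
    config = avd.AvdConfig('tools/android/avd/proto/generic_android28.textpb')
    config.Install()  # Fetches the AVD, emulator, and system image via CIPD.

    instance = config.StartInstance()  # CreateInstance() followed by Start().
    try:
      print('emulator serial: %s' % instance.serial)
    finally:
      instance.Stop()  # Kills the emulator via adb, or terminates the process.
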
diff --git a/src/build/android/pylib/local/emulator/ini.py b/src/build/android/pylib/local/emulator/ini.py
new file mode 100644
index 0000000..8f16c33
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/ini.py
@@ -0,0 +1,58 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic .ini encoding and decoding."""
+
+from __future__ import absolute_import
+import contextlib
+import os
+
+
+def loads(ini_str, strict=True):
+  ret = {}
+  for line in ini_str.splitlines():
+    key, val = line.split('=', 1)
+    key = key.strip()
+    val = val.strip()
+    if strict and key in ret:
+      raise ValueError('Multiple entries present for key "%s"' % key)
+    ret[key] = val
+
+  return ret
+
+
+def load(fp):
+  return loads(fp.read())
+
+
+def dumps(obj):
+  ret = ''
+  for k, v in sorted(obj.items()):
+    ret += '%s = %s\n' % (k, str(v))
+  return ret
+
+
+def dump(obj, fp):
+  fp.write(dumps(obj))
+
+
+@contextlib.contextmanager
+def update_ini_file(ini_file_path):
+  """Load and update the contents of an ini file.
+
+  Args:
+    ini_file_path: A string containing the absolute path of the ini file.
+  Yields:
+    The contents of the file, as a dict
+  """
+  if os.path.exists(ini_file_path):
+    with open(ini_file_path) as ini_file:
+      ini_contents = load(ini_file)
+  else:
+    ini_contents = {}
+
+  yield ini_contents
+
+  with open(ini_file_path, 'w') as ini_file:
+    dump(ini_contents, ini_file)
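
The update_ini_file context manager above yields the parsed contents and writes any mutations back when the block exits, creating the file if needed. A minimal sketch, assuming a hypothetical path:

    from pylib.local.emulator import ini

    # Hypothetical path; mutations to the yielded dict are persisted on exit.
    with ini.update_ini_file('/tmp/config.ini') as contents:
      contents['hw.lcd.density'] = '480'
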
diff --git a/src/build/android/pylib/local/emulator/ini_test.py b/src/build/android/pylib/local/emulator/ini_test.py
new file mode 100755
index 0000000..0cf9250
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/ini_test.py
@@ -0,0 +1,69 @@
+#! /usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for ini.py."""
+
+from __future__ import absolute_import
+import textwrap
+import unittest
+
+from pylib.local.emulator import ini
+
+
+class IniTest(unittest.TestCase):
+  def testLoadsBasic(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz= example
+        bar.bad =/path/to/thing
+        """)
+    expected = {
+        'foo.bar': '1',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    self.assertEqual(expected, ini.loads(ini_str))
+
+  def testLoadsStrictFailure(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz = example
+        bar.bad = /path/to/thing
+        foo.bar = duplicate
+        """)
+    with self.assertRaises(ValueError):
+      ini.loads(ini_str, strict=True)
+
+  def testLoadsPermissive(self):
+    ini_str = textwrap.dedent("""\
+        foo.bar = 1
+        foo.baz = example
+        bar.bad = /path/to/thing
+        foo.bar = duplicate
+        """)
+    expected = {
+        'foo.bar': 'duplicate',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    self.assertEqual(expected, ini.loads(ini_str, strict=False))
+
+  def testDumpsBasic(self):
+    ini_contents = {
+        'foo.bar': '1',
+        'foo.baz': 'example',
+        'bar.bad': '/path/to/thing',
+    }
+    # ini.dumps is expected to dump to string alphabetically
+    # by key.
+    expected = textwrap.dedent("""\
+        bar.bad = /path/to/thing
+        foo.bar = 1
+        foo.baz = example
+        """)
+    self.assertEqual(expected, ini.dumps(ini_contents))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/local/emulator/local_emulator_environment.py b/src/build/android/pylib/local/emulator/local_emulator_environment.py
new file mode 100644
index 0000000..1343d8c
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/local_emulator_environment.py
@@ -0,0 +1,102 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import logging
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil import base_error
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import parallelizer
+from devil.utils import reraiser_thread
+from devil.utils import timeout_retry
+from pylib.local.device import local_device_environment
+from pylib.local.emulator import avd
+
+# Mirroring https://bit.ly/2OjuxcS#23
+_MAX_ANDROID_EMULATORS = 16
+
+
+class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
+
+  def __init__(self, args, output_manager, error_func):
+    super(LocalEmulatorEnvironment, self).__init__(args, output_manager,
+                                                   error_func)
+    self._avd_config = avd.AvdConfig(args.avd_config)
+    if args.emulator_count < 1:
+      error_func('--emulator-count must be >= 1')
+    elif args.emulator_count > _MAX_ANDROID_EMULATORS:
+      logging.warning('--emulator-count capped at %d.', _MAX_ANDROID_EMULATORS)
+    self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
+    self._emulator_window = args.emulator_window
+    self._writable_system = ((hasattr(args, 'use_webview_provider')
+                              and args.use_webview_provider)
+                             or (hasattr(args, 'replace_system_package')
+                                 and args.replace_system_package)
+                             or (hasattr(args, 'system_packages_to_remove')
+                                 and args.system_packages_to_remove))
+
+    self._emulator_instances = []
+    self._device_serials = []
+
+  #override
+  def SetUp(self):
+    self._avd_config.Install()
+
+    emulator_instances = [
+        self._avd_config.CreateInstance() for _ in range(self._emulator_count)
+    ]
+
+    def start_emulator_instance(e):
+
+      def impl(e):
+        try:
+          e.Start(
+              window=self._emulator_window,
+              writable_system=self._writable_system)
+        except avd.AvdException:
+          logging.exception('Failed to start emulator instance.')
+          return None
+        try:
+          device_utils.DeviceUtils(e.serial).WaitUntilFullyBooted()
+        except base_error.BaseError:
+          e.Stop()
+          raise
+        return e
+
+      def retry_on_timeout(exc):
+        return (isinstance(exc, device_errors.CommandTimeoutError)
+                or isinstance(exc, reraiser_thread.TimeoutError))
+
+      return timeout_retry.Run(
+          impl,
+          timeout=120 if self._writable_system else 30,
+          retries=2,
+          args=[e],
+          retry_if_func=retry_on_timeout)
+
+    parallel_emulators = parallelizer.SyncParallelizer(emulator_instances)
+    self._emulator_instances = [
+        emu
+        for emu in parallel_emulators.pMap(start_emulator_instance).pGet(None)
+        if emu is not None
+    ]
+    self._device_serials = [e.serial for e in self._emulator_instances]
+
+    if not self._emulator_instances:
+      raise Exception('Failed to start any instances of the emulator.')
+    elif len(self._emulator_instances) < self._emulator_count:
+      logging.warning(
+          'Running with fewer emulator instances than requested (%d vs %d)',
+          len(self._emulator_instances), self._emulator_count)
+
+    super(LocalEmulatorEnvironment, self).SetUp()
+
+  #override
+  def TearDown(self):
+    try:
+      super(LocalEmulatorEnvironment, self).TearDown()
+    finally:
+      parallelizer.SyncParallelizer(self._emulator_instances).Stop()
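
SetUp above starts every emulator in parallel and keeps only the instances that actually boot. A self-contained sketch of that pattern, substituting concurrent.futures for devil's parallelizer (FakeInstance is a hypothetical stand-in for _AvdInstance, not part of this change):

    from concurrent.futures import ThreadPoolExecutor

    class FakeInstance(object):
      def __init__(self, serial):
        self.serial = serial
      def Start(self):
        if self.serial.endswith('2'):  # Simulate one instance failing to boot.
          raise RuntimeError('boot failure')

    def start_or_none(instance):
      try:
        instance.Start()
        return instance
      except RuntimeError:
        return None

    instances = [FakeInstance('emulator-555%d' % i) for i in range(4)]
    with ThreadPoolExecutor(max_workers=len(instances)) as pool:
      started = [i for i in pool.map(start_or_none, instances) if i]
    print([i.serial for i in started])  # Three of the four instances survive.
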
diff --git a/src/build/android/pylib/local/emulator/proto/__init__.py b/src/build/android/pylib/local/emulator/proto/__init__.py
new file mode 100644
index 0000000..4a12e35
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/emulator/proto/avd.proto b/src/build/android/pylib/local/emulator/proto/avd.proto
new file mode 100644
index 0000000..b06da49
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/avd.proto
@@ -0,0 +1,75 @@
+
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+
+package tools.android.avd.proto;
+
+message CIPDPackage {
+  // CIPD package name.
+  string package_name = 1;
+  // CIPD package version to use.
+  // Ignored when creating AVD packages.
+  string version = 2;
+  // Path into which the package should be installed.
+  // src-relative.
+  string dest_path = 3;
+}
+
+message ScreenSettings {
+  // Screen height in pixels.
+  uint32 height = 1;
+
+  // Screen width in pixels.
+  uint32 width = 2;
+
+  // Screen density in dpi.
+  uint32 density = 3;
+}
+
+message SdcardSettings {
+  // Size of the sdcard that should be created for this AVD.
+  // Can be anything that `mksdcard` or `avdmanager -c` would accept:
+  //   - a number of bytes
+  //   - a number followed by K, M, or G, indicating that many
+  //     KiB, MiB, or GiB, respectively.
+  string size = 1;
+}
+
+message AvdSettings {
+  // Settings pertaining to the AVD's screen.
+  ScreenSettings screen = 1;
+
+  // Settings pertaining to the AVD's sdcard.
+  SdcardSettings sdcard = 2;
+
+  // Advanced Features for AVD. The <key,value> pairs here will override the
+  // default ones in the given system image.
+  // See https://bit.ly/2P1qK2X for all the available keys.
+  // The values should be on, off, default, or null
+  map<string, string> advanced_features = 3;
+
+  // The physical RAM size on the device, in megabytes.
+  uint32 ram_size = 4;
+}
+
+message Avd {
+  // The emulator to use in running the AVD.
+  CIPDPackage emulator_package = 1;
+
+  // The system image to use.
+  CIPDPackage system_image_package = 2;
+  // The name of the system image to use, as reported by sdkmanager.
+  string system_image_name = 3;
+
+  // The AVD to create or use.
+  // (Only the package_name is used during AVD creation.)
+  CIPDPackage avd_package = 4;
+  // The name of the AVD to create or use.
+  string avd_name = 5;
+
+  // How to configure the AVD at creation.
+  AvdSettings avd_settings = 6;
+}
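
For reference, here is a sketch of how a textpb instance of the Avd message above is parsed via the generated bindings in avd_pb2.py; the field values are illustrative, not real package data:

    from google.protobuf import text_format

    from pylib.local.emulator.proto import avd_pb2

    # Hypothetical textpb contents; field names come from avd.proto above.
    textpb = '''
    avd_name: "generic_android28"
    system_image_name: "system-images;android-28;google_apis;x86"
    avd_settings {
      screen { height: 1920 width: 1080 density: 480 }
      ram_size: 2048
    }
    '''
    message = text_format.Parse(textpb, avd_pb2.Avd())
    print(message.avd_settings.screen.density)  # 480
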
diff --git a/src/build/android/pylib/local/emulator/proto/avd_pb2.py b/src/build/android/pylib/local/emulator/proto/avd_pb2.py
new file mode 100644
index 0000000..49cc1aa
--- /dev/null
+++ b/src/build/android/pylib/local/emulator/proto/avd_pb2.py
@@ -0,0 +1,362 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: avd.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='avd.proto',
+  package='tools.android.avd.proto',
+  syntax='proto3',
+  serialized_options=None,
+  serialized_pb=b'\n\tavd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"@\n\x0eScreenSettings\x12\x0e\n\x06height\x18\x01 \x01(\r\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0f\n\x07\x64\x65nsity\x18\x03 \x01(\r\"\x1e\n\x0eSdcardSettings\x12\x0c\n\x04size\x18\x01 \x01(\t\"\xa1\x02\n\x0b\x41vdSettings\x12\x37\n\x06screen\x18\x01 \x01(\x0b\x32\'.tools.android.avd.proto.ScreenSettings\x12\x37\n\x06sdcard\x18\x02 \x01(\x0b\x32\'.tools.android.avd.proto.SdcardSettings\x12U\n\x11\x61\x64vanced_features\x18\x03 \x03(\x0b\x32:.tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry\x12\x10\n\x08ram_size\x18\x04 \x01(\r\x1a\x37\n\x15\x41\x64vancedFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xad\x02\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\t\x12:\n\x0c\x61vd_settings\x18\x06 \x01(\x0b\x32$.tools.android.avd.proto.AvdSettingsb\x06proto3'
+)
+
+
+
+
+_CIPDPACKAGE = _descriptor.Descriptor(
+  name='CIPDPackage',
+  full_name='tools.android.avd.proto.CIPDPackage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='package_name', full_name='tools.android.avd.proto.CIPDPackage.package_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='tools.android.avd.proto.CIPDPackage.version', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dest_path', full_name='tools.android.avd.proto.CIPDPackage.dest_path', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=38,
+  serialized_end=109,
+)
+
+
+_SCREENSETTINGS = _descriptor.Descriptor(
+  name='ScreenSettings',
+  full_name='tools.android.avd.proto.ScreenSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='height', full_name='tools.android.avd.proto.ScreenSettings.height', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='width', full_name='tools.android.avd.proto.ScreenSettings.width', index=1,
+      number=2, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='density', full_name='tools.android.avd.proto.ScreenSettings.density', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=111,
+  serialized_end=175,
+)
+
+
+_SDCARDSETTINGS = _descriptor.Descriptor(
+  name='SdcardSettings',
+  full_name='tools.android.avd.proto.SdcardSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='size', full_name='tools.android.avd.proto.SdcardSettings.size', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=177,
+  serialized_end=207,
+)
+
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY = _descriptor.Descriptor(
+  name='AdvancedFeaturesEntry',
+  full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=b'8\001',
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=444,
+  serialized_end=499,
+)
+
+_AVDSETTINGS = _descriptor.Descriptor(
+  name='AvdSettings',
+  full_name='tools.android.avd.proto.AvdSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='screen', full_name='tools.android.avd.proto.AvdSettings.screen', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sdcard', full_name='tools.android.avd.proto.AvdSettings.sdcard', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='advanced_features', full_name='tools.android.avd.proto.AvdSettings.advanced_features', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ram_size', full_name='tools.android.avd.proto.AvdSettings.ram_size', index=3,
+      number=4, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_AVDSETTINGS_ADVANCEDFEATURESENTRY, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=210,
+  serialized_end=499,
+)
+
+
+_AVD = _descriptor.Descriptor(
+  name='Avd',
+  full_name='tools.android.avd.proto.Avd',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='emulator_package', full_name='tools.android.avd.proto.Avd.emulator_package', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='system_image_package', full_name='tools.android.avd.proto.Avd.system_image_package', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='system_image_name', full_name='tools.android.avd.proto.Avd.system_image_name', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_package', full_name='tools.android.avd.proto.Avd.avd_package', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_name', full_name='tools.android.avd.proto.Avd.avd_name', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='avd_settings', full_name='tools.android.avd.proto.Avd.avd_settings', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=502,
+  serialized_end=803,
+)
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY.containing_type = _AVDSETTINGS
+_AVDSETTINGS.fields_by_name['screen'].message_type = _SCREENSETTINGS
+_AVDSETTINGS.fields_by_name['sdcard'].message_type = _SDCARDSETTINGS
+_AVDSETTINGS.fields_by_name['advanced_features'].message_type = _AVDSETTINGS_ADVANCEDFEATURESENTRY
+_AVD.fields_by_name['emulator_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['system_image_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['avd_package'].message_type = _CIPDPACKAGE
+_AVD.fields_by_name['avd_settings'].message_type = _AVDSETTINGS
+DESCRIPTOR.message_types_by_name['CIPDPackage'] = _CIPDPACKAGE
+DESCRIPTOR.message_types_by_name['ScreenSettings'] = _SCREENSETTINGS
+DESCRIPTOR.message_types_by_name['SdcardSettings'] = _SDCARDSETTINGS
+DESCRIPTOR.message_types_by_name['AvdSettings'] = _AVDSETTINGS
+DESCRIPTOR.message_types_by_name['Avd'] = _AVD
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CIPDPackage = _reflection.GeneratedProtocolMessageType('CIPDPackage', (_message.Message,), {
+  'DESCRIPTOR' : _CIPDPACKAGE,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.CIPDPackage)
+  })
+_sym_db.RegisterMessage(CIPDPackage)
+
+ScreenSettings = _reflection.GeneratedProtocolMessageType('ScreenSettings', (_message.Message,), {
+  'DESCRIPTOR' : _SCREENSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.ScreenSettings)
+  })
+_sym_db.RegisterMessage(ScreenSettings)
+
+SdcardSettings = _reflection.GeneratedProtocolMessageType('SdcardSettings', (_message.Message,), {
+  'DESCRIPTOR' : _SDCARDSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.SdcardSettings)
+  })
+_sym_db.RegisterMessage(SdcardSettings)
+
+AvdSettings = _reflection.GeneratedProtocolMessageType('AvdSettings', (_message.Message,), {
+
+  'AdvancedFeaturesEntry' : _reflection.GeneratedProtocolMessageType('AdvancedFeaturesEntry', (_message.Message,), {
+    'DESCRIPTOR' : _AVDSETTINGS_ADVANCEDFEATURESENTRY,
+    '__module__' : 'avd_pb2'
+    # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry)
+    })
+  ,
+  'DESCRIPTOR' : _AVDSETTINGS,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings)
+  })
+_sym_db.RegisterMessage(AvdSettings)
+_sym_db.RegisterMessage(AvdSettings.AdvancedFeaturesEntry)
+
+Avd = _reflection.GeneratedProtocolMessageType('Avd', (_message.Message,), {
+  'DESCRIPTOR' : _AVD,
+  '__module__' : 'avd_pb2'
+  # @@protoc_insertion_point(class_scope:tools.android.avd.proto.Avd)
+  })
+_sym_db.RegisterMessage(Avd)
+
+
+_AVDSETTINGS_ADVANCEDFEATURESENTRY._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/build/android/pylib/local/local_test_server_spawner.py b/src/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000..f21f1be
--- /dev/null
+++ b/src/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,101 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import json
+import time
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from devil.android import forwarder
+from devil.android import ports
+from pylib.base import test_server
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import chrome_test_server_spawner
+
+
+# The tests should not need more than one test server instance.
+MAX_TEST_SERVER_INSTANCES = 1
+
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied once (before the timeout).
+  """
+  sleep_time_sec = 0.025
+  for _ in range(1, max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
+
+
+class PortForwarderAndroid(chrome_test_server_spawner.PortForwarder):
+  def __init__(self, device, tool):
+    self.device = device
+    self.tool = tool
+
+  def Map(self, port_pairs):
+    forwarder.Forwarder.Map(port_pairs, self.device, self.tool)
+
+  def GetDevicePortForHostPort(self, host_port):
+    return forwarder.Forwarder.DevicePortForHostPort(host_port)
+
+  def WaitHostPortAvailable(self, port):
+    return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+  def WaitPortNotAvailable(self, port):
+    return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+  def WaitDevicePortReady(self, port):
+    return _WaitUntil(lambda: ports.IsDevicePortUsed(self.device, port))
+
+  def Unmap(self, device_port):
+    forwarder.Forwarder.UnmapDevicePort(device_port, self.device)
+
+
+class LocalTestServerSpawner(test_server.TestServer):
+
+  def __init__(self, port, device, tool):
+    super(LocalTestServerSpawner, self).__init__()
+    self._device = device
+    self._spawning_server = chrome_test_server_spawner.SpawningServer(
+        port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES)
+    self._tool = tool
+
+  @property
+  def server_address(self):
+    return self._spawning_server.server.server_address
+
+  @property
+  def port(self):
+    return self.server_address[1]
+
+  #override
+  def SetUp(self):
+    # See net/test/spawned_test_server/remote_test_server.h for description of
+    # the fields in the config file.
+    test_server_config = json.dumps({
+      'spawner_url_base': 'http://localhost:%d' % self.port
+    })
+    self._device.WriteFile(
+        '%s/net-test-server-config' % self._device.GetExternalStoragePath(),
+        test_server_config)
+    forwarder.Forwarder.Map(
+        [(self.port, self.port)], self._device, self._tool)
+    self._spawning_server.Start()
+
+  #override
+  def Reset(self):
+    self._spawning_server.CleanupState()
+
+  #override
+  def TearDown(self):
+    self.Reset()
+    self._spawning_server.Stop()
+    forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
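
_WaitUntil above polls with a doubling sleep (25 ms, 50 ms, 100 ms, ...) capped at one second, checking the predicate at most max_attempts - 1 times. A standalone sketch of the same backoff loop, with a predicate that flips true after a short deadline:

    import time

    def wait_until(predicate, max_attempts=5):
      # Capped exponential backoff, mirroring _WaitUntil above.
      sleep_time_sec = 0.025
      for _ in range(1, max_attempts):
        if predicate():
          return True
        time.sleep(sleep_time_sec)
        sleep_time_sec = min(1, sleep_time_sec * 2)
      return False

    deadline = time.time() + 0.05
    print(wait_until(lambda: time.time() > deadline))  # True after a few polls.
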
diff --git a/src/build/android/pylib/local/machine/__init__.py b/src/build/android/pylib/local/machine/__init__.py
new file mode 100644
index 0000000..ca3e206
--- /dev/null
+++ b/src/build/android/pylib/local/machine/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/local/machine/local_machine_environment.py b/src/build/android/pylib/local/machine/local_machine_environment.py
new file mode 100644
index 0000000..d198f89
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_environment.py
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import devil_chromium
+from pylib import constants
+from pylib.base import environment
+
+
+class LocalMachineEnvironment(environment.Environment):
+
+  def __init__(self, _args, output_manager, _error_func):
+    super(LocalMachineEnvironment, self).__init__(output_manager)
+
+    devil_chromium.Initialize(
+        output_directory=constants.GetOutDirectory())
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
diff --git a/src/build/android/pylib/local/machine/local_machine_junit_test_run.py b/src/build/android/pylib/local/machine/local_machine_junit_test_run.py
new file mode 100644
index 0000000..a64b63b
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -0,0 +1,309 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+import collections
+import json
+import logging
+import multiprocessing
+import os
+import select
+import subprocess
+import sys
+import zipfile
+
+from six.moves import range  # pylint: disable=redefined-builtin
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import host_paths
+from pylib.results import json_results
+from py_utils import tempfile_ext
+
+
+# These classes belong to the test-running machinery and are excluded from
+# test discovery by the test runner. See:
+# https://android.googlesource.com/platform/frameworks/testing/+/android-support-test/runner/src/main/java/android/support/test/internal/runner/TestRequestBuilder.java
+# base/test/android/javatests/src/org/chromium/base/test/BaseChromiumAndroidJUnitRunner.java # pylint: disable=line-too-long
+_EXCLUDED_CLASSES_PREFIXES = ('android', 'junit', 'org/bouncycastle/util',
+                              'org/hamcrest', 'org/junit', 'org/mockito')
+
+# Suites we shouldn't shard, usually because they don't contain enough test
+# cases.
+_EXCLUDED_SUITES = {
+    'password_check_junit_tests',
+    'touch_to_fill_junit_tests',
+}
+
+
+# Over-sharding can actually increase runtime, especially on smaller suites.
+# Locally, media_base_junit_tests takes 4.3 sec with 1 shard and 6 sec with
+# 2 or more shards.
+_MIN_CLASSES_PER_SHARD = 8
+
+
+class LocalMachineJunitTestRun(test_run.TestRun):
+  def __init__(self, env, test_instance):
+    super(LocalMachineJunitTestRun, self).__init__(env, test_instance)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    pass
+
+  def _CreateJarArgsList(self, json_result_file_paths, group_test_list, shards):
+    # Creates a list of jar_args. Crucially, each jar_args entry gets its
+    # own json_results file to write test results to and its own set of
+    # tests to run, as specified via -gtest-filter.
+    jar_args_list = [['-json-results-file', result_file]
+                     for result_file in json_result_file_paths]
+    for index, jar_arg in enumerate(jar_args_list):
+      if shards > 1:
+        jar_arg.extend(['-gtest-filter', ':'.join(group_test_list[index])])
+      elif self._test_instance.test_filter:
+        jar_arg.extend(['-gtest-filter', self._test_instance.test_filter])
+
+      if self._test_instance.package_filter:
+        jar_arg.extend(['-package-filter', self._test_instance.package_filter])
+      if self._test_instance.runner_filter:
+        jar_arg.extend(['-runner-filter', self._test_instance.runner_filter])
+
+    return jar_args_list
+
+  def _CreateJvmArgsList(self):
+    # Creates a list of jvm_args (robolectric, code coverage, etc...)
+    jvm_args = [
+        '-Drobolectric.dependency.dir=%s' %
+        self._test_instance.robolectric_runtime_deps_dir,
+        '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
+        '-Drobolectric.resourcesMode=binary',
+    ]
+    if logging.getLogger().isEnabledFor(logging.INFO):
+      jvm_args += ['-Drobolectric.logging=stdout']
+    if self._test_instance.debug_socket:
+      jvm_args += [
+          '-agentlib:jdwp=transport=dt_socket'
+          ',server=y,suspend=y,address=%s' % self._test_instance.debug_socket
+      ]
+
+    if self._test_instance.coverage_dir:
+      if not os.path.exists(self._test_instance.coverage_dir):
+        os.makedirs(self._test_instance.coverage_dir)
+      elif not os.path.isdir(self._test_instance.coverage_dir):
+        raise Exception('--coverage-dir takes a directory, not file path.')
+      if self._test_instance.coverage_on_the_fly:
+        jacoco_coverage_file = os.path.join(
+            self._test_instance.coverage_dir,
+            '%s.exec' % self._test_instance.suite)
+        jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                         'third_party', 'jacoco', 'lib',
+                                         'jacocoagent.jar')
+
+        # inclnolocationclasses is false to prevent a NoClassDefFoundError.
+        jacoco_args = '-javaagent:{}=destfile={},inclnolocationclasses=false'
+        jvm_args.append(
+            jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
+      else:
+        jvm_args.append('-Djacoco-agent.destfile=%s' %
+                        os.path.join(self._test_instance.coverage_dir,
+                                     '%s.exec' % self._test_instance.suite))
+
+    return jvm_args
+
+  #override
+  def RunTests(self, results):
+    wrapper_path = os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
+                                self._test_instance.suite)
+
+    # This avoids searching through the classpath jars for test classes,
+    # which takes about 1-2 seconds.
+    # Do not shard when a test filter is present since we do not know at this
+    # point which tests will be filtered out.
+    if (self._test_instance.shards == 1 or self._test_instance.test_filter
+        or self._test_instance.suite in _EXCLUDED_SUITES):
+      test_classes = []
+      shards = 1
+    else:
+      test_classes = _GetTestClasses(wrapper_path)
+      shards = ChooseNumOfShards(test_classes, self._test_instance.shards)
+
+    logging.info('Running tests on %d shard(s).', shards)
+    group_test_list = GroupTestsForShard(shards, test_classes)
+
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      cmd_list = [[wrapper_path] for _ in range(shards)]
+      json_result_file_paths = [
+          os.path.join(temp_dir, 'results%d.json' % i) for i in range(shards)
+      ]
+      jar_args_list = self._CreateJarArgsList(json_result_file_paths,
+                                              group_test_list, shards)
+      for i in range(shards):
+        cmd_list[i].extend(['--jar-args', '"%s"' % ' '.join(jar_args_list[i])])
+
+      jvm_args = self._CreateJvmArgsList()
+      if jvm_args:
+        for cmd in cmd_list:
+          cmd.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])
+
+      AddPropertiesJar(cmd_list, temp_dir, self._test_instance.resource_apk)
+
+      procs = [
+          subprocess.Popen(cmd,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.STDOUT) for cmd in cmd_list
+      ]
+      PrintProcessesStdout(procs)
+
+      results_list = []
+      try:
+        for json_file_path in json_result_file_paths:
+          with open(json_file_path, 'r') as f:
+            results_list += json_results.ParseResultsFromJson(
+                json.loads(f.read()))
+      except IOError:
+        # In the case of a failure in the JUnit or Robolectric test runner
+        # the output json file may never be written.
+        results_list = [
+          base_test_result.BaseTestResult(
+              'Test Runner Failure', base_test_result.ResultType.UNKNOWN)
+        ]
+
+      test_run_results = base_test_result.TestRunResults()
+      test_run_results.AddResults(results_list)
+      results.append(test_run_results)
+
+  #override
+  def TearDown(self):
+    pass
+
+
+def AddPropertiesJar(cmd_list, temp_dir, resource_apk):
+  # Create properties file for Robolectric test runners so they can find the
+  # binary resources.
+  properties_jar_path = os.path.join(temp_dir, 'properties.jar')
+  with zipfile.ZipFile(properties_jar_path, 'w') as z:
+    z.writestr('com/android/tools/test_config.properties',
+               'android_resource_apk=%s' % resource_apk)
+
+  for cmd in cmd_list:
+    cmd.extend(['--classpath', properties_jar_path])
+
+
+def ChooseNumOfShards(test_classes, shards):
+  # Don't override requests to not shard.
+  if shards == 1:
+    return 1
+
+  # Sharding doesn't reduce runtime on just a few tests.
+  if shards > (len(test_classes) // _MIN_CLASSES_PER_SHARD) or shards < 1:
+    shards = max(1, (len(test_classes) // _MIN_CLASSES_PER_SHARD))
+
+  # Local tests of explicit --shard values show that max speed is achieved
+  # at cpu_count() / 2.
+  # Using -XX:TieredStopAtLevel=1 is required for this result. The flag reduces
+  # CPU time by two-thirds, making sharding more effective.
+  shards = max(1, min(shards, multiprocessing.cpu_count() // 2))
+  # Can have at minimum one test_class per shard.
+  shards = min(len(test_classes), shards)
+
+  return shards
+
+
+def GroupTestsForShard(num_of_shards, test_classes):
+  """Groups tests that will be ran on each shard.
+
+  Args:
+    num_of_shards: number of shards to split tests between.
+    test_classes: A list of test_class files in the jar.
+
+  Returns:
+    A dict mapping shard index to a list of test class filters.
+  """
+  test_dict = {i: [] for i in range(num_of_shards)}
+
+  # Round-robin test distribution reduces the chance that a sequential group
+  # of classes all have an unusually high number of tests.
+  for count, test_cls in enumerate(test_classes):
+    test_cls = test_cls.replace('.class', '*')
+    test_cls = test_cls.replace('/', '.')
+    test_dict[count % num_of_shards].append(test_cls)
+
+  return test_dict
+
+
+def PrintProcessesStdout(procs):
+  """Prints the stdout of all the processes.
+
+  Buffers the stdout of the processes and prints it when finished.
+
+  Args:
+    procs: A list of subprocesses.
+
+  Returns: N/A
+  """
+  streams = [p.stdout for p in procs]
+  outputs = collections.defaultdict(list)
+  first_fd = streams[0].fileno()
+
+  while streams:
+    rstreams, _, _ = select.select(streams, [], [])
+    for stream in rstreams:
+      line = stream.readline()
+      if line:
+        # Stream just one process's output live so the user can see work
+        # being done rather than waiting for it all at the end.
+        if stream.fileno() == first_fd:
+          sys.stdout.write(line)
+        else:
+          outputs[stream.fileno()].append(line)
+      else:
+        streams.remove(stream)  # End of stream.
+
+  for p in procs:
+    sys.stdout.write(''.join(outputs[p.stdout.fileno()]))
+
+
+def _GetTestClasses(file_path):
+  test_jar_paths = subprocess.check_output([file_path, '--print-classpath'])
+  test_jar_paths = test_jar_paths.split(':')
+
+  test_classes = []
+  for test_jar_path in test_jar_paths:
+    # Avoid searching through jars that are for the test runner.
+    # TODO(crbug.com/1144077): Use robolectric buildconfig file arg.
+    if 'third_party/robolectric/' in test_jar_path:
+      continue
+
+    test_classes += _GetTestClassesFromJar(test_jar_path)
+
+  logging.info('Found %d test classes in class_path jars.', len(test_classes))
+  return test_classes
+
+
+def _GetTestClassesFromJar(test_jar_path):
+  """Returns a list of test classes from a jar.
+
+  Test files end in Test, this is enforced:
+  //tools/android/errorprone_plugin/src/org/chromium/tools/errorprone
+  /plugin/TestClassNameCheck.java
+
+  Args:
+    test_jar_path: Path to the jar.
+
+  Return:
+    Returns a list of test classes that were in the jar.
+  """
+  class_list = []
+  with zipfile.ZipFile(test_jar_path, 'r') as zip_f:
+    for test_class in zip_f.namelist():
+      if test_class.startswith(_EXCLUDED_CLASSES_PREFIXES):
+        continue
+      if test_class.endswith('Test.class') and '$' not in test_class:
+        class_list.append(test_class)
+
+  return class_list
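
PrintProcessesStdout above multiplexes the shard processes' pipes with select so output can be shown while shards are still running. A stripped-down, runnable sketch of that pattern (POSIX only; /bin/echo is a hypothetical stand-in for the test wrapper, and unlike the implementation above, this prints all streams interleaved rather than buffering everything but the first):

    import select
    import subprocess
    import sys

    procs = [
        subprocess.Popen(['/bin/echo', 'shard %d done' % i],
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        for i in range(3)
    ]
    streams = [p.stdout for p in procs]
    while streams:
      # Block until at least one pipe has data (or EOF) ready to read.
      rstreams, _, _ = select.select(streams, [], [])
      for stream in rstreams:
        line = stream.readline()
        if line:
          sys.stdout.write(line.decode())
        else:
          streams.remove(stream)  # End of stream.
    for p in procs:
      p.wait()
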
diff --git a/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py b/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
new file mode 100755
index 0000000..2bbe561
--- /dev/null
+++ b/src/build/android/pylib/local/machine/local_machine_junit_test_run_test.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+from __future__ import absolute_import
+import os
+import unittest
+
+from pylib.local.machine import local_machine_junit_test_run
+from py_utils import tempfile_ext
+from mock import patch  # pylint: disable=import-error
+
+
+class LocalMachineJunitTestRunTests(unittest.TestCase):
+  def testAddPropertiesJar(self):
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      apk = 'resource_apk'
+      cmd_list = []
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(cmd_list, [])
+      cmd_list = [['test1']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(
+          cmd_list[0],
+          ['test1', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+      cmd_list = [['test1'], ['test2']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(len(cmd_list[0]), 3)
+      self.assertEqual(
+          cmd_list[1],
+          ['test2', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+
+  @patch('multiprocessing.cpu_count')
+  def testChooseNumOfShards(self, mock_cpu_count):
+    mock_cpu_count.return_value = 36
+    # Test shards is 1 when filter is set.
+    test_shards = 1
+    test_classes = [1] * 50
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEqual(1, shards)
+
+    # Tests setting shards.
+    test_shards = 4
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEqual(4, shards)
+
+    # Tests the minimum number of classes per shard.
+    test_classes = [1] * 20
+    test_shards = 8
+    shards = local_machine_junit_test_run.ChooseNumOfShards(
+        test_classes, test_shards)
+    self.assertEqual(2, shards)
+
+  def testGroupTestsForShard(self):
+    test_classes = []
+    results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes)
+    self.assertDictEqual(results, {0: []})
+
+    test_classes = ['dir/test.class'] * 5
+    results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes)
+    self.assertDictEqual(results, {0: ['dir.test*'] * 5})
+
+    test_classes = ['dir/test.class'] * 5
+    results = local_machine_junit_test_run.GroupTestsForShard(2, test_classes)
+    ans_dict = {
+        0: ['dir.test*'] * 3,
+        1: ['dir.test*'] * 2,
+    }
+    self.assertDictEqual(results, ans_dict)
+
+    test_classes = ['a10 warthog', 'b17', 'SR71']
+    results = local_machine_junit_test_run.GroupTestsForShard(3, test_classes)
+    ans_dict = {
+        0: ['a10 warthog'],
+        1: ['b17'],
+        2: ['SR71'],
+    }
+    self.assertDictEqual(results, ans_dict)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/monkey/__init__.py b/src/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/monkey/__init__.py
diff --git a/src/build/android/pylib/monkey/monkey_test_instance.py b/src/build/android/pylib/monkey/monkey_test_instance.py
new file mode 100644
index 0000000..10b1131
--- /dev/null
+++ b/src/build/android/pylib/monkey/monkey_test_instance.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import random
+
+from pylib import constants
+from pylib.base import test_instance
+
+
+_SINGLE_EVENT_TIMEOUT = 100  # Milliseconds.
+
+
+class MonkeyTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, _):
+    super(MonkeyTestInstance, self).__init__()
+
+    self._categories = args.categories
+    self._event_count = args.event_count
+    self._seed = args.seed or random.randint(1, 100)
+    self._throttle = args.throttle
+    self._verbose_count = args.verbose_count
+
+    self._package = constants.PACKAGE_INFO[args.browser].package
+    self._activity = constants.PACKAGE_INFO[args.browser].activity
+
+    self._timeout_s = (
+        self.event_count * (self.throttle + _SINGLE_EVENT_TIMEOUT)) / 1000
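+    # For example (illustrative): 10000 events with a throttle of 0 yields a
+    # timeout of 10000 * (0 + 100) / 1000 = 1000 seconds.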
+
+  #override
+  def TestType(self):
+    return 'monkey'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def categories(self):
+    return self._categories
+
+  @property
+  def event_count(self):
+    return self._event_count
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def seed(self):
+    return self._seed
+
+  @property
+  def throttle(self):
+    return self._throttle
+
+  @property
+  def timeout(self):
+    return self._timeout_s
+
+  @property
+  def verbose_count(self):
+    return self._verbose_count
diff --git a/src/build/android/pylib/output/__init__.py b/src/build/android/pylib/output/__init__.py
new file mode 100644
index 0000000..a22a6ee
--- /dev/null
+++ b/src/build/android/pylib/output/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/output/local_output_manager.py b/src/build/android/pylib/output/local_output_manager.py
new file mode 100644
index 0000000..89becd7
--- /dev/null
+++ b/src/build/android/pylib/output/local_output_manager.py
@@ -0,0 +1,45 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import time
+import urllib
+
+from pylib.base import output_manager
+
+
+class LocalOutputManager(output_manager.OutputManager):
+  """Saves and manages test output files locally in output directory.
+
+  Files will be saved in {output_dir}/TEST_RESULTS_{timestamp}.
+  """
+
+  def __init__(self, output_dir):
+    super(LocalOutputManager, self).__init__()
+    timestamp = time.strftime(
+        '%Y_%m_%dT%H_%M_%S', time.localtime())
+    self._output_root = os.path.abspath(os.path.join(
+        output_dir, 'TEST_RESULTS_%s' % timestamp))
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    return LocalArchivedFile(
+        out_filename, out_subdir, datatype, self._output_root)
+
+
+class LocalArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype, out_root):
+    super(LocalArchivedFile, self).__init__(
+        out_filename, out_subdir, datatype)
+    self._output_path = os.path.join(out_root, out_subdir, out_filename)
+
+  def _Link(self):
+    return 'file://%s' % urllib.quote(self._output_path)
+
+  def _Archive(self):
+    if not os.path.exists(os.path.dirname(self._output_path)):
+      os.makedirs(os.path.dirname(self._output_path))
+    shutil.copy(self.name, self._output_path)
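+
+# Layout sketch (hypothetical values): with output_dir '/tmp/out', a TEXT file
+# archived as ('log.txt', 'logcat') is copied to
+# /tmp/out/TEST_RESULTS_2020_01_01T00_00_00/logcat/log.txt, and _Link()
+# returns that path as a percent-encoded file:// URL.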
diff --git a/src/build/android/pylib/output/local_output_manager_test.py b/src/build/android/pylib/output/local_output_manager_test.py
new file mode 100755
index 0000000..7954350
--- /dev/null
+++ b/src/build/android/pylib/output/local_output_manager_test.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import shutil
+import tempfile
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import local_output_manager
+
+
+class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_dir = tempfile.mkdtemp()
+    self._output_manager = local_output_manager.LocalOutputManager(
+        self._output_dir)
+
+  def testUsableTempFile(self):
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+  def tearDown(self):
+    shutil.rmtree(self._output_dir)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/output/noop_output_manager.py b/src/build/android/pylib/output/noop_output_manager.py
new file mode 100644
index 0000000..d29a743
--- /dev/null
+++ b/src/build/android/pylib/output/noop_output_manager.py
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import output_manager
+
+# TODO(jbudorick): This class is currently mostly unused.
+# Add a --bot-mode argument that all bots pass. If --bot-mode and
+# --local-output args are both not passed to test runner then use this
+# as the output manager impl.
+
+# pylint: disable=no-self-use
+
+class NoopOutputManager(output_manager.OutputManager):
+
+  def __init__(self):
+    super(NoopOutputManager, self).__init__()
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    del out_filename, out_subdir, datatype
+    return NoopArchivedFile()
+
+
+class NoopArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self):
+    super(NoopArchivedFile, self).__init__(None, None, None)
+
+  def Link(self):
+    """NoopArchivedFiles are not retained."""
+    return ''
+
+  def _Link(self):
+    pass
+
+  def Archive(self):
+    """NoopArchivedFiles are not retained."""
+    pass
+
+  def _Archive(self):
+    pass
diff --git a/src/build/android/pylib/output/noop_output_manager_test.py b/src/build/android/pylib/output/noop_output_manager_test.py
new file mode 100755
index 0000000..4e470ef
--- /dev/null
+++ b/src/build/android/pylib/output/noop_output_manager_test.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import noop_output_manager
+
+
+class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_manager = noop_output_manager.NoopOutputManager()
+
+  def testUsableTempFile(self):
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/output/remote_output_manager.py b/src/build/android/pylib/output/remote_output_manager.py
new file mode 100644
index 0000000..9fdb4bf
--- /dev/null
+++ b/src/build/android/pylib/output/remote_output_manager.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+from pylib.base import output_manager
+from pylib.output import noop_output_manager
+from pylib.utils import logdog_helper
+from pylib.utils import google_storage_helper
+
+
+class RemoteOutputManager(output_manager.OutputManager):
+
+  def __init__(self, bucket):
+    """Uploads output files to Google Storage or LogDog.
+
+    Files will be uploaded to either Google Storage or LogDog, depending on
+    the datatype.
+
+    Args:
+      bucket: Bucket to use when saving to Google Storage.
+    """
+    super(RemoteOutputManager, self).__init__()
+    self._bucket = bucket
+
+  #override
+  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+    if datatype == output_manager.Datatype.TEXT:
+      try:
+        logdog_helper.get_logdog_client()
+        return LogdogArchivedFile(out_filename, out_subdir, datatype)
+      except RuntimeError:
+        return noop_output_manager.NoopArchivedFile()
+    else:
+      if self._bucket is None:
+        return noop_output_manager.NoopArchivedFile()
+      return GoogleStorageArchivedFile(
+          out_filename, out_subdir, datatype, self._bucket)
+
+
+class LogdogArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype):
+    super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype)
+    self._stream_name = '%s_%s' % (out_subdir, out_filename)
+
+  def _Link(self):
+    return logdog_helper.get_viewer_url(self._stream_name)
+
+  def _Archive(self):
+    with open(self.name, 'r') as f:
+      logdog_helper.text(self._stream_name, f.read())
+
+
+class GoogleStorageArchivedFile(output_manager.ArchivedFile):
+
+  def __init__(self, out_filename, out_subdir, datatype, bucket):
+    super(GoogleStorageArchivedFile, self).__init__(
+        out_filename, out_subdir, datatype)
+    self._bucket = bucket
+    self._upload_path = None
+    self._content_addressed = None
+
+  def _PrepareArchive(self):
+    self._content_addressed = (self._datatype in (
+        output_manager.Datatype.HTML,
+        output_manager.Datatype.PNG,
+        output_manager.Datatype.JSON))
+    if self._content_addressed:
+      sha1 = hashlib.sha1()
+      with open(self.name, 'rb') as f:
+        sha1.update(f.read())
+      self._upload_path = sha1.hexdigest()
+    else:
+      self._upload_path = os.path.join(self._out_subdir, self._out_filename)
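+    # Content-addressed example (illustrative): an HTML file whose bytes hash
+    # to sha1 'd3b0...' is presumably stored under that digest in the bucket,
+    # so identical files are deduplicated across runs; other datatypes keep a
+    # subdir/filename path instead.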
+
+  def _Link(self):
+    return google_storage_helper.get_url_link(
+        self._upload_path, self._bucket)
+
+  def _Archive(self):
+    if (self._content_addressed and
+        google_storage_helper.exists(self._upload_path, self._bucket)):
+      return
+
+    google_storage_helper.upload(
+        self._upload_path, self.name, self._bucket, content_type=self._datatype)
diff --git a/src/build/android/pylib/output/remote_output_manager_test.py b/src/build/android/pylib/output/remote_output_manager_test.py
new file mode 100755
index 0000000..4c6c081
--- /dev/null
+++ b/src/build/android/pylib/output/remote_output_manager_test.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import remote_output_manager
+
+import mock  # pylint: disable=import-error
+
+
+@mock.patch('pylib.utils.google_storage_helper')
+class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+  def setUp(self):
+    self._output_manager = remote_output_manager.RemoteOutputManager(
+        'this-is-a-fake-bucket')
+
+  def testUsableTempFile(self, google_storage_helper_mock):
+    del google_storage_helper_mock
+    self.assertUsableTempFile(
+        self._output_manager._CreateArchivedFile(
+            'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/pexpect.py b/src/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..cf59fb0
--- /dev/null
+++ b/src/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+  pass
diff --git a/src/build/android/pylib/restart_adbd.sh b/src/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/src/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+  stop adbd
+  start adbd
+}
+
+restart &
diff --git a/src/build/android/pylib/results/__init__.py b/src/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/flakiness_dashboard/__init__.py b/src/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000..b2e542b
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,699 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# tools/blinkpy/web_tests/layout_package/json_results_generator.py
+# tools/blinkpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+  return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+  # FIXME: Kill this code once the server returns json instead of jsonp.
+  if HasJSONWrapper(json_content):
+    return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+  return json_content
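+
+
+# For example, StripJSONWrapper('ADD_RESULTS({"tests":{}});') returns
+# '{"tests":{}}'; content without the jsonp wrapper passes through unchanged.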
+
+
+def WriteJSON(json_object, file_path, callback=None):
+  # Specify separators in order to get compact encoding.
+  json_string = json.dumps(json_object, separators=(',', ':'))
+  if callback:
+    json_string = callback + '(' + json_string + ');'
+  with open(file_path, 'w') as fp:
+    fp.write(json_string)
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flattens the trie of paths, prepending a prefix to each."""
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if data and 'results' not in data:
+      result.update(ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
+def AddPathToTrie(path, value, trie):
+  """Inserts a single path and value into a directory trie structure."""
+  if '/' not in path:
+    trie[path] = value
+    return
+
+  directory, _, rest = path.partition('/')
+  if directory not in trie:
+    trie[directory] = {}
+  AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+  """Breaks a test name into dicts by directory
+
+  foo/bar/baz.html: 1ms
+  foo/bar/baz1.html: 3ms
+
+  becomes
+  foo: {
+      bar: {
+          baz.html: 1,
+          baz1.html: 3
+      }
+  }
+  """
+  trie = {}
+  for test_result in individual_test_timings:
+    test = test_result.test_name
+
+    AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+  return trie
+
+
+class TestResult(object):
+  """A simple class that represents a single test result."""
+
+  # Test modifier constants.
+  (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+  def __init__(self, test, failed=False, elapsed_time=0):
+    self.test_name = test
+    self.failed = failed
+    self.test_run_time = elapsed_time
+
+    test_name = test
+    try:
+      test_name = test.split('.')[1]
+    except IndexError:
+      _log.warn('Invalid test name: %s.', test)
+
+    if test_name.startswith('FAILS_'):
+      self.modifier = self.FAILS
+    elif test_name.startswith('FLAKY_'):
+      self.modifier = self.FLAKY
+    elif test_name.startswith('DISABLED_'):
+      self.modifier = self.DISABLED
+    else:
+      self.modifier = self.NONE
+
+  def Fixable(self):
+    return self.failed or self.modifier == self.DISABLED
+
+
+class JSONResultsGeneratorBase(object):
+  """A JSON results generator for generic tests."""
+
+  MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+  # Min time (seconds) that will be added to the JSON.
+  MIN_TIME = 1
+
+  # Note that in non-chromium tests those chars are used to indicate
+  # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+  PASS_RESULT = 'P'
+  SKIP_RESULT = 'X'
+  FAIL_RESULT = 'F'
+  FLAKY_RESULT = 'L'
+  NO_DATA_RESULT = 'N'
+
+  MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+                      TestResult.DISABLED: SKIP_RESULT,
+                      TestResult.FAILS: FAIL_RESULT,
+                      TestResult.FLAKY: FLAKY_RESULT}
+
+  VERSION = 4
+  VERSION_KEY = 'version'
+  RESULTS = 'results'
+  TIMES = 'times'
+  BUILD_NUMBERS = 'buildNumbers'
+  TIME = 'secondsSinceEpoch'
+  TESTS = 'tests'
+
+  FIXABLE_COUNT = 'fixableCount'
+  FIXABLE = 'fixableCounts'
+  ALL_FIXABLE_COUNT = 'allFixableCount'
+
+  RESULTS_FILENAME = 'results.json'
+  TIMES_MS_FILENAME = 'times_ms.json'
+  INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+  # line too long pylint: disable=line-too-long
+  URL_FOR_TEST_LIST_JSON = (
+      'https://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&'
+      'master=%s')
+  # pylint: enable=line-too-long
+
+  def __init__(self, builder_name, build_name, build_number,
+               results_file_base_path, builder_base_url,
+               test_results_map, svn_repositories=None,
+               test_results_server=None,
+               test_type='',
+               master_name=''):
+    """Modifies the results.json file. Grabs it off the archive directory
+    if it is not found locally.
+
+    Args:
+      builder_name: the builder name (e.g. Webkit).
+      build_name: the build name (e.g. webkit-rel).
+      build_number: the build number.
+      results_file_base_path: Absolute path to the directory containing the
+          results json file.
+      builder_base_url: the URL where we have the archived test results.
+          If this is None no archived results will be retrieved.
+      test_results_map: A dictionary that maps test_name to TestResult.
+      svn_repositories: A (json_field_name, svn_path) pair for SVN
+          repositories that tests rely on.  The SVN revision will be
+          included in the JSON with the given json_field_name.
+      test_results_server: server that hosts test results json.
+      test_type: test type string (e.g. 'layout-tests').
+      master_name: the name of the buildbot master.
+    """
+    self._builder_name = builder_name
+    self._build_name = build_name
+    self._build_number = build_number
+    self._builder_base_url = builder_base_url
+    self._results_directory = results_file_base_path
+
+    self._test_results_map = test_results_map
+    self._test_results = test_results_map.values()
+
+    self._svn_repositories = svn_repositories
+    if not self._svn_repositories:
+      self._svn_repositories = {}
+
+    self._test_results_server = test_results_server
+    self._test_type = test_type
+    self._master_name = master_name
+
+    self._archived_results = None
+
+  def GenerateJSONOutput(self):
+    json_object = self.GetJSON()
+    if json_object:
+      file_path = (
+          os.path.join(
+              self._results_directory,
+              self.INCREMENTAL_RESULTS_FILENAME))
+      WriteJSON(json_object, file_path)
+
+  def GenerateTimesMSFile(self):
+    times = TestTimingsTrie(self._test_results_map.values())
+    file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+    WriteJSON(times, file_path)
+
+  def GetJSON(self):
+    """Gets the results for the results.json file."""
+    results_json, error = self._GetArchivedJSONResults()
+    if error:
+      # If there was an error don't write a results.json file at all as it
+      # would lose all the information on the bot.
+      _log.error('Archive directory is inaccessible. Not modifying or '
+                 'clobbering the results.json file: %s', error)
+      return None
+
+    builder_name = self._builder_name
+    if results_json and builder_name not in results_json:
+      _log.debug('Builder name (%s) is not in the results.json file.',
+                 builder_name)
+
+    self._ConvertJSONToCurrentVersion(results_json)
+
+    if builder_name not in results_json:
+      results_json[builder_name] = (
+          self._CreateResultsForBuilderJSON())
+
+    results_for_builder = results_json[builder_name]
+
+    if builder_name:
+      self._InsertGenericMetaData(results_for_builder)
+
+    self._InsertFailureSummaries(results_for_builder)
+
+    # Update the all failing tests with result type and time.
+    tests = results_for_builder[self.TESTS]
+    all_failing_tests = self._GetFailedTestNames()
+    all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+    for test in all_failing_tests:
+      self._InsertTestTimeAndResult(test, tests)
+
+    return results_json
+
+  def SetArchivedResults(self, archived_results):
+    self._archived_results = archived_results
+
+  def UploadJSONFiles(self, json_files):
+    """Uploads the given json_files to the test_results_server (if the
+    test_results_server is given)."""
+    if not self._test_results_server:
+      return
+
+    if not self._master_name:
+      _log.error(
+          '--test-results-server was set, but --master-name was not.  Not '
+          'uploading JSON files.')
+      return
+
+    _log.info('Uploading JSON files for builder: %s', self._builder_name)
+    attrs = [('builder', self._builder_name),
+             ('testtype', self._test_type),
+             ('master', self._master_name)]
+
+    files = [(json_file, os.path.join(self._results_directory, json_file))
+             for json_file in json_files]
+
+    url = 'https://%s/testfile/upload' % self._test_results_server
+    # Set an upload timeout in case the appengine server is having problems;
+    # 120 seconds is more than enough to upload test results.
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'", response.code, response.read())
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err: # pylint: disable=broad-except
+      _log.error('Upload failed: %s', err)
+      return
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in second
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set([r.test_name for r in self._test_results if r.failed])
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in self.MODIFIER_TO_CHAR:
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _get_result_char(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in flakiness_dashboard_results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Download JSON file that only contains test
+    name list from test-results server. This is for generating incremental
+    JSON so the file generated has info for tests that failed before but
+    pass or are skipped from current run.
+
+    Returns (archived_results, error) tuple where error is None if results
+    were successfully read.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (urllib2.quote(self._test_results_server),
+                         urllib2.quote(self._builder_name),
+                         self.RESULTS_FILENAME,
+                         urllib2.quote(self._test_type),
+                         urllib2.quote(self._master_name)))
+
+    # pylint: disable=redefined-variable-type
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except urllib2.URLError as url_error:
+      error = url_error
+    # pylint: enable=redefined-variable-type
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception: # pylint: disable=broad-except
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if encoded_results and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
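+    # Worked example: inserting 'A' into [[3,'A'],[1,'Q']] yields
+    # [[4,'A'],[1,'Q']], while inserting 'F' yields [[1,'F'],[3,'A'],[1,'Q']].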
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """ Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+    """
+    self._InsertItemIntoRawList(results_for_builder,
+                                self._build_number, self.BUILD_NUMBERS)
+
+    # Include SVN revisions for the given repositories.
+    for (name, path) in self._svn_repositories:
+      # Note: for JSON file's backward-compatibility we use 'chrome' rather
+      # than 'chromium' here.
+      lowercase_name = name.lower()
+      if lowercase_name == 'chromium':
+        lowercase_name = 'chrome'
+      self._InsertItemIntoRawList(results_for_builder,
+                                  self._GetSVNRevision(path),
+                                  lowercase_name + 'Revision')
+
+    self._InsertItemIntoRawList(results_for_builder,
+                                int(time.time()),
+                                self.TIME)
+
+  def _InsertTestTimeAndResult(self, test_name, tests):
+    """ Insert a test item with its results to the given tests dictionary.
+
+    Args:
+      tests: Dictionary containing test result entries.
+    """
+
+    result = self._get_result_char(test_name)
+    test_time = self._GetTestTiming(test_name)
+
+    this_test = tests
+    for segment in test_name.split('/'):
+      if segment not in this_test:
+        this_test[segment] = {}
+      this_test = this_test[segment]
+
+    if not this_test:
+      self._PopulateResultsAndTimesJSON(this_test)
+
+    if self.RESULTS in this_test:
+      self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+    else:
+      this_test[self.RESULTS] = [[1, result]]
+
+    if self.TIMES in this_test:
+      self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+    else:
+      this_test[self.TIMES] = [[1, test_time]]
+
+  def _ConvertJSONToCurrentVersion(self, results_json):
+    """If the JSON does not match the current version, converts it to the
+    current version and adds in the new version number.
+    """
+    if self.VERSION_KEY in results_json:
+      archive_version = results_json[self.VERSION_KEY]
+      if archive_version == self.VERSION:
+        return
+    else:
+      archive_version = 3
+
+    # version 3->4
+    if archive_version == 3:
+      for results in results_json.values():
+        self._ConvertTestsToTrie(results)
+
+    results_json[self.VERSION_KEY] = self.VERSION
+
+  def _ConvertTestsToTrie(self, results):
+    if self.TESTS not in results:
+      return
+
+    test_results = results[self.TESTS]
+    test_results_trie = {}
+    for test in test_results.iterkeys():
+      single_test_result = test_results[test]
+      AddPathToTrie(test, single_test_result, test_results_trie)
+
+    results[self.TESTS] = test_results_trie
+
+  def _PopulateResultsAndTimesJSON(self, results_and_times):
+    results_and_times[self.RESULTS] = []
+    results_and_times[self.TIMES] = []
+    return results_and_times
+
+  def _CreateResultsForBuilderJSON(self):
+    results_for_builder = {}
+    results_for_builder[self.TESTS] = {}
+    return results_for_builder
+
+  def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+    """Removes items from the run-length encoded list after the final
+    item that exceeds the max number of builds to track.
+
+    Args:
+      encoded_list: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    num_builds = 0
+    index = 0
+    for result in encoded_list:
+      num_builds = num_builds + result[0]
+      index = index + 1
+      if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        return encoded_list[:index]
+    return encoded_list
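+    # Worked example: with the cap at 750, [[749, 'P'], [5, 'F'], [2, 'P']]
+    # is truncated to [[749, 'P'], [5, 'F']]; the running build count first
+    # exceeds the cap at the second entry, which is kept.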
+
+  def _NormalizeResultsJSON(self, test, test_name, tests):
+    """Prunes tests where all runs pass or tests that no longer exist, and
+    truncates all results to MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG.
+
+    Args:
+      test: ResultsAndTimes object for this test.
+      test_name: Name of the test.
+      tests: The JSON object with all the test results for this builder.
+    """
+    test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.RESULTS])
+    test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.TIMES])
+
+    is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+                                           self.PASS_RESULT)
+    is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+                                              self.NO_DATA_RESULT)
+    max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+    # Remove all passes/no-data from the results to reduce noise and
+    # filesize. If a test passes every run, but takes > MIN_TIME to run,
+    # don't throw away the data.
+    if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+      del tests[test_name]
+
+  # method could be a function pylint: disable=R0201
+  def _IsResultsAllOfType(self, results, result_type):
+    """Returns whether all the results are of the given type
+    (e.g. all passes)."""
+    return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+  def __init__(self, url, timeout_seconds):
+    self._url = url
+    self._timeout_seconds = timeout_seconds
+
+  def UploadAsMultipartFormData(self, files, attrs):
+    file_objs = []
+    for filename, path in files:
+      with open(path, 'rb') as fp:
+        file_objs.append(('file', filename, fp.read()))
+
+    # FIXME: We should use the same variable names for the formal and actual
+    # parameters.
+    content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+    return self._UploadData(content_type, data)
+
+  def _UploadData(self, content_type, data):
+    start = time.time()
+    end = start + self._timeout_seconds
+    while time.time() < end:
+      try:
+        request = urllib2.Request(self._url, data,
+                                  {'Content-Type': content_type})
+        return urllib2.urlopen(request)
+      except urllib2.HTTPError as e:
+        _log.warn("Received HTTP status %s loading \"%s\".  "
+                  'Retrying in 10 seconds...', e.code, e.filename)
+        time.sleep(10)
+
+
+def _GetMIMEType(filename):
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+
+  for key, value in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  for key, filename, value in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; '
+                 'filename="%s"' % (key, filename))
+    lines.append('Content-Type: %s' % _GetMIMEType(filename))
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
diff --git a/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
new file mode 100644
index 0000000..d6aee05
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.results.flakiness_dashboard import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+  def setUp(self):
+    self.builder_name = 'DUMMY_BUILDER_NAME'
+    self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILDER_NUMBER'
+
+    # For archived results.
+    self._json = None
+    self._num_runs = 0
+    self._tests_set = set([])
+    self._test_timings = {}
+    self._failed_count_map = {}
+
+    self._PASS_count = 0
+    self._DISABLED_count = 0
+    self._FLAKY_count = 0
+    self._FAILS_count = 0
+    self._fixable_count = 0
+
+    self._orig_write_json = json_results_generator.WriteJSON
+
+    # unused arguments ... pylint: disable=W0613
+    def _WriteJSONStub(json_object, file_path, callback=None):
+      pass
+
+    json_results_generator.WriteJSON = _WriteJSONStub
+
+  def tearDown(self):
+    json_results_generator.WriteJSON = self._orig_write_json
+
+  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+    tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+    DISABLED_tests = set([t for t in tests_set
+                          if t.startswith('DISABLED_')])
+    FLAKY_tests = set([t for t in tests_set
+                       if t.startswith('FLAKY_')])
+    FAILS_tests = set([t for t in tests_set
+                       if t.startswith('FAILS_')])
+    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+    failed_tests = set(failed_tests_list) - DISABLED_tests
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    for i, test in enumerate(tests_set):
+      test_timings[test] = float(self._num_runs * 100 + i)
+
+    test_results_map = dict()
+    for test in tests_set:
+      test_results_map[test] = json_results_generator.TestResult(
+          test, failed=(test in failed_tests),
+          elapsed_time=test_timings[test])
+
+    generator = json_results_generator.JSONResultsGeneratorBase(
+        self.builder_name, self.build_name, self.build_number,
+        '',
+        None,   # don't fetch past json results archive
+        test_results_map)
+
+    # Test incremental json results
+    incremental_json = generator.GetJSON()
+    self._VerifyJSONResults(
+        tests_set,
+        test_timings,
+        failed_count_map,
+        len(PASS_tests),
+        len(DISABLED_tests),
+        len(FLAKY_tests),
+        len(DISABLED_tests | failed_tests),
+        incremental_json,
+        1)
+
+    # We don't verify the results here, but at least we make sure the code
+    # runs without errors.
+    generator.GenerateJSONOutput()
+    generator.GenerateTimesMSFile()
+
+  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+                         PASS_count, DISABLED_count, FLAKY_count,
+                         fixable_count, json_obj, num_runs):
+    # Aliasing to a short name for better access to its constants.
+    JRG = json_results_generator.JSONResultsGeneratorBase
+
+    self.assertIn(JRG.VERSION_KEY, json_obj)
+    self.assertIn(self.builder_name, json_obj)
+
+    buildinfo = json_obj[self.builder_name]
+    self.assertIn(JRG.FIXABLE, buildinfo)
+    self.assertIn(JRG.TESTS, buildinfo)
+    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+    if tests_set or DISABLED_count:
+      fixable = {}
+      for fixable_items in buildinfo[JRG.FIXABLE]:
+        for (result_type, count) in fixable_items.iteritems():
+          if result_type in fixable:
+            fixable[result_type] = fixable[result_type] + count
+          else:
+            fixable[result_type] = count
+
+      if PASS_count:
+        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+      else:
+        self.assertTrue(JRG.PASS_RESULT not in fixable or
+                        fixable[JRG.PASS_RESULT] == 0)
+      if DISABLED_count:
+        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+      else:
+        self.assertTrue(JRG.SKIP_RESULT not in fixable or
+                        fixable[JRG.SKIP_RESULT] == 0)
+      if FLAKY_count:
+        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+      else:
+        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+                        fixable[JRG.FLAKY_RESULT] == 0)
+
+    if failed_count_map:
+      tests = buildinfo[JRG.TESTS]
+      for test_name in failed_count_map.iterkeys():
+        test = self._FindTestInTrie(test_name, tests)
+
+        failed = 0
+        for result in test[JRG.RESULTS]:
+          if result[1] == JRG.FAIL_RESULT:
+            failed += result[0]
+        self.assertEqual(failed_count_map[test_name], failed)
+
+        timing_count = 0
+        for timings in test[JRG.TIMES]:
+          if timings[1] == test_timings[test_name]:
+            timing_count = timings[0]
+        self.assertEqual(1, timing_count)
+
+    if fixable_count:
+      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+  def _FindTestInTrie(self, path, trie):
+    nodes = path.split('/')
+    sub_trie = trie
+    for node in nodes:
+      self.assertIn(node, sub_trie)
+      sub_trie = sub_trie[node]
+    return sub_trie
+
+  def testJSONGeneration(self):
+    self._TestJSONGeneration([], [])
+    self._TestJSONGeneration(['A1', 'B1'], [])
+    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+    self._TestJSONGeneration(
+        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+        ['FAILS_D6'])
+
+    # Generate JSON with the same test sets. (Both incremental results and
+    # archived results must be updated appropriately.)
+    self._TestJSONGeneration(
+        ['A', 'FLAKY_B', 'DISABLED_C'],
+        ['FAILS_D', 'FLAKY_E'])
+    self._TestJSONGeneration(
+        ['A', 'DISABLED_C', 'FLAKY_E'],
+        ['FLAKY_B', 'FAILS_D'])
+    self._TestJSONGeneration(
+        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+        ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSONGeneration(self):
+    # FIXME: Re-work tests to be more comprehensible and comprehensive.
+    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+  def testTestTimingsTrie(self):
+    individual_test_timings = []
+    individual_test_timings.append(
+        json_results_generator.TestResult(
+            'foo/bar/baz.html',
+            elapsed_time=1.2))
+    individual_test_timings.append(
+        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+    trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+    expected_trie = {
+        'bar.html': 0,
+        'foo': {
+            'bar': {
+                'baz.html': 1200,
+            }
+        }
+    }
+
+    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000..b68a898
--- /dev/null
+++ b/src/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml.dom.minidom
+import xml.parsers.expat
+
+
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+  """Writes test results to a JSON file and handles uploading that file to
+  the test results server.
+  """
+  def __init__(self, builder_name, build_name, build_number, tmp_folder,
+               test_results_map, test_results_server, test_type, master_name):
+    super(JSONResultsGenerator, self).__init__(
+        builder_name=builder_name,
+        build_name=build_name,
+        build_number=build_number,
+        results_file_base_path=tmp_folder,
+        builder_base_url=None,
+        test_results_map=test_results_map,
+        svn_repositories=(('webkit', 'third_party/WebKit'),
+                          ('chrome', '.')),
+        test_results_server=test_results_server,
+        test_type=test_type,
+        master_name=master_name)
+
+  #override
+  def _GetModifierChar(self, test_name):
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    return self._test_results_map[test_name].modifier
+
+  #override
+  def _GetSVNRevision(self, in_directory):
+    """Returns the git/svn revision for the given directory.
+
+    Args:
+      in_directory: The directory relative to src.
+    """
+    def _is_git_directory(in_directory):
+      """Returns true if the given directory is in a git repository.
+
+      Args:
+        in_directory: The directory path to be tested.
+      """
+      if os.path.exists(os.path.join(in_directory, '.git')):
+        return True
+      parent = os.path.dirname(in_directory)
+      if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory:
+        return False
+      return _is_git_directory(parent)
+
+    in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory)
+
+    if not os.path.exists(os.path.join(in_directory, '.svn')):
+      if _is_git_directory(in_directory):
+        return repo_utils.GetGitHeadSHA1(in_directory)
+      else:
+        return ''
+
+    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+    try:
+      dom = xml.dom.minidom.parseString(output)
+      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+    except xml.parsers.expat.ExpatError:
+      return ''
+
+
+class ResultsUploader(object):
+  """Handles uploading buildbot tests results to the flakiness dashboard."""
+  def __init__(self, tests_type):
+    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+    self._master_name = os.environ.get('BUILDBOT_MASTERNAME')
+    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    self._tests_type = tests_type
+    self._build_name = None
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the server '
+                      'from your local machine.')
+
+    upstream = (tests_type != 'Chromium_Android_Instrumentation')
+    if not upstream:
+      self._build_name = 'chromium-android'
+      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+      if not buildbot_branch:
+        buildbot_branch = 'master'
+      else:
+        # Ensure there's no leading "origin/"
+        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
+      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
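+      # e.g. BUILDBOT_BRANCH='origin/master' yields a master name of
+      # 'chromium-android-master'.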
+
+    self._test_results_map = {}
+
+  def AddResults(self, test_results):
+    # TODO(frankf): Differentiate between fail/crash/timeouts.
+    conversion_map = [
+        (test_results.GetPass(), False,
+            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+        (test_results.GetFail(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetCrash(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetTimeout(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetUnknown(), True,
+            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+        ]
+
+    for results_list, failed, modifier in conversion_map:
+      for single_test_result in results_list:
+        test_result = json_results_generator.TestResult(
+            test=single_test_result.GetName(),
+            failed=failed,
+            elapsed_time=single_test_result.GetDuration() / 1000)
+        # The WebKit TestResult object sets the modifier based on the test
+        # name. Since we don't use the same test naming convention as WebKit,
+        # the modifier will be wrong, so we need to overwrite it.
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e: # pylint: disable=broad-except
+      logging.error("Uploading results to test server failed: %s.", e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
diff --git a/src/build/android/pylib/results/json_results.py b/src/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000..9b3bcb5
--- /dev/null
+++ b/src/build/android/pylib/results/json_results.py
@@ -0,0 +1,229 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import json
+import logging
+import time
+
+import six
+
+from pylib.base import base_test_result
+
+def GenerateResultsDict(test_run_results, global_tags=None):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+  # Example json output.
+  # {
+  #   "global_tags": [],
+  #   "all_tests": [
+  #     "test1",
+  #     "test2",
+  #    ],
+  #   "disabled_tests": [],
+  #   "per_iteration_data": [
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #     },
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #     },
+  #     ...
+  #   ],
+  # }
+
+  all_tests = set()
+  per_iteration_data = []
+  test_run_links = {}
+
+  for test_run_result in test_run_results:
+    iteration_data = collections.defaultdict(list)
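+    # A test_run_result may itself be a list of TestRunResults (e.g. the runs
+    # of a test that was retried within one iteration); flatten those into a
+    # single iteration's data.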
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+      for tr in test_run_result:
+        test_run_links.update(tr.GetLinks())
+
+    else:
+      results_iterable = test_run_result.GetAll()
+      test_run_links.update(test_run_result.GetLinks())
+
+    for r in results_iterable:
+      result_dict = {
+          'status': r.GetType(),
+          'elapsed_time_ms': r.GetDuration(),
+          'output_snippet': six.ensure_text(r.GetLog(), errors='replace'),
+          'losless_snippet': True,
+          'output_snippet_base64': '',
+          'links': r.GetLinks(),
+      }
+      iteration_data[r.GetName()].append(result_dict)
+
+    all_tests = all_tests.union(set(six.iterkeys(iteration_data)))
+    per_iteration_data.append(iteration_data)
+
+  return {
+    'global_tags': global_tags or [],
+    'all_tests': sorted(list(all_tests)),
+    # TODO(jbudorick): Add support for disabled tests within base_test_result.
+    'disabled_tests': [],
+    'per_iteration_data': per_iteration_data,
+    'links': test_run_links,
+  }
+
+
+def GenerateJsonTestResultFormatDict(test_run_results, interrupted):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+    interrupted: True if tests were interrupted, e.g. a timeout while
+        listing tests.
+  Returns:
+    A results dict that mirrors the standard JSON Test Results Format.
+  """
+
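+  # Illustrative shape of the returned dict for a single failing run of
+  # 'test.package.TestName' (see the tests for this module):
+  # {
+  #   'tests': {'test': {'package': {'TestName': {
+  #       'expected': 'PASS', 'actual': 'FAIL', 'is_unexpected': True}}}},
+  #   'num_failures_by_type': {'PASS': 0, 'FAIL': 1},
+  #   'interrupted': False, 'path_delimiter': '.', 'version': 3,
+  #   'seconds_since_epoch': ...,
+  # }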
+  tests = {}
+  counts = {'PASS': 0, 'FAIL': 0}
+
+  for test_run_result in test_run_results:
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+    else:
+      results_iterable = test_run_result.GetAll()
+
+    for r in results_iterable:
+      element = tests
+      for key in r.GetName().split('.'):
+        if key not in element:
+          element[key] = {}
+        element = element[key]
+
+      element['expected'] = 'PASS'
+
+      result = ('PASS' if r.GetType() == base_test_result.ResultType.PASS
+                else 'FAIL')
+
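+      # Only a test's first invocation contributes to num_failures_by_type;
+      # retries are appended to 'actual' without changing the counts.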
+      if 'actual' in element:
+        element['actual'] += ' ' + result
+      else:
+        counts[result] += 1
+        element['actual'] = result
+        if result == 'FAIL':
+          element['is_unexpected'] = True
+
+      if r.GetDuration() != 0:
+        element['time'] = r.GetDuration()
+
+  # Fill in required fields.
+  return {
+      'interrupted': interrupted,
+      'num_failures_by_type': counts,
+      'path_delimiter': '.',
+      'seconds_since_epoch': time.time(),
+      'tests': tests,
+      'version': 3,
+  }
+
+
+def GenerateJsonResultsFile(test_run_result, file_path, global_tags=None,
+                            **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+    global_tags: optional list of tags to include as 'global_tags' in the
+        output.
+    **kwargs: additional keyword arguments passed through to json.dumps.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(
+        GenerateResultsDict(test_run_result, global_tags=global_tags),
+        **kwargs))
+    logging.info('Generated json results file at %s', file_path)
+
+
+def GenerateJsonTestResultFormatFile(test_run_result, interrupted, file_path,
+                                     **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This uses the official Chromium Test Results Format.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    interrupted: True if tests were interrupted, e.g. a timeout while
+        listing tests.
+    file_path: The path to the JSON file to write.
+    **kwargs: additional keyword arguments passed through to json.dumps.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(
+        json.dumps(
+            GenerateJsonTestResultFormatDict(test_run_result, interrupted),
+            **kwargs))
+    logging.info('Generated json results file at %s', file_path)
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+                  GenerateJsonResultsFile.
+  Returns:
+    A list of base_test_result.BaseTestResult objects.
+  """
+
+  def string_as_status(s):
+    if s in base_test_result.ResultType.GetTypes():
+      return s
+    return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in six.iteritems(testsuite_run):
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'])
+          for tr in test_runs])
+  return results_list
diff --git a/src/build/android/pylib/results/json_results_test.py b/src/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000..6647331
--- /dev/null
+++ b/src/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+  def testGenerateResultsDict_passedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_skippedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SKIPPED', test_iteration_result['status'])
+
+  def testGenerateResultsDict_failedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('FAILURE', test_iteration_result['status'])
+
+  def testGenerateResultsDict_duration(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('elapsed_time_ms' in test_iteration_result)
+    self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
+
+  def testGenerateResultsDict_multipleResults(self):
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName2', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result1)
+    all_results.AddResult(result2)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName1', 'test.package.TestName2'],
+        results_dict['all_tests'])
+
+    self.assertTrue('per_iteration_data' in results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEquals(1, len(iterations))
+
+    expected_tests = set([
+        'test.package.TestName1',
+        'test.package.TestName2',
+    ])
+
+    for test_name, iteration_result in iterations[0].iteritems():
+      self.assertTrue(test_name in expected_tests)
+      expected_tests.remove(test_name)
+      self.assertEquals(1, len(iteration_result))
+
+      test_iteration_result = iteration_result[0]
+      self.assertTrue('status' in test_iteration_result)
+      self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_passOnRetry(self):
+    raw_results = []
+
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.FAIL)
+    run_results1 = base_test_result.TestRunResults()
+    run_results1.AddResult(result1)
+    raw_results.append(run_results1)
+
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    run_results2 = base_test_result.TestRunResults()
+    run_results2.AddResult(result2)
+    raw_results.append(run_results2)
+
+    results_dict = json_results.GenerateResultsDict([raw_results])
+    self.assertEquals(['test.package.TestName1'], results_dict['all_tests'])
+
+    # Check that there's only one iteration.
+    self.assertIn('per_iteration_data', results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEquals(1, len(iterations))
+
+    # Check that test.package.TestName1 is the only test in the iteration.
+    self.assertEquals(1, len(iterations[0]))
+    self.assertIn('test.package.TestName1', iterations[0])
+
+    # Check that there are two results for test.package.TestName1.
+    actual_test_results = iterations[0]['test.package.TestName1']
+    self.assertEquals(2, len(actual_test_results))
+
+    # Check that the first result is a failure.
+    self.assertIn('status', actual_test_results[0])
+    self.assertEquals('FAILURE', actual_test_results[0]['status'])
+
+    # Check that the second result is a success.
+    self.assertIn('status', actual_test_results[1])
+    self.assertEquals('SUCCESS', actual_test_results[1]['status'])
+
+  def testGenerateResultsDict_globalTags(self):
+    raw_results = []
+    global_tags = ['UNRELIABLE_RESULTS']
+
+    results_dict = json_results.GenerateResultsDict(
+        [raw_results], global_tags=global_tags)
+    self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
+
+  def testGenerateResultsDict_loslessSnippet(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+    log = 'blah-blah'
+    result.SetLog(log)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('losless_snippet' in test_iteration_result)
+    self.assertTrue(test_iteration_result['losless_snippet'])
+    self.assertTrue('output_snippet' in test_iteration_result)
+    self.assertEquals(log, test_iteration_result['output_snippet'])
+    self.assertTrue('output_snippet_base64' in test_iteration_result)
+    self.assertEquals('', test_iteration_result['output_snippet_base64'])
+
+  def testGenerateJsonTestResultFormatDict_passedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'PASS', results_dict['tests']['test']['package']['TestName']['actual'])
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(1, results_dict['num_failures_by_type']['PASS'])
+    self.assertEquals(0, results_dict['num_failures_by_type']['FAIL'])
+
+  def testGenerateJsonTestResultFormatDict_failedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'FAIL', results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEquals(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+    self.assertEquals(2, len(results_dict['num_failures_by_type']))
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(0, results_dict['num_failures_by_type']['PASS'])
+    self.assertEquals(1, results_dict['num_failures_by_type']['FAIL'])
+
+  def testGenerateJsonTestResultFormatDict_failedResultWithRetry(self):
+    result_1 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_1 = base_test_result.TestRunResults()
+    run_results_1.AddResult(result_1)
+
+    # Simulate a second retry with failure.
+    result_2 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_2 = base_test_result.TestRunResults()
+    run_results_2.AddResult(result_2)
+
+    all_results = [run_results_1, run_results_2]
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict(
+        all_results, False)
+    self.assertEquals(1, len(results_dict['tests']))
+    self.assertEquals(1, len(results_dict['tests']['test']))
+    self.assertEquals(1, len(results_dict['tests']['test']['package']))
+    self.assertEquals(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEquals(
+        'FAIL FAIL',
+        results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEquals(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+
+    # Note: technically a missing entry counts as zero.
+    self.assertEquals(2, len(results_dict['num_failures_by_type']))
+    self.assertEquals(0, results_dict['num_failures_by_type']['PASS'])
+
+    # According to the spec: If a test was run more than once, only the first
+    # invocation's result is included in the totals.
+    self.assertEquals(1, results_dict['num_failures_by_type']['FAIL'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/results/presentation/__init__.py b/src/build/android/pylib/results/presentation/__init__.py
new file mode 100644
index 0000000..a22a6ee
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/android/pylib/results/presentation/javascript/main_html.js b/src/build/android/pylib/results/presentation/javascript/main_html.js
new file mode 100644
index 0000000..3d94663
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/javascript/main_html.js
@@ -0,0 +1,193 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+function getArguments() {
+  // Returns the URL arguments as a dictionary.
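+  // e.g. for '?suite=Foo&n=1' this returns {suite: 'Foo', n: '1'}
+  // (values are not URL-decoded).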
+  var args = {};
+  var s = location.search;
+  if (s) {
+    var vals = s.substring(1).split('&');
+    for (var i = 0; i < vals.length; i++) {
+      var pair = vals[i].split('=');
+      args[pair[0]] = pair[1];
+    }
+  }
+  return args;
+}
+
+function showSuiteTable(show_the_table) {
+  document.getElementById('suite-table').style.display = (
+      show_the_table ? 'table' : 'none');
+}
+
+function showTestTable(show_the_table) {
+  document.getElementById('test-table').style.display = (
+      show_the_table ? 'table' : 'none');
+}
+
+function showTestsOfOneSuiteOnly(suite_name) {
+  setTitle('Test Results of Suite: ' + suite_name);
+  var show_all = (suite_name == 'TOTAL');
+  var testTableBlocks = document.getElementById('test-table')
+      .getElementsByClassName('row_block');
+  Array.prototype.slice.call(testTableBlocks)
+      .forEach(function(testTableBlock) {
+        if (!show_all) {
+          var table_block_in_suite = (testTableBlock.firstElementChild
+            .firstElementChild.firstElementChild.innerHTML)
+            .startsWith(suite_name);
+          if (!table_block_in_suite) {
+            testTableBlock.style.display = 'none';
+            return;
+          }
+        }
+        testTableBlock.style.display = 'table-row-group';
+      });
+  showTestTable(true);
+  showSuiteTable(false);
+  window.scrollTo(0, 0);
+}
+
+function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
+  showTestsOfOneSuiteOnly(suite_name);
+  history.pushState({suite: suite_name}, suite_name, '');
+}
+
+function showSuiteTableOnly() {
+  setTitle('Suites Summary');
+  showTestTable(false);
+  showSuiteTable(true);
+  window.scrollTo(0, 0);
+}
+
+function showSuiteTableOnlyWithReplaceState() {
+  showSuiteTableOnly();
+  history.replaceState({}, 'suite_table', '');
+}
+
+function setBrowserBackButtonLogic() {
+  window.onpopstate = function(event) {
+    if (!event.state || !event.state.suite) {
+      showSuiteTableOnly();
+    } else {
+      showTestsOfOneSuiteOnly(event.state.suite);
+    }
+  };
+}
+
+function setTitle(title) {
+  document.getElementById('summary-header').textContent = title;
+}
+
+function sortByColumn(head) {
+  var table = head.parentNode.parentNode.parentNode;
+  var rowBlocks = Array.prototype.slice.call(
+      table.getElementsByTagName('tbody'));
+
+  // Determine whether to asc or desc and set arrows.
+  var headers = head.parentNode.getElementsByTagName('th');
+  var headIndex = Array.prototype.slice.call(headers).indexOf(head);
+  var asc = -1;
+  for (var i = 0; i < headers.length; i++) {
+    if (headers[i].dataset.ascSorted != 0) {
+      if (headers[i].dataset.ascSorted == 1) {
+          headers[i].getElementsByClassName('up')[0]
+              .style.display = 'none';
+      } else {
+        headers[i].getElementsByClassName('down')[0]
+            .style.display = 'none';
+      }
+      if (headers[i] == head) {
+        asc = headers[i].dataset.ascSorted * -1;
+      } else {
+        headers[i].dataset.ascSorted = 0;
+      }
+      break;
+    }
+  }
+  headers[headIndex].dataset.ascSorted = asc;
+  if (asc == 1) {
+      headers[headIndex].getElementsByClassName('up')[0]
+          .style.display = 'inline';
+  } else {
+      headers[headIndex].getElementsByClassName('down')[0]
+          .style.display = 'inline';
+  }
+
+  // Sort the array by the specified column number (col) and order (asc).
+  rowBlocks.sort(function (a, b) {
+    if (a.style.display == 'none') {
+      return -1;
+    } else if (b.style.display == 'none') {
+      return 1;
+    }
+    var a_rows = Array.prototype.slice.call(a.children);
+    var b_rows = Array.prototype.slice.call(b.children);
+    if (head.className == "text") {
+      // If sorting by text, we only compare the entry on the first row.
+      var aInnerHTML = a_rows[0].children[headIndex].innerHTML;
+      var bInnerHTML = b_rows[0].children[headIndex].innerHTML;
+      return (aInnerHTML == bInnerHTML) ? 0 : (
+          (aInnerHTML > bInnerHTML) ? asc : -1 * asc);
+    } else if (head.className == "number") {
+      // If sorting by number, for example, duration,
+      // we will sum up the durations of different test runs
+      // for one specific test case and sort by the sum.
+      var avalue = 0;
+      var bvalue = 0;
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        avalue += Number(row.children[index].innerHTML);
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        bvalue += Number(row.children[index].innerHTML);
+      });
+    } else if (head.className == "flaky") {
+      // Flakiness = (#total - #success - #skipped) / (#total - #skipped)
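+      // e.g. runs [SUCCESS, FAILURE, SKIPPED]: (3 - 1 - 1) / (3 - 1) = 0.5.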
+      var a_success_or_skipped = 0;
+      var a_skipped = 0;
+      var b_success_or_skipped = 0;
+      var b_skipped = 0;
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          a_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          a_success_or_skipped += 1;
+          a_skipped += 1;
+        }
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          b_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          b_success_or_skipped += 1;
+          b_skipped += 1;
+        }
+      });
+      var atotal_minus_skipped = a_rows.length - a_skipped;
+      var btotal_minus_skipped = b_rows.length - b_skipped;
+
+      var avalue = ((atotal_minus_skipped == 0) ? -1 :
+          (a_rows.length - a_success_or_skipped) / atotal_minus_skipped);
+      var bvalue = ((btotal_minus_skipped == 0) ? -1 :
+          (b_rows.length - b_success_or_skipped) / btotal_minus_skipped);
+    }
+    return asc * (avalue - bvalue);
+  });
+
+  for (var i = 0; i < rowBlocks.length; i++) {
+    table.appendChild(rowBlocks[i]);
+  }
+}
+
+function sortSuiteTableByFailedTestCases() {
+  sortByColumn(document.getElementById('number_fail_tests'));
+}
diff --git a/src/build/android/pylib/results/presentation/standard_gtest_merge.py b/src/build/android/pylib/results/presentation/standard_gtest_merge.py
new file mode 100755
index 0000000..58a2936
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -0,0 +1,173 @@
+#! /usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import sys
+
+
+def merge_shard_results(summary_json, jsons_to_merge):
+  """Reads JSON test output from all shards and combines them into one.
+
+  Returns a dict with the merged test output. Raises an exception if the
+  summary JSON cannot be loaded. Emits annotations.
+  """
+  try:
+    with open(summary_json) as f:
+      summary = json.load(f)
+  except (IOError, ValueError):
+    raise Exception('Summary json cannot be loaded.')
+
+  # Merge all JSON files together. Keep track of missing shards.
+  merged = {
+    'all_tests': set(),
+    'disabled_tests': set(),
+    'global_tags': set(),
+    'missing_shards': [],
+    'per_iteration_data': [],
+    'swarming_summary': summary,
+    'links': set()
+  }
+  for index, result in enumerate(summary['shards']):
+    if result is None:
+      merged['missing_shards'].append(index)
+      continue
+
+    # Author note: this code path doesn't trigger convert_to_old_format() in
+    # client/swarming.py, which means the state enum is saved in its string
+    # name form, not in the number form.
+    state = result.get('state')
+    if state == u'BOT_DIED':
+      print(
+          'Shard #%d had a Swarming internal failure' % index, file=sys.stderr)
+    elif state == u'EXPIRED':
+      print('There wasn\'t enough capacity to run your test', file=sys.stderr)
+    elif state == u'TIMED_OUT':
+      print('Test runtime exceeded allocated time. '
+            'Either it ran for too long (hard timeout) or it didn\'t produce '
+            'I/O for an extended period of time (I/O timeout)',
+            file=sys.stderr)
+    elif state != u'COMPLETED':
+      print('Invalid Swarming task state: %s' % state, file=sys.stderr)
+
+    json_data, err_msg = load_shard_json(index, result.get('task_id'),
+                                         jsons_to_merge)
+    if json_data:
+      # Set-like fields.
+      for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+        merged[key].update(json_data.get(key, []))
+
+      # 'per_iteration_data' is a list of dicts. Dicts should be merged
+      # together, not the 'per_iteration_data' list itself.
+      merged['per_iteration_data'] = merge_list_of_dicts(
+          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
+    else:
+      merged['missing_shards'].append(index)
+      print('No result was found: %s' % err_msg, file=sys.stderr)
+
+  # If some shards are missing, make it known. Continue parsing anyway; the
+  # step should be red anyway, since swarming.py returns a non-zero exit code
+  # in that case.
+  if merged['missing_shards']:
+    as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
+    print('some shards did not complete: %s' % as_str, file=sys.stderr)
+    # Not all tests ran, so the combined JSON summary cannot be trusted.
+    merged['global_tags'].add('UNRELIABLE_RESULTS')
+
+  # Convert to jsonish dict.
+  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+    merged[key] = sorted(merged[key])
+  return merged
+
+
+OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB
+
+
+def load_shard_json(index, task_id, jsons_to_merge):
+  """Reads JSON output of the specified shard.
+
+  Args:
+    index: The index of the shard to load data for (used by the old API).
+    task_id: The task id of the shard to load data for (used by the new API).
+    jsons_to_merge: Paths of candidate output.json files to search.
+
+  Returns: A tuple containing:
+    * The contents of path, deserialized into a python object.
+    * An error string.
+    (exactly one of the tuple elements will be non-None).
+  """
+  matching_json_files = [
+      j for j in jsons_to_merge
+      if (os.path.basename(j) == 'output.json' and
+          (os.path.basename(os.path.dirname(j)) == str(index) or
+           os.path.basename(os.path.dirname(j)) == task_id))]
+
+  if not matching_json_files:
+    print('shard %s test output missing' % index, file=sys.stderr)
+    return (None, 'shard %s test output was missing' % index)
+  elif len(matching_json_files) > 1:
+    print('duplicate test output for shard %s' % index, file=sys.stderr)
+    return (None, 'shard %s test output was duplicated' % index)
+
+  path = matching_json_files[0]
+
+  try:
+    filesize = os.stat(path).st_size
+    if filesize > OUTPUT_JSON_SIZE_LIMIT:
+      print(
+          'output.json is %d bytes. Max size is %d' % (filesize,
+                                                       OUTPUT_JSON_SIZE_LIMIT),
+          file=sys.stderr)
+      return (None, 'shard %s test output exceeded the size limit' % index)
+
+    with open(path) as f:
+      return (json.load(f), None)
+  except (IOError, ValueError, OSError) as e:
+    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
+    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)
+
+    return (None, 'shard %s test output was missing or invalid' % index)
+
+
+def merge_list_of_dicts(left, right):
+  """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
+  output = []
+  for i in xrange(max(len(left), len(right))):
+    left_dict = left[i] if i < len(left) else {}
+    right_dict = right[i] if i < len(right) else {}
+    merged_dict = left_dict.copy()
+    merged_dict.update(right_dict)
+    output.append(merged_dict)
+  return output
+
+
+def standard_gtest_merge(
+    output_json, summary_json, jsons_to_merge):
+
+  output = merge_shard_results(summary_json, jsons_to_merge)
+  with open(output_json, 'wb') as f:
+    json.dump(output, f)
+
+  return 0
+
+
+def main(raw_args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--summary-json')
+  parser.add_argument('-o', '--output-json', required=True)
+  parser.add_argument('jsons_to_merge', nargs='*')
+
+  args = parser.parse_args(raw_args)
+
+  return standard_gtest_merge(
+      args.output_json, args.summary_json, args.jsons_to_merge)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/pylib/results/presentation/template/main.html b/src/build/android/pylib/results/presentation/template/main.html
new file mode 100644
index 0000000..e30d7d3
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/template/main.html
@@ -0,0 +1,93 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+    <style>
+      body {
+        background-color: #fff;
+        color: #333;
+        font-family: Verdana, sans-serif;
+        font-size: 10px;
+        margin-left: 30px;
+        margin-right: 30px;
+        margin-top: 20px;
+        margin-bottom: 50px;
+        padding: 0;
+      }
+      table, th, td {
+        border: 1px solid black;
+        border-collapse: collapse;
+        text-align: center;
+      }
+      table, td {
+        padding: 0.1em 1em 0.1em 1em;
+      }
+      th {
+        cursor: pointer;
+        padding: 0.2em 1.5em 0.2em 1.5em;
+      }
+      table {
+        width: 100%;
+      }
+      .center {
+        text-align: center;
+      }
+      .left {
+        text-align: left;
+      }
+      a {
+        cursor: pointer;
+        text-decoration: underline;
+      }
+      a:link,a:visited,a:active {
+        color: #444;
+      }
+      .row_block:hover {
+        background-color: #F6F6F6;
+      }
+      .skipped, .success, .failure {
+        border-color: #000000;
+      }
+      .success {
+        color: #000;
+        background-color: #8d4;
+      }
+      .failure {
+        color: #000;
+        background-color: #e88;
+      }
+      .skipped {
+        color: #000;
+        background: #AADDEE;
+      }
+    </style>
+    <script type="text/javascript">
+      {% include "javascript/main_html.js" %}
+    </script>
+  </head>
+  <body>
+    <div>
+      <h2 id="summary-header"></h2>
+      {% for tb_value in tb_values %}
+        {% include 'template/table.html' %}
+      {% endfor %}
+    </div>
+  {% if feedback_url %}
+    <br />
+    <a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
+  {%- endif %}
+  <script>
+    sortSuiteTableByFailedTestCases();
+    showSuiteTableOnlyWithReplaceState();
+    // Enable sorting for each column of tables.
+    Array.prototype.slice.call(document.getElementsByTagName('th'))
+        .forEach(function(head) {
+            head.addEventListener(
+                "click",
+                function() { sortByColumn(head); });
+        }
+    );
+    setBrowserBackButtonLogic();
+  </script>
+  </body>
+</html>
diff --git a/src/build/android/pylib/results/presentation/template/table.html b/src/build/android/pylib/results/presentation/template/table.html
new file mode 100644
index 0000000..4240043
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/template/table.html
@@ -0,0 +1,60 @@
+<table id="{{tb_value.table_id}}" style="display:none;">
+  <thead class="heads">
+    <tr>
+      {% for cell in tb_value.table_headers -%}
+        <th class="{{cell.class}}" id="{{cell.data}}" data-asc-sorted=0>
+          {{cell.data}}
+          <span class="up" style="display:none;"> &#8593</span>
+          <span class="down" style="display:none;"> &#8595</span>
+        </th>
+      {%- endfor %}
+    </tr>
+  </thead>
+  {% for block in tb_value.table_row_blocks -%}
+    <tbody class="row_block">
+      {% for row in block -%}
+        <tr class="{{tb_value.table_id}}-body-row">
+          {% for cell in row -%}
+            {% if cell.rowspan -%}
+              <td rowspan="{{cell.rowspan}}" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+            {%- else -%}
+              <td rowspan="1" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+            {%- endif %}
+            {% if cell.cell_type == 'pre' -%}
+              <pre>{{cell.data}}</pre>
+            {%- elif cell.cell_type == 'links' -%}
+              {% for link in cell.links -%}
+                <a href="{{link.href}}" target="{{link.target}}">{{link.data}}</a>
+                {% if not loop.last -%}
+                  <br />
+                {%- endif %}
+              {%- endfor %}
+            {%- elif cell.cell_type == 'action' -%}
+              <a onclick="{{cell.action}}">{{cell.data}}</a>
+            {%- else -%}
+              {{cell.data}}
+            {%- endif %}
+            </td>
+          {%- endfor %}
+        </tr>
+      {%- endfor %}
+    </tbody>
+  {%- endfor %}
+  <tfoot>
+    <tr>
+        {% for cell in tb_value.table_footer -%}
+          <td class="{{tb_value.table_id}}-summary-column-{{loop.index0}} {{cell.class}}">
+            {% if cell.cell_type == 'links' -%}
+              {% for link in cell.links -%}
+                <a href="{{link.href}}" target="{{link.target}}"><b>{{link.data}}</b></a>
+              {%- endfor %}
+            {%- elif cell.cell_type == 'action' -%}
+              <a onclick="{{cell.action}}">{{cell.data}}</a>
+            {%- else -%}
+              <b>{{cell.data}}</b>
+            {%- endif %}
+          </td>
+        {%- endfor %}
+      </tr>
+  </tfoot>
+</table>
diff --git a/src/build/android/pylib/results/presentation/test_results_presentation.py b/src/build/android/pylib/results/presentation/test_results_presentation.py
new file mode 100755
index 0000000..33fae04
--- /dev/null
+++ b/src/build/android/pylib/results/presentation/test_results_presentation.py
@@ -0,0 +1,547 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import os
+import sys
+import tempfile
+import urllib
+
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.abspath(os.path.join(
+    CURRENT_DIR, '..', '..', '..', '..', '..'))
+
+sys.path.append(os.path.join(BASE_DIR, 'build', 'android'))
+from pylib.results.presentation import standard_gtest_merge
+from pylib.utils import google_storage_helper  # pylint: disable=import-error
+
+sys.path.append(os.path.join(BASE_DIR, 'third_party'))
+import jinja2  # pylint: disable=import-error
+JINJA_ENVIRONMENT = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+    autoescape=True)
+
+
+def cell(data, html_class='center'):
+  """Formats table cell data for processing in jinja template."""
+  return {
+    'data': data,
+    'class': html_class,
+  }
+
+
+def pre_cell(data, html_class='center'):
+  """Formats table <pre> cell data for processing in jinja template."""
+  return {
+    'cell_type': 'pre',
+    'data': data,
+    'class': html_class,
+  }
+
+
+class LinkTarget(object):
+  # Opens the linked document in a new window or tab.
+  NEW_TAB = '_blank'
+  # Opens the linked document in the same frame as it was clicked.
+  CURRENT_TAB = '_self'
+
+
+def link(data, href, target=LinkTarget.CURRENT_TAB):
+  """Formats <a> tag data for processing in jinja template.
+
+  Args:
+    data: String link appears as on HTML page.
+    href: URL where link goes.
+    target: Where link should be opened (e.g. current tab or new tab).
+  """
+  return {
+    'data': data,
+    'href': href,
+    'target': target,
+  }
+
+
+def links_cell(links, html_class='center', rowspan=None):
+  """Formats table cell with links for processing in jinja template.
+
+  Args:
+    links: List of link dictionaries. Use |link| function to generate them.
+    html_class: Class for table cell.
+    rowspan: Rowspan HTML attribute.
+  """
+  return {
+    'cell_type': 'links',
+    'class': html_class,
+    'links': links,
+    'rowspan': rowspan,
+  }
+
+
+def action_cell(action, data, html_class):
+  """Formats table cell with javascript actions.
+
+  Args:
+    action: Javscript action.
+    data: Data in cell.
+    class: Class for table cell.
+  """
+  return {
+    'cell_type': 'action',
+    'action': action,
+    'data': data,
+    'class': html_class,
+  }
+
+
+def flakiness_dashboard_link(test_name, suite_name):
+  url_args = urllib.urlencode([
+      ('testType', suite_name),
+      ('tests', test_name)])
+  return ('https://test-results.appspot.com/'
+         'dashboards/flakiness_dashboard.html#%s' % url_args)
+
+
+def logs_cell(result, test_name, suite_name):
+  """Formats result logs data for processing in jinja template."""
+  link_list = []
+  result_link_dict = result.get('links', {})
+  result_link_dict['flakiness'] = flakiness_dashboard_link(
+      test_name, suite_name)
+  for name, href in sorted(result_link_dict.items()):
+    link_list.append(link(
+        data=name,
+        href=href,
+        target=LinkTarget.NEW_TAB))
+  if link_list:
+    return links_cell(link_list)
+  else:
+    return cell('(no logs)')
+
+
+def code_search(test, cs_base_url):
+  """Returns URL for test on codesearch."""
+  search = test.replace('#', '.')
+  return '%s/search/?q=%s&type=cs' % (cs_base_url, search)
+
+
+def status_class(status):
+  """Returns HTML class for test status."""
+  if not status:
+    return 'failure unknown'
+  status = status.lower()
+  if status not in ('success', 'skipped'):
+    return 'failure %s' % status
+  return status
+
+
+def create_test_table(results_dict, cs_base_url, suite_name):
+  """Format test data for injecting into HTML table."""
+
+  header_row = [
+    cell(data='test_name', html_class='text'),
+    cell(data='status', html_class='flaky'),
+    cell(data='elapsed_time_ms', html_class='number'),
+    cell(data='logs', html_class='text'),
+    cell(data='output_snippet', html_class='text'),
+  ]
+
+  test_row_blocks = []
+  for test_name, test_results in results_dict.iteritems():
+    test_runs = []
+    for index, result in enumerate(test_results):
+      if index == 0:
+        test_run = [links_cell(
+            links=[
+                link(href=code_search(test_name, cs_base_url),
+                     target=LinkTarget.NEW_TAB,
+                     data=test_name)],
+            rowspan=len(test_results),
+            html_class='left %s' % test_name
+        )]                                          # test_name
+      else:
+        test_run = []
+
+      test_run.extend([
+          cell(data=result['status'] or 'UNKNOWN',    # status
+               html_class=('center %s' %
+                           status_class(result['status']))),
+          cell(data=result['elapsed_time_ms']),     # elapsed_time_ms
+          logs_cell(result, test_name, suite_name), # logs
+          pre_cell(data=result['output_snippet'],   # output_snippet
+                   html_class='left'),
+      ])
+      test_runs.append(test_run)
+    test_row_blocks.append(test_runs)
+  return header_row, test_row_blocks
+
+
+def create_suite_table(results_dict):
+  """Format test suite data for injecting into HTML table."""
+
+  SUCCESS_COUNT_INDEX = 1
+  FAIL_COUNT_INDEX = 2
+  ALL_COUNT_INDEX = 3
+  TIME_INDEX = 4
+
+  header_row = [
+    cell(data='suite_name', html_class='text'),
+    cell(data='number_success_tests', html_class='number'),
+    cell(data='number_fail_tests', html_class='number'),
+    cell(data='all_tests', html_class='number'),
+    cell(data='elapsed_time_ms', html_class='number'),
+  ]
+
+  footer_row = [
+    action_cell(
+          'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
+          'TOTAL',
+          'center'
+        ),         # TOTAL
+    cell(data=0),  # number_success_tests
+    cell(data=0),  # number_fail_tests
+    cell(data=0),  # all_tests
+    cell(data=0),  # elapsed_time_ms
+  ]
+
+  suite_row_dict = {}
+  for test_name, test_results in results_dict.iteritems():
+    # TODO(mikecase): This logic doesn't work if there are multiple test runs,
+    # i.e. if 'per_iteration_data' has multiple entries, since we only
+    # consider the result of the last test run.
+    result = test_results[-1]
+
+    suite_name = (test_name.split('#')[0] if '#' in test_name
+                  else test_name.split('.')[0])
+    if suite_name in suite_row_dict:
+      suite_row = suite_row_dict[suite_name]
+    else:
+      suite_row = [
+        action_cell(
+          'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
+          suite_name,
+          'left'
+        ),             # suite_name
+        cell(data=0),  # number_success_tests
+        cell(data=0),  # number_fail_tests
+        cell(data=0),  # all_tests
+        cell(data=0),  # elapsed_time_ms
+      ]
+
+    suite_row_dict[suite_name] = suite_row
+
+    suite_row[ALL_COUNT_INDEX]['data'] += 1
+    footer_row[ALL_COUNT_INDEX]['data'] += 1
+
+    if result['status'] == 'SUCCESS':
+      suite_row[SUCCESS_COUNT_INDEX]['data'] += 1
+      footer_row[SUCCESS_COUNT_INDEX]['data'] += 1
+    elif result['status'] != 'SKIPPED':
+      suite_row[FAIL_COUNT_INDEX]['data'] += 1
+      footer_row[FAIL_COUNT_INDEX]['data'] += 1
+
+    # Some types of crashes can have 'null' values for elapsed_time_ms.
+    if result['elapsed_time_ms'] is not None:
+      suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+      footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+
+  for suite in suite_row_dict.values():
+    if suite[FAIL_COUNT_INDEX]['data'] > 0:
+      suite[FAIL_COUNT_INDEX]['class'] += ' failure'
+    else:
+      suite[FAIL_COUNT_INDEX]['class'] += ' success'
+
+  if footer_row[FAIL_COUNT_INDEX]['data'] > 0:
+    footer_row[FAIL_COUNT_INDEX]['class'] += ' failure'
+  else:
+    footer_row[FAIL_COUNT_INDEX]['class'] += ' success'
+
+  return (header_row,
+          [[suite_row] for suite_row in suite_row_dict.values()],
+          footer_row)
+
+
+def feedback_url(result_details_link):
+  # pylint: disable=redefined-variable-type
+  url_args = [
+      ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
+      ('summary', 'Result Details Feedback:'),
+      ('components', 'Test>Android'),
+  ]
+  if result_details_link:
+    url_args.append(('comment', 'Please check out: %s' % result_details_link))
+  url_args = urllib.urlencode(url_args)
+  # pylint: enable=redefined-variable-type
+  return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
+
+
+def results_to_html(results_dict, cs_base_url, bucket, test_name,
+                    builder_name, build_number, local_output):
+  """Convert list of test results into html format.
+
+  Args:
+    local_output: Whether this results file is uploaded to Google Storage or
+        just a local file.
+  """
+  test_rows_header, test_rows = create_test_table(
+      results_dict, cs_base_url, test_name)
+  suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
+      results_dict)
+
+  suite_table_values = {
+    'table_id': 'suite-table',
+    'table_headers': suite_rows_header,
+    'table_row_blocks': suite_rows,
+    'table_footer': suite_row_footer,
+  }
+
+  test_table_values = {
+    'table_id': 'test-table',
+    'table_headers': test_rows_header,
+    'table_row_blocks': test_rows,
+  }
+
+  main_template = JINJA_ENVIRONMENT.get_template(
+      os.path.join('template', 'main.html'))
+
+  if local_output:
+    html_render = main_template.render(  #  pylint: disable=no-member
+        {
+          'tb_values': [suite_table_values, test_table_values],
+          'feedback_url': feedback_url(None),
+        })
+    return (html_render, None, None)
+  else:
+    dest = google_storage_helper.unique_name(
+        '%s_%s_%s' % (test_name, builder_name, build_number))
+    result_details_link = google_storage_helper.get_url_link(
+        dest, '%s/html' % bucket)
+    html_render = main_template.render(  #  pylint: disable=no-member
+        {
+          'tb_values': [suite_table_values, test_table_values],
+          'feedback_url': feedback_url(result_details_link),
+        })
+    return (html_render, dest, result_details_link)
+
+
+def result_details(json_path, test_name, cs_base_url, bucket=None,
+                   builder_name=None, build_number=None, local_output=False):
+  """Get result details from json path and then convert results to html.
+
+  Args:
+    local_output: Whether this results file is uploaded to Google Storage or
+        just a local file.
+  """
+
+  with open(json_path) as json_file:
+    json_object = json.loads(json_file.read())
+
+  if 'per_iteration_data' not in json_object:
+    return 'Error: json file missing per_iteration_data.'
+
+  results_dict = collections.defaultdict(list)
+  for testsuite_run in json_object['per_iteration_data']:
+    for test, test_runs in testsuite_run.iteritems():
+      results_dict[test].extend(test_runs)
+  return results_to_html(results_dict, cs_base_url, bucket, test_name,
+                         builder_name, build_number, local_output)
+
+
+def upload_to_google_bucket(html, bucket, dest):
+  with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
+    temp_file.write(html)
+    temp_file.flush()
+    return google_storage_helper.upload(
+        name=dest,
+        filepath=temp_file.name,
+        bucket='%s/html' % bucket,
+        content_type='text/html',
+        authenticated_link=True)
+
+
+def ui_screenshot_set(json_path):
+  with open(json_path) as json_file:
+    json_object = json.loads(json_file.read())
+  if 'per_iteration_data' not in json_object:
+    # This will be reported as an error by result_details, no need to duplicate.
+    return None
+  ui_screenshots = []
+  # pylint: disable=too-many-nested-blocks
+  for testsuite_run in json_object['per_iteration_data']:
+    for _, test_runs in testsuite_run.iteritems():
+      for test_run in test_runs:
+        if 'ui screenshot' in test_run['links']:
+          screenshot_link = test_run['links']['ui screenshot']
+          if screenshot_link.startswith('file:'):
+            with contextlib.closing(urllib.urlopen(screenshot_link)) as f:
+              test_screenshots = json.load(f)
+          else:
+            # Assume anything that isn't a file link is a Google Storage link.
+            screenshot_string = google_storage_helper.read_from_link(
+                screenshot_link)
+            if not screenshot_string:
+              logging.error('Bad screenshot link %s', screenshot_link)
+              continue
+            test_screenshots = json.loads(
+                screenshot_string)
+          ui_screenshots.extend(test_screenshots)
+  # pylint: enable=too-many-nested-blocks
+
+  if ui_screenshots:
+    return json.dumps(ui_screenshots)
+  return None
+
+
+def upload_screenshot_set(json_path, test_name, bucket, builder_name,
+                          build_number):
+  screenshot_set = ui_screenshot_set(json_path)
+  if not screenshot_set:
+    return None
+  dest = google_storage_helper.unique_name(
+    'screenshots_%s_%s_%s' % (test_name, builder_name, build_number),
+    suffix='.json')
+  with tempfile.NamedTemporaryFile(suffix='.json') as temp_file:
+    temp_file.write(screenshot_set)
+    temp_file.flush()
+    return google_storage_helper.upload(
+        name=dest,
+        filepath=temp_file.name,
+        bucket='%s/json' % bucket,
+        content_type='application/json',
+        authenticated_link=True)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--json-file', help='Path of json file.')
+  parser.add_argument('--cs-base-url', help='Base url for code search.',
+                      default='http://cs.chromium.org')
+  parser.add_argument('--bucket', help='Google storage bucket.', required=True)
+  parser.add_argument('--builder-name', help='Builder name.')
+  parser.add_argument('--build-number', help='Build number.')
+  parser.add_argument('--test-name', help='The name of the test.',
+                      required=True)
+  parser.add_argument(
+      '-o', '--output-json',
+      help='(Swarming Merge Script API) '
+           'Output JSON file to create.')
+  parser.add_argument(
+      '--build-properties',
+      help='(Swarming Merge Script API) '
+           'Build property JSON file provided by recipes.')
+  parser.add_argument(
+      '--summary-json',
+      help='(Swarming Merge Script API) '
+           'Summary of shard state running on swarming. '
+           '(Output of the swarming.py collect '
+           '--task-summary-json=XXX command.)')
+  parser.add_argument(
+      '--task-output-dir',
+      help='(Swarming Merge Script API) '
+           'Directory containing all swarming task results.')
+  parser.add_argument(
+      'positional', nargs='*',
+      help='output.json from shards.')
+
+  args = parser.parse_args()
+
+  if ((args.build_properties is None) ==
+      (args.build_number is None or args.builder_name is None)):
+    parser.error('Exactly one of build_properties or '
+                 '(build_number and builder_name) should be given.')
+
+  if (args.build_number is None) != (args.builder_name is None):
+    parser.error('args.build_number and args.builder_name have to be '
+                 'given together or not given at all.')
+
+  if len(args.positional) == 0 and args.json_file is None:
+    if args.output_json:
+      with open(args.output_json, 'w') as f:
+        json.dump({}, f)
+    return
+  elif len(args.positional) != 0 and args.json_file:
+    parser.error('Exactly one of args.positional and '
+                 'args.json_file should be given.')
+
+  if args.build_properties:
+    build_properties = json.loads(args.build_properties)
+    if ('buildnumber' not in build_properties or
+        'buildername' not in build_properties):
+      parser.error('Build number/builder name not specified.')
+    build_number = build_properties['buildnumber']
+    builder_name = build_properties['buildername']
+  elif args.build_number and args.builder_name:
+    build_number = args.build_number
+    builder_name = args.builder_name
+
+  if args.positional:
+    if len(args.positional) == 1:
+      json_file = args.positional[0]
+    else:
+      if args.output_json and args.summary_json:
+        standard_gtest_merge.standard_gtest_merge(
+            args.output_json, args.summary_json, args.positional)
+        json_file = args.output_json
+      elif not args.output_json:
+        raise Exception('output_json required by merge API is missing.')
+      else:
+        raise Exception('summary_json required by merge API is missing.')
+  elif args.json_file:
+    json_file = args.json_file
+
+  if not os.path.exists(json_file):
+    raise IOError('--json-file %s not found.' % json_file)
+
+  # The link to the result details presentation page is embedded in the page
+  # itself (via the feedback URL), so it is computed before uploading.
+  result_html_string, dest, result_details_link = result_details(
+      json_file, args.test_name, args.cs_base_url, args.bucket,
+      builder_name, build_number)
+
+  result_details_link_2 = upload_to_google_bucket(
+      result_html_string.encode('UTF-8'),
+      args.bucket, dest)
+  assert result_details_link == result_details_link_2, (
+      'Result details links do not match. The link returned by get_url_link'
+      ' should be the same as that returned by upload.')
+
+  ui_screenshot_set_link = upload_screenshot_set(json_file, args.test_name,
+      args.bucket, builder_name, build_number)
+
+  if ui_screenshot_set_link:
+    ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/'
+    ui_catalog_query = urllib.urlencode(
+        {'screenshot_source': ui_screenshot_set_link})
+    ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query)
+
+  if args.output_json:
+    with open(json_file) as original_json_file:
+      json_object = json.load(original_json_file)
+      json_object['links'] = {
+          'result_details (logcats, flakiness links)': result_details_link
+      }
+
+      if ui_screenshot_set_link:
+        json_object['links']['ui screenshots'] = ui_screenshot_link
+
+      with open(args.output_json, 'w') as f:
+        json.dump(json_object, f)
+  else:
+    print('Result Details: %s' % result_details_link)
+
+    if ui_screenshot_set_link:
+      print('UI Screenshots %s' % ui_screenshot_link)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/pylib/results/report_results.py b/src/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000..56eefac
--- /dev/null
+++ b/src/build/android/pylib/results/report_results.py
@@ -0,0 +1,136 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+from __future__ import print_function
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+from pylib.utils import logging_utils
+
+
+def _LogToFile(results, test_type, suite_name):
+  """Log results to local files which can be used for aggregation later."""
+  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+  if not os.path.exists(log_file_path):
+    os.mkdir(log_file_path)
+  full_file_name = os.path.join(
+      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+  if not os.path.exists(full_file_name):
+    with open(full_file_name, 'w') as log_file:
+      print(
+          '\n%s results for %s build %s:' %
+          (test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+           os.environ.get('BUILDBOT_BUILDNUMBER')),
+          file=log_file)
+
+  logging.info('Writing results to %s.', full_file_name)
+  with open(full_file_name, 'a') as log_file:
+    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+    print(
+        '%s%s' % (shortened_suite_name.ljust(30), results.GetShortForm()),
+        file=log_file)
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+                             flakiness_server):
+  """Upload results to the flakiness dashboard"""
+  logging.info('Upload results for test type "%s", test package "%s" to %s',
+               test_type, test_package, flakiness_server)
+
+  try:
+    # TODO(jbudorick): remove Instrumentation once instrumentation tests
+    # switch to platform mode.
+    if test_type in ('instrumentation', 'Instrumentation'):
+      if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+        assert test_package in ['ContentShellTest',
+                                'ChromePublicTest',
+                                'ChromeSyncShellTest',
+                                'SystemWebViewShellLayoutTest',
+                                'WebViewInstrumentationTest']
+        dashboard_test_type = ('%s_instrumentation_tests' %
+                               test_package.lower().rstrip('test'))
+      # Downstream server.
+      else:
+        dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+    elif test_type == 'gtest':
+      dashboard_test_type = test_package
+
+    else:
+      logging.warning('Invalid test type')
+      return
+
+    results_uploader.Upload(
+        results, flakiness_server, dashboard_test_type)
+
+  except Exception: # pylint: disable=broad-except
+    logging.exception('Failure while logging to %s', flakiness_server)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+            flakiness_server=None):
+  """Log the tests results for the test suite.
+
+  The results will be logged three different ways:
+    1. Log to stdout.
+    2. Log to local files for aggregating multiple test steps
+       (on buildbots only).
+    3. Log to flakiness dashboard (on buildbots only).
+
+  Args:
+    results: An instance of TestRunResults object.
+    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+    test_package: Test package name (e.g. 'ipc_tests' for gtests,
+                  'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Feature', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard at this URL.
+  """
+  # pylint doesn't like how colorama set up its color enums.
+  # pylint: disable=no-member
+  black_on_white = (logging_utils.BACK.WHITE, logging_utils.FORE.BLACK)
+  with logging_utils.OverrideColor(logging.CRITICAL, black_on_white):
+    if not results.DidRunPass():
+      logging.critical('*' * 80)
+      logging.critical('Detailed Logs')
+      logging.critical('*' * 80)
+      for line in results.GetLogs().splitlines():
+        logging.critical(line)
+    logging.critical('*' * 80)
+    logging.critical('Summary')
+    logging.critical('*' * 80)
+    for line in results.GetGtestForm().splitlines():
+      color = black_on_white
+      if 'FAILED' in line:
+        # Red on white, dim.
+        color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED,
+                 logging_utils.STYLE.DIM)
+      elif 'PASSED' in line:
+        # Green on white, dim.
+        color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN,
+                 logging_utils.STYLE.DIM)
+      with logging_utils.OverrideColor(logging.CRITICAL, color):
+        logging.critical(line)
+    logging.critical('*' * 80)
+
+  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+    if annotation and len(annotation) == 1:
+      suite_name = annotation[0]
+    else:
+      suite_name = test_package
+    _LogToFile(results, test_type, suite_name)
+
+    if flakiness_server:
+      _LogToFlakinessDashboard(results, test_type, test_package,
+                               flakiness_server)
diff --git a/src/build/android/pylib/symbols/__init__.py b/src/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/symbols/__init__.py
diff --git a/src/build/android/pylib/symbols/apk_lib_dump.py b/src/build/android/pylib/symbols/apk_lib_dump.py
new file mode 100755
index 0000000..ba87026
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_lib_dump.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump shared library information from an APK file.
+
+This script is used to dump which *uncompressed* native shared libraries an
+APK contains, as well as their position within the file. This is mostly useful
+to diagnose logcat and tombstone symbolization issues when the libraries are
+loaded directly from the APK at runtime.
+
+The default format will print one line per uncompressed shared library with the
+following format:
+
+  0x<start-offset> 0x<end-offset> 0x<file-size> <file-path>
+
+The --format=python option can be used to dump the same information in a form
+that is easy to use in a Python script, e.g. with a line like:
+
+  (0x<start-offset>, 0x<end-offset>, 0x<file-size>, <file-path>),
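+
+Example invocation (the APK path is illustrative):
+
+  apk_lib_dump.py --format=python /path/to/app.apk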
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+
+from pylib.symbols import apk_native_libs
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument('apk', help='Input APK file path.')
+
+  parser.add_argument('--format', help='Select output format',
+                      default='default', choices=['default', 'python'])
+
+  args = parser.parse_args()
+
+  apk_reader = apk_native_libs.ApkReader(args.apk)
+  lib_map = apk_native_libs.ApkNativeLibraries(apk_reader)
+  for lib_path, file_offset, file_size in lib_map.GetDumpList():
+    if args.format == 'python':
+      print('(0x%08x, 0x%08x, 0x%08x, \'%s\'),' %
+            (file_offset, file_offset + file_size, file_size, lib_path))
+    else:
+      print('0x%08x 0x%08x 0x%08x %s' % (file_offset, file_offset + file_size,
+                                         file_size, lib_path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/pylib/symbols/apk_native_libs.py b/src/build/android/pylib/symbols/apk_native_libs.py
new file mode 100644
index 0000000..c4af202
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_native_libs.py
@@ -0,0 +1,419 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import struct
+import zipfile
+
+# The default zipfile python module cannot open APKs properly, but this
+# fixes it. Note that simply importing this file is sufficient to
+# ensure that zip works correctly for all other modules. See:
+# http://bugs.python.org/issue14315
+# https://hg.python.org/cpython/rev/6dd5e9556a60#l2.8
+def _PatchZipFile():
+  # pylint: disable=protected-access
+  oldDecodeExtra = zipfile.ZipInfo._decodeExtra
+  def decodeExtra(self):
+    try:
+      oldDecodeExtra(self)
+    except struct.error:
+      pass
+  zipfile.ZipInfo._decodeExtra = decodeExtra
+_PatchZipFile()
+
+
+class ApkZipInfo(object):
+  """Models a single file entry from an ApkReader.
+
+  This is very similar to the zipfile.ZipInfo class. It provides a few
+  properties describing the entry:
+    - filename          (same as ZipInfo.filename)
+    - file_size         (same as ZipInfo.file_size)
+    - compress_size     (same as ZipInfo.compress_size)
+    - file_offset       (note: not provided by ZipInfo)
+
+  And a few useful methods: IsCompressed() and IsElfFile().
+
+  Entries can be created by using ApkReader() methods.
+  """
+  def __init__(self, zip_file, zip_info):
+    """Construct instance. Do not call this directly. Use ApkReader methods."""
+    self._file = zip_file
+    self._info = zip_info
+    self._file_offset = None
+
+  @property
+  def filename(self):
+    """Entry's file path within APK."""
+    return self._info.filename
+
+  @property
+  def file_size(self):
+    """Entry's extracted file size in bytes."""
+    return self._info.file_size
+
+  @property
+  def compress_size(self):
+    """Entry' s compressed file size in bytes."""
+    return self._info.compress_size
+
+  @property
+  def file_offset(self):
+    """Entry's starting file offset in the APK."""
+    if self._file_offset is None:
+      self._file_offset = self._ZipFileOffsetFromLocalHeader(
+          self._file.fp, self._info.header_offset)
+    return self._file_offset
+
+  def __repr__(self):
+    """Convert to string for debugging."""
+    return 'ApkZipInfo["%s",size=0x%x,compressed=0x%x,offset=0x%x]' % (
+        self.filename, self.file_size, self.compress_size, self.file_offset)
+
+  def IsCompressed(self):
+    """Returns True iff the entry is compressed."""
+    return self._info.compress_type != zipfile.ZIP_STORED
+
+  def IsElfFile(self):
+    """Returns True iff the entry is an ELF file."""
+    with self._file.open(self._info, 'r') as f:
+      return f.read(4) == '\x7fELF'
+
+  @staticmethod
+  def _ZipFileOffsetFromLocalHeader(fd, local_header_offset):
+    """Return a file's start offset from its zip archive local header.
+
+    Args:
+      fd: Input file object.
+      local_header_offset: Local header offset (from its ZipInfo entry).
+    Returns:
+      file start offset.
+    """
+    FILE_NAME_LEN_OFFSET = 26
+    FILE_NAME_OFFSET = 30
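+    # Assuming the standard zip local file header layout: the fixed part is
+    # 30 bytes, with the 16-bit file name length at offset 26 and the 16-bit
+    # extra field length at offset 28; the file data follows the name and
+    # extra field. E.g. a header at 0x1000 with a 25-byte name and an 8-byte
+    # extra field puts the file data at 0x1000 + 30 + 25 + 8 = 0x103f.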
+    fd.seek(local_header_offset + FILE_NAME_LEN_OFFSET)
+    file_name_len = struct.unpack('H', fd.read(2))[0]
+    extra_field_len = struct.unpack('H', fd.read(2))[0]
+    file_offset = (local_header_offset + FILE_NAME_OFFSET +
+                    file_name_len + extra_field_len)
+    return file_offset
+
+
+class ApkReader(object):
+  """A convenience class used to read the content of APK files.
+
+  Its design is very similar to the one from zipfile.ZipFile, except
+  that it returns ApkZipInfo entries, which provide a |file_offset|
+  property that can be used to know where a given file is located inside
+  the archive.
+
+  It is also easy to mock for unit-testing (see MockApkReader in
+  apk_utils_unittest.py) without creating any files on disk.
+
+  Usage is the following:
+    - Create an instance using a with statement (for proper unit-testing).
+    - Call ListEntries() to list all entries in the archive. This returns
+      a list of ApkZipInfo entries.
+    - Or call FindEntry() corresponding to a given path within the archive.
+
+  For example:
+     with ApkReader(input_apk_path) as reader:
+       info = reader.FindEntry('lib/armeabi-v7a/libfoo.so')
+       if info.IsCompressed() or not info.IsElfFile():
+         raise Exception('Invalid library path')
+
+  The ApkZipInfo can be used to inspect the entry's metadata, or read its
+  content with the ReadAll() method. See its documentation for all details.
+  """
+  def __init__(self, apk_path):
+    """Initialize instance."""
+    self._zip_file = zipfile.ZipFile(apk_path, 'r')
+    self._path = apk_path
+
+  def __enter__(self):
+    """Python context manager entry."""
+    return self
+
+  def __exit__(self, *kwargs):
+    """Python context manager exit."""
+    self.Close()
+
+  @property
+  def path(self):
+    """The corresponding input APK path."""
+    return self._path
+
+  def Close(self):
+    """Close the reader (and underlying ZipFile instance)."""
+    self._zip_file.close()
+
+  def ListEntries(self):
+    """Return a list of ApkZipInfo entries for this APK."""
+    result = []
+    for info in self._zip_file.infolist():
+      result.append(ApkZipInfo(self._zip_file, info))
+    return result
+
+  def FindEntry(self, file_path):
+    """Return an ApkZipInfo instance for a given archive file path.
+
+    Args:
+      file_path: zip file path.
+    Return:
+      A new ApkZipInfo entry on success.
+    Raises:
+      KeyError on failure (entry not found).
+    """
+    info = self._zip_file.getinfo(file_path)
+    return ApkZipInfo(self._zip_file, info)
+
+
+class ApkNativeLibraries(object):
+  """A class for the list of uncompressed shared libraries inside an APK.
+
+  Create a new instance by passing the path to an input APK, then use
+  the FindLibraryByOffset() method to find the native shared library path
+  corresponding to a given file offset.
+
+  GetLibraries() and GetDumpList() can also be used to inspect
+  the state of the instance.
+  """
+  def __init__(self, apk_reader):
+    """Initialize instance.
+
+    Args:
+      apk_reader: An ApkReader instance corresponding to the input APK.
+    """
+    self._native_libs = []
+    for entry in apk_reader.ListEntries():
+      # Chromium uses so-called 'placeholder' native shared libraries
+      # that have a size of 0, and are only used to deal with bugs in
+      # older Android system releases (they are never loaded and cannot
+      # appear in stack traces). Ignore these here to avoid generating
+      # confusing results.
+      if entry.file_size == 0:
+        continue
+
+      # Only uncompressed libraries can appear in stack traces.
+      if entry.IsCompressed():
+        continue
+
+      # Only consider files within lib/ and with a filename ending with .so
+      # at the moment. NOTE: Do not require a 'lib' prefix, since that would
+      # prevent finding the 'crazy.libXXX.so' libraries used by Chromium.
+      if (not entry.filename.startswith('lib/') or
+          not entry.filename.endswith('.so')):
+        continue
+
+      lib_path = entry.filename
+
+      self._native_libs.append(
+          (lib_path, entry.file_offset, entry.file_offset + entry.file_size))
+
+  def IsEmpty(self):
+    """Return true iff the list is empty."""
+    return not bool(self._native_libs)
+
+  def GetLibraries(self):
+    """Return the list of all library paths in this instance."""
+    return sorted([x[0] for x in self._native_libs])
+
+  def GetDumpList(self):
+    """Retrieve full library map.
+
+    Returns:
+      A list of (lib_path, file_offset, file_size) tuples, sorted
+      in increasing |file_offset| values.
+    """
+    result = []
+    for entry in self._native_libs:
+      lib_path, file_start, file_end = entry
+      result.append((lib_path, file_start, file_end - file_start))
+
+    return sorted(result, key=lambda x: x[1])
+
+  def FindLibraryByOffset(self, file_offset):
+    """Find the native library at a given file offset.
+
+    Args:
+      file_offset: File offset within the original APK.
+    Returns:
+      Returns a (lib_path, lib_offset) tuple on success, or (None, 0)
+      on failure. Note that lib_path will omit the 'lib/$ABI/' prefix,
+      lib_offset is the adjustment of file_offset within the library.
+    """
+    for lib_path, start_offset, end_offset in self._native_libs:
+      if file_offset >= start_offset and file_offset < end_offset:
+        return (lib_path, file_offset - start_offset)
+
+    return (None, 0)
+
+
+class ApkLibraryPathTranslator(object):
+  """Translates APK file paths + byte offsets into library path + offset.
+
+  The purpose of this class is to translate a native shared library path
+  that points to an APK into a new device-specific path that points to a
+  native shared library, as if it was installed there. E.g.:
+
+     ('/data/data/com.example.app-1/base.apk', 0x123be00)
+
+  would be translated into:
+
+     ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 0x3be00)
+
+  assuming the original APK (installed as base.apk) contains an uncompressed
+  shared library under lib/armeabi-v7a/libfoo.so at offset 0x120000.
+
+  Note that the virtual device path after the ! doesn't necessarily match
+  the path inside the .apk. This doesn't really matter for the rest of
+  the symbolization functions since only the file's base name can be used
+  to find the corresponding file on the host.
+
+  Usage is the following:
+
+     1/ Create new instance.
+
+     2/ Call AddHostApk() one or several times to add the host path
+        of an APK, its package name, and device-installed name.
+
+     3/ Call TranslatePath() to translate a (path, offset) tuple corresponding
+        to an on-device APK, into the corresponding virtual device library
+        path and offset.
+  """
+
+  # Depending on the version of the system, a non-system APK might be installed
+  # on a path that looks like the following:
+  #
+  #  * /data/..../<package_name>-<number>.apk, where <number> is used to
+  #    distinguish several versions of the APK during package updates.
+  #
+  #  * /data/..../<package_name>-<suffix>/base.apk, where <suffix> is a
+  #    string of random ASCII characters following the dash after the
+  #    package name. This serves as a way to distinguish the installation
+  #    paths during package update, and randomize its final location
+  #    (to prevent apps from hard-coding the paths to other apps).
+  #
+  #    Note that the 'base.apk' name comes from the system.
+  #
+  #  * /data/.../<package_name>-<suffix>/<split_name>.apk, where <suffix>
+  #    is the same as above, and <split_name> is the name of an app bundle
+  #    split APK.
+  #
+  # System APKs are installed on paths that look like /system/app/Foo.apk
+  # but this class ignores them intentionally.
+
+  # Compiled regular expression for the first format above.
+  _RE_APK_PATH_1 = re.compile(
+      r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<version>[0-9]+)\.apk')
+
+  # Compiled regular expression for the second and third formats above.
+  _RE_APK_PATH_2 = re.compile(
+      r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<suffix>[^/]+)/' +
+      r'(?P<apk_name>.+\.apk)')
+
+  def __init__(self):
+    """Initialize instance. Call AddHostApk() to add host apk file paths."""
+    self._path_map = {}  # Maps (package_name, apk_name) to host-side APK path.
+    self._libs_map = {}  # Maps APK host path to ApkNativeLibrariesMap instance.
+
+  def AddHostApk(self, package_name, native_libs, device_apk_name=None):
+    """Add a file path to the host APK search list.
+
+    Args:
+      package_name: Corresponding apk package name.
+      native_libs: ApkNativeLibraries instance for the corresponding APK.
+      device_apk_name: Optional expected name of the installed APK on the
+        device. This is only useful when symbolizing app bundles that run on
+        Android L+; it is ignored in other cases.
+    """
+    if native_libs.IsEmpty():
+      logging.debug('Ignoring host APK without any uncompressed native ' +
+                    'libraries: %s', device_apk_name)
+      return
+
+    # If the APK name is not provided, use the default of 'base.apk'. This
+    # will be ignored if we find <package_name>-<number>.apk file paths
+    # in the input, but will work properly for Android L+, as long as we're
+    # not using Android app bundles.
+    device_apk_name = device_apk_name or 'base.apk'
+
+    key = "%s/%s" % (package_name, device_apk_name)
+    if key in self._libs_map:
+      raise KeyError('There is already an APK associated with (%s)' % key)
+
+    self._libs_map[key] = native_libs
+
+  @staticmethod
+  def _MatchApkDeviceInstallPath(apk_path):
+    """Check whether a given path matches an installed APK device file path.
+
+    Args:
+      apk_path: Device-specific file path.
+    Returns:
+      On success, a (package_name, apk_name) tuple. On failure, (None, None).
+    """
+    m = ApkLibraryPathTranslator._RE_APK_PATH_1.match(apk_path)
+    if m:
+      return (m.group('package_name'), 'base.apk')
+
+    m = ApkLibraryPathTranslator._RE_APK_PATH_2.match(apk_path)
+    if m:
+      return (m.group('package_name'), m.group('apk_name'))
+
+    return (None, None)
+
+  def TranslatePath(self, apk_path, apk_offset):
+    """Translate a potential apk file path + offset into library path + offset.
+
+    Args:
+      apk_path: Library or apk file path on the device (e.g.
+        '/data/data/com.example.app-XSAHKSJH/base.apk').
+      apk_offset: Byte offset within the library or apk.
+
+    Returns:
+      a new (lib_path, lib_offset) tuple. If |apk_path| points to an APK,
+      then this function searches inside the corresponding host-side APKs
+      (added with AddHostApk() above) for the corresponding uncompressed
+      native shared library at |apk_offset|. If found, this returns a new
+      device-specific path corresponding to a virtual installation of said
+      library with an adjusted offset.
+
+      Otherwise, just return the original (apk_path, apk_offset) values.
+    """
+    if not apk_path.endswith('.apk'):
+      return (apk_path, apk_offset)
+
+    apk_package, apk_name = self._MatchApkDeviceInstallPath(apk_path)
+    if not apk_package:
+      return (apk_path, apk_offset)
+
+    key = '%s/%s' % (apk_package, apk_name)
+    native_libs = self._libs_map.get(key)
+    if not native_libs:
+      logging.debug('Unknown %s package', key)
+      return (apk_path, apk_offset)
+
+    lib_name, new_offset = native_libs.FindLibraryByOffset(apk_offset)
+    if not lib_name:
+      logging.debug('Invalid offset in %s.apk package: %d', key, apk_offset)
+      return (apk_path, apk_offset)
+
+    lib_name = os.path.basename(lib_name)
+
+    # Some libraries are stored with a crazy. prefix inside the APK; this
+    # is done to prevent the PackageManager from extracting the libraries
+    # at installation time when running on pre-Android M systems, where the
+    # system linker cannot load libraries directly from APKs.
+    crazy_prefix = 'crazy.'
+    if lib_name.startswith(crazy_prefix):
+      lib_name = lib_name[len(crazy_prefix):]
+
+    # Put this in a fictional lib sub-directory for good measure.
+    new_path = '%s!lib/%s' % (apk_path, lib_name)
+
+    return (new_path, new_offset)
diff --git a/src/build/android/pylib/symbols/apk_native_libs_unittest.py b/src/build/android/pylib/symbols/apk_native_libs_unittest.py
new file mode 100644
index 0000000..416918d
--- /dev/null
+++ b/src/build/android/pylib/symbols/apk_native_libs_unittest.py
@@ -0,0 +1,396 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import unittest
+
+from pylib.symbols import apk_native_libs
+
+# Mock ELF-like data
+MOCK_ELF_DATA = '\x7fELFFFFFFFFFFFFFFFF'
+
+class MockApkZipInfo(object):
+  """A mock ApkZipInfo class, returned by MockApkReaderFactory instances."""
+  def __init__(self, filename, file_size, compress_size, file_offset,
+               file_data):
+    self.filename = filename
+    self.file_size = file_size
+    self.compress_size = compress_size
+    self.file_offset = file_offset
+    self._data = file_data
+
+  def __repr__(self):
+    """Convert to string for debugging."""
+    return 'MockApkZipInfo["%s",size=%d,compressed=%d,offset=%d]' % (
+        self.filename, self.file_size, self.compress_size, self.file_offset)
+
+  def IsCompressed(self):
+    """Returns True iff the entry is compressed."""
+    return self.file_size != self.compress_size
+
+  def IsElfFile(self):
+    """Returns True iff the entry is an ELF file."""
+    if not self._data or len(self._data) < 4:
+      return False
+
+    return self._data[0:4] == '\x7fELF'
+
+
+class MockApkReader(object):
+  """A mock ApkReader instance used during unit-testing.
+
+  Use it as a context manager, as in:
+
+     with MockApkReader() as reader:
+       reader.AddTestEntry(file_path, file_size, compress_size, file_data)
+       ...
+       entries = reader.ListEntries()
+  """
+  def __init__(self, apk_path='test.apk'):
+    """Initialize instance."""
+    self._entries = []
+    self._fake_offset = 0
+    self._path = apk_path
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, *kwarg):
+    self.Close()
+    return
+
+  @property
+  def path(self):
+    return self._path
+
+  def AddTestEntry(self, filepath, file_size, compress_size, file_data):
+    """Add a new entry to the instance for unit-tests.
+
+    Args:
+      filepath: archive file path.
+      file_size: uncompressed file size in bytes.
+      compress_size: compressed size in bytes.
+      file_data: file data to be checked by IsElfFile()
+
+    Note that file_data can be None, and that its size can actually be
+    smaller than |compress_size| when used during unit-testing.
+    """
+    self._entries.append(MockApkZipInfo(filepath, file_size, compress_size,
+                         self._fake_offset, file_data))
+    self._fake_offset += compress_size
+
+  def Close(self): # pylint: disable=no-self-use
+    """Close this reader instance."""
+    return
+
+  def ListEntries(self):
+    """Return a list of MockApkZipInfo instances for this input APK."""
+    return self._entries
+
+  def FindEntry(self, file_path):
+    """Find the MockApkZipInfo instance corresponds to a given file path."""
+    for entry in self._entries:
+      if entry.filename == file_path:
+        return entry
+    raise KeyError('Could not find mock zip archive member for: ' + file_path)
+
+
+class MockApkReaderTest(unittest.TestCase):
+
+  def testEmpty(self):
+    with MockApkReader() as reader:
+      entries = reader.ListEntries()
+      self.assertTrue(len(entries) == 0)
+      with self.assertRaises(KeyError):
+        reader.FindEntry('non-existent-entry.txt')
+
+  def testSingleEntry(self):
+    with MockApkReader() as reader:
+      reader.AddTestEntry('some-path/some-file', 20000, 12345, file_data=None)
+      entries = reader.ListEntries()
+      self.assertTrue(len(entries) == 1)
+      entry = entries[0]
+      self.assertEqual(entry.filename, 'some-path/some-file')
+      self.assertEqual(entry.file_size, 20000)
+      self.assertEqual(entry.compress_size, 12345)
+      self.assertTrue(entry.IsCompressed())
+
+      entry2 = reader.FindEntry('some-path/some-file')
+      self.assertEqual(entry, entry2)
+
+  def testMultipleEntries(self):
+    with MockApkReader() as reader:
+      _ENTRIES = {
+        'foo.txt': (1024, 1024, 'FooFooFoo'),
+        'lib/bar/libcode.so': (16000, 3240, '\x7fELFFFFFFFFFFFF'),
+      }
+      for path, props in _ENTRIES.iteritems():
+        reader.AddTestEntry(path, props[0], props[1], props[2])
+
+      entries = reader.ListEntries()
+      self.assertEqual(len(entries), len(_ENTRIES))
+      for path, props in _ENTRIES.iteritems():
+        entry = reader.FindEntry(path)
+        self.assertEqual(entry.filename, path)
+        self.assertEqual(entry.file_size, props[0])
+        self.assertEqual(entry.compress_size, props[1])
+
+
+class ApkNativeLibrariesTest(unittest.TestCase):
+
+  def setUp(self):
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testEmptyApk(self):
+    with MockApkReader() as reader:
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertTrue(libs_map.IsEmpty())
+      self.assertEqual(len(libs_map.GetLibraries()), 0)
+      lib_path, lib_offset = libs_map.FindLibraryByOffset(0)
+      self.assertIsNone(lib_path)
+      self.assertEqual(lib_offset, 0)
+
+  def testSimpleApk(self):
+    with MockApkReader() as reader:
+      _MOCK_ENTRIES = [
+        # Top-level library should be ignored.
+        ('libfoo.so', 1000, 1000, MOCK_ELF_DATA, False),
+        # Library not under lib/ should be ignored.
+        ('badlib/test-abi/libfoo2.so', 1001, 1001, MOCK_ELF_DATA, False),
+        # Library under lib/<abi>/ but without .so extension should be ignored.
+        ('lib/test-abi/libfoo4.so.1', 1003, 1003, MOCK_ELF_DATA, False),
+        # Library under lib/<abi>/ with .so suffix, but compressed -> ignored.
+        ('lib/test-abi/libfoo5.so', 1004, 1003, MOCK_ELF_DATA, False),
+        # First correct library
+        ('lib/test-abi/libgood1.so', 1005, 1005, MOCK_ELF_DATA, True),
+        # Second correct library: support sub-directories
+        ('lib/test-abi/subdir/libgood2.so', 1006, 1006, MOCK_ELF_DATA, True),
+        # Third correct library, no lib prefix required
+        ('lib/test-abi/crazy.libgood3.so', 1007, 1007, MOCK_ELF_DATA, True),
+      ]
+      file_offsets = []
+      prev_offset = 0
+      for ent in _MOCK_ENTRIES:
+        reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+        file_offsets.append(prev_offset)
+        prev_offset += ent[2]
+
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertFalse(libs_map.IsEmpty())
+      self.assertEqual(libs_map.GetLibraries(), [
+          'lib/test-abi/crazy.libgood3.so',
+          'lib/test-abi/libgood1.so',
+          'lib/test-abi/subdir/libgood2.so',
+          ])
+
+      BIAS = 10
+      for mock_ent, file_offset in zip(_MOCK_ENTRIES, file_offsets):
+        if mock_ent[4]:
+          lib_path, lib_offset = libs_map.FindLibraryByOffset(
+              file_offset + BIAS)
+          self.assertEqual(lib_path, mock_ent[0])
+          self.assertEqual(lib_offset, BIAS)
+
+
+  def testMultiAbiApk(self):
+    with MockApkReader() as reader:
+      _MOCK_ENTRIES = [
+        ('lib/abi1/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+        ('lib/abi2/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+      ]
+      for ent in _MOCK_ENTRIES:
+        reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+
+      libs_map = apk_native_libs.ApkNativeLibraries(reader)
+      self.assertFalse(libs_map.IsEmpty())
+      self.assertEqual(libs_map.GetLibraries(), [
+          'lib/abi1/libfoo.so', 'lib/abi2/libfoo.so'])
+
+      lib1_name, lib1_offset = libs_map.FindLibraryByOffset(10)
+      self.assertEqual(lib1_name, 'lib/abi1/libfoo.so')
+      self.assertEqual(lib1_offset, 10)
+
+      lib2_name, lib2_offset = libs_map.FindLibraryByOffset(1000)
+      self.assertEqual(lib2_name, 'lib/abi2/libfoo.so')
+      self.assertEqual(lib2_offset, 0)
+
+
+class MockApkNativeLibraries(apk_native_libs.ApkNativeLibraries):
+  """A mock ApkNativeLibraries instance that can be used as input to
+     ApkLibraryPathTranslator without creating an ApkReader instance.
+
+     Create a new instance, then call AddTestEntry or AddTestEntries
+     as many times as necessary, before using it as a regular
+     ApkNativeLibraries instance.
+  """
+  # pylint: disable=super-init-not-called
+  def __init__(self):
+    self._native_libs = []
+
+  # pylint: enable=super-init-not-called
+
+  def AddTestEntry(self, lib_path, file_offset, file_size):
+    """Add a new test entry.
+
+    Args:
+      lib_path: Library path (e.g. 'lib/armeabi-v7a/libfoo.so').
+      file_offset: File offset of the library within the APK.
+      file_size: Library file size in bytes.
+    """
+    self._native_libs.append((lib_path, file_offset, file_offset + file_size))
+
+  def AddTestEntries(self, entries):
+    """Add a list of new test entries.
+
+    Args:
+      entries: A list of (library-path, file-offset, file-size) values.
+    """
+    for entry in entries:
+      self.AddTestEntry(entry[0], entry[1], entry[2])
+
+
+class MockApkNativeLibrariesTest(unittest.TestCase):
+
+  def testEmptyInstance(self):
+    mock = MockApkNativeLibraries()
+    self.assertTrue(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), [])
+    self.assertEqual(mock.GetDumpList(), [])
+
+  def testAddTestEntry(self):
+    mock = MockApkNativeLibraries()
+    mock.AddTestEntry('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000)
+    mock.AddTestEntry('lib/x86/libzoo.so', 0x10000, 0x10000)
+    mock.AddTestEntry('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000)
+    self.assertFalse(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+                                           'lib/armeabi-v7a/libfoo.so',
+                                           'lib/x86/libzoo.so'])
+    self.assertEqual(mock.GetDumpList(), [
+        ('lib/x86/libzoo.so', 0x10000, 0x10000),
+        ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+        ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+
+  def testAddTestEntries(self):
+    mock = MockApkNativeLibraries()
+    mock.AddTestEntries([
+      ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+      ('lib/x86/libzoo.so', 0x10000, 0x10000),
+      ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+    self.assertFalse(mock.IsEmpty())
+    self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+                                           'lib/armeabi-v7a/libfoo.so',
+                                           'lib/x86/libzoo.so'])
+    self.assertEqual(mock.GetDumpList(), [
+        ('lib/x86/libzoo.so', 0x10000, 0x10000),
+        ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+        ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+    ])
+
+
+class ApkLibraryPathTranslatorTest(unittest.TestCase):
+
+  def _CheckUntranslated(self, translator, path, offset):
+    """Check that a given (path, offset) is not modified by translation."""
+    self.assertEqual(translator.TranslatePath(path, offset), (path, offset))
+
+
+  def _CheckTranslated(self, translator, path, offset, new_path, new_offset):
+    """Check that (path, offset) is translated into (new_path, new_offset)."""
+    self.assertEqual(translator.TranslatePath(path, offset),
+                     (new_path, new_offset))
+
+  def testEmptyInstance(self):
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    self._CheckUntranslated(
+        translator, '/data/data/com.example.app-1/base.apk', 0x123456)
+
+  def testSimpleApk(self):
+    mock_libs = MockApkNativeLibraries()
+    mock_libs.AddTestEntries([
+      ('lib/test-abi/libfoo.so', 200, 2000),
+      ('lib/test-abi/libbar.so', 3200, 3000),
+      ('lib/test-abi/crazy.libzoo.so', 6200, 2000),
+    ])
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    translator.AddHostApk('com.example.app', mock_libs)
+
+    # Offset is within the first uncompressed library
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-9.apk', 757,
+        '/data/data/com.example.app-9.apk!lib/libfoo.so', 557)
+
+    # Offset is within the second compressed library.
+    self._CheckUntranslated(
+        translator,
+        '/data/data/com.example.app-9/base.apk', 2800)
+
+    # Offset is within the third uncompressed library.
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-1/base.apk', 3628,
+        '/data/data/com.example.app-1/base.apk!lib/libbar.so', 428)
+
+    # Offset is within the fourth uncompressed library with crazy. prefix
+    self._CheckTranslated(
+        translator,
+        '/data/data/com.example.app-XX/base.apk', 6500,
+        '/data/data/com.example.app-XX/base.apk!lib/libzoo.so', 300)
+
+    # Out-of-bounds apk offset.
+    self._CheckUntranslated(
+        translator,
+        '/data/data/com.example.app-1/base.apk', 10000)
+
+    # Invalid package name.
+    self._CheckUntranslated(
+        translator, '/data/data/com.example2.app-1/base.apk', 757)
+
+    # Invalid apk name.
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/not-base.apk', 100)
+
+    # Invalid file extensions.
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/base', 100)
+
+    self._CheckUntranslated(
+          translator, '/data/data/com.example.app-2/base.apk.dex', 100)
+
+  def testBundleApks(self):
+    mock_libs1 = MockApkNativeLibraries()
+    mock_libs1.AddTestEntries([
+      ('lib/test-abi/libfoo.so', 200, 2000),
+      ('lib/test-abi/libbbar.so', 3200, 3000),
+    ])
+    mock_libs2 = MockApkNativeLibraries()
+    mock_libs2.AddTestEntries([
+      ('lib/test-abi/libzoo.so', 200, 2000),
+      ('lib/test-abi/libtool.so', 3000, 4000),
+    ])
+    translator = apk_native_libs.ApkLibraryPathTranslator()
+    translator.AddHostApk('com.example.app', mock_libs1, 'base-master.apk')
+    translator.AddHostApk('com.example.app', mock_libs2, 'feature-master.apk')
+
+    self._CheckTranslated(
+      translator,
+      '/data/app/com.example.app-XUIYIUW/base-master.apk', 757,
+      '/data/app/com.example.app-XUIYIUW/base-master.apk!lib/libfoo.so', 557)
+
+    self._CheckTranslated(
+      translator,
+      '/data/app/com.example.app-XUIYIUW/feature-master.apk', 3200,
+      '/data/app/com.example.app-XUIYIUW/feature-master.apk!lib/libtool.so',
+      200)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/symbols/deobfuscator.py b/src/build/android/pylib/symbols/deobfuscator.py
new file mode 100644
index 0000000..ffc23b8
--- /dev/null
+++ b/src/build/android/pylib/symbols/deobfuscator.py
@@ -0,0 +1,175 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import threading
+import time
+import uuid
+
+from devil.utils import reraiser_thread
+from pylib import constants
+
+
+_MINIMUM_TIMEOUT = 3.0
+_PER_LINE_TIMEOUT = .002  # Should be able to process 500 lines per second.
+_PROCESS_START_TIMEOUT = 10.0
+_MAX_RESTARTS = 10  # Should be plenty unless tool is crashing on start-up.
+
+
+class Deobfuscator(object):
+  def __init__(self, mapping_path):
+    script_path = os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android',
+                               'stacktrace', 'java_deobfuscate.py')
+    cmd = [script_path, mapping_path]
+    # Allow only one thread to call TransformLines() at a time.
+    self._lock = threading.Lock()
+    # Ensure that only one thread attempts to kill self._proc in Close().
+    self._close_lock = threading.Lock()
+    self._closed_called = False
+    # Assign to None so that attribute exists if Popen() throws.
+    self._proc = None
+    # Start process eagerly to hide start-up latency.
+    self._proc_start_time = time.time()
+    self._proc = subprocess.Popen(
+        cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+        close_fds=True)
+
+  def IsClosed(self):
+    return self._closed_called or self._proc.returncode is not None
+
+  def IsBusy(self):
+    return self._lock.locked()
+
+  def IsReady(self):
+    return not self.IsClosed() and not self.IsBusy()
+
+  def TransformLines(self, lines):
+    """Deobfuscates obfuscated names found in the given lines.
+
+    If anything goes wrong (process crashes, timeout, etc), returns |lines|.
+
+    Args:
+      lines: A list of strings without trailing newlines.
+
+    Returns:
+      A list of strings without trailing newlines.
+    """
+    if not lines:
+      return []
+
+    # Deobfuscated stacks contain more frames than obfuscated ones when method
+    # inlining occurs. To account for the extra output lines, keep reading until
+    # this eof_line token is reached.
+    eof_line = uuid.uuid4().hex
+    out_lines = []
+
+    def deobfuscate_reader():
+      while True:
+        line = self._proc.stdout.readline()
+        # Return an empty string at EOF (when stdin is closed).
+        if not line:
+          break
+        line = line[:-1]
+        if line == eof_line:
+          break
+        out_lines.append(line)
+
+    if self.IsBusy():
+      logging.warning('deobfuscator: Having to wait for Java deobfuscation.')
+
+    # Allow only one thread to operate at a time.
+    with self._lock:
+      if self.IsClosed():
+        if not self._closed_called:
+          logging.warning('deobfuscator: Process exited with code=%d.',
+                          self._proc.returncode)
+          self.Close()
+        return lines
+
+      # TODO(agrieve): Can probably speed this up by only sending lines through
+      #     that might contain an obfuscated name.
+      reader_thread = reraiser_thread.ReraiserThread(deobfuscate_reader)
+      reader_thread.start()
+
+      try:
+        self._proc.stdin.write('\n'.join(lines))
+        self._proc.stdin.write('\n{}\n'.format(eof_line))
+        self._proc.stdin.flush()
+        time_since_proc_start = time.time() - self._proc_start_time
+        timeout = (max(0, _PROCESS_START_TIMEOUT - time_since_proc_start) +
+                   max(_MINIMUM_TIMEOUT, len(lines) * _PER_LINE_TIMEOUT))
+        reader_thread.join(timeout)
+        if self.IsClosed():
+          logging.warning(
+              'deobfuscator: Close() called by another thread during join().')
+          return lines
+        if reader_thread.is_alive():
+          logging.error('deobfuscator: Timed out.')
+          self.Close()
+          return lines
+        return out_lines
+      except IOError:
+        logging.exception('deobfuscator: Exception during java_deobfuscate')
+        self.Close()
+        return lines
+
+  def Close(self):
+    with self._close_lock:
+      needs_closing = not self.IsClosed()
+      self._closed_called = True
+
+    if needs_closing:
+      self._proc.stdin.close()
+      self._proc.kill()
+      self._proc.wait()
+
+  def __del__(self):
+    # self._proc is None when Popen() fails.
+    if not self._closed_called and self._proc:
+      logging.error('deobfuscator: Forgot to Close()')
+      self.Close()
+
+
+class DeobfuscatorPool(object):
+  # As of Sep 2017, each instance requires about 500MB of RAM, as measured by:
+  # /usr/bin/time -v build/android/stacktrace/java_deobfuscate.py \
+  #     out/Release/apks/ChromePublic.apk.mapping
+  def __init__(self, mapping_path, pool_size=4):
+    self._mapping_path = mapping_path
+    self._pool = [Deobfuscator(mapping_path) for _ in xrange(pool_size)]
+    # Allow only one thread to select from the pool at a time.
+    self._lock = threading.Lock()
+    self._num_restarts = 0
+
+  def TransformLines(self, lines):
+    with self._lock:
+      assert self._pool, 'TransformLines() called on a closed DeobfuscatorPool.'
+
+      # De-obfuscation is broken.
+      if self._num_restarts == _MAX_RESTARTS:
+        raise Exception('Deobfuscation seems broken.')
+
+      # Restart any closed Deobfuscators.
+      for i, d in enumerate(self._pool):
+        if d.IsClosed():
+          logging.warning('deobfuscator: Restarting closed instance.')
+          self._pool[i] = Deobfuscator(self._mapping_path)
+          self._num_restarts += 1
+          if self._num_restarts == _MAX_RESTARTS:
+            logging.warning('deobfuscator: MAX_RESTARTS reached.')
+
+      selected = next((x for x in self._pool if x.IsReady()), self._pool[0])
+      # Rotate the order so that next caller will not choose the same one.
+      self._pool.remove(selected)
+      self._pool.append(selected)
+
+    return selected.TransformLines(lines)
+
+  def Close(self):
+    with self._lock:
+      for d in self._pool:
+        d.Close()
+      self._pool = None
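+
+
+# A minimal usage sketch (the mapping path and input lines are hypothetical):
+#
+#   pool = DeobfuscatorPool('/path/to/ChromePublic.apk.mapping')
+#   try:
+#     readable_lines = pool.TransformLines(obfuscated_logcat_lines)
+#   finally:
+#     pool.Close()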
diff --git a/src/build/android/pylib/symbols/elf_symbolizer.py b/src/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000..1f2f918
--- /dev/null
+++ b/src/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,487 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+ELF_MAGIC = '\x7f\x45\x4c\x46'
+
+
+def ContainsElfMagic(file_path):
+  if os.path.getsize(file_path) < 4:
+    return False
+  try:
+    with open(file_path, 'r') as f:
+      b = f.read(4)
+      return b == ELF_MAGIC
+  except IOError:
+    return False
+
+
+class ELFSymbolizer(object):
+  """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+  This class is a frontend for addr2line (part of GNU binutils), designed to
+  symbolize batches of large numbers of symbols for a given ELF file. It
+  supports sharding symbolization against many addr2line instances and
+  pipelining of multiple requests per each instance (in order to hide addr2line
+  internals and OS pipe latencies).
+
+  The interface exhibited by this class is a very simple asynchronous interface,
+  which is based on the following three methods:
+  - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch and gather the last outstanding
+    results.
+  In essence, before the Join method returns, this class will have issued as
+  many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+  that due to multiprocess sharding, callbacks can be delivered out of order.
+
+  Some background about addr2line:
+  - it is invoked passing the elf path in the cmdline, piping the addresses in
+    its stdin and getting results on its stdout.
+  - it has pretty large response times for the first requests, but it
+    works very well in streaming mode once it has been warmed up.
+  - it doesn't scale by itself (on more cores). However, spawning multiple
+    instances at the same time on the same file is pretty efficient as they
+    keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly for OOM. This class deals with both of these
+    problems.
+
+  Despite the "scary" imports and the multi* words above, (almost) no multi-
+  threading/processing is involved from the python viewpoint. Concurrency
+  here is achieved by spawning several addr2line subprocesses and handling their
+  output pipes asynchronously. Therefore, all the code here (with the exception
+  of the Queue instance in Addr2Line) should be free from mind-blowing
+  thread-safety concerns.
+
+  The multiprocess sharding works as follows:
+  The symbolizer tries to use as few addr2line instances as possible (capped
+  at |max_concurrent_jobs|) and to enqueue all the requests in a single
+  addr2line instance. For a few symbols (i.e. dozens) sharding isn't
+  worth the startup cost.
+  The multiprocess logic kicks in as soon as the queues for the existing
+  instances grow. Specifically, once all the existing instances reach the
+  |max_queue_size| bound, a new addr2line instance is kicked in.
+  In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+  have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+  blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+  other modules in this project), to allow easy reuse in external projects.
+  """
+
+  def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+      max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+      source_root_path=None, strip_base_path=None):
+    """Args:
+      elf_file_path: path of the elf file to be symbolized.
+      addr2line_path: path of the toolchain's addr2line binary.
+      callback: a callback which will be invoked for each resolved symbol with
+          the two args (sym_info, callback_arg). The former is an instance of
+          |ELFSymbolInfo| and contains the symbol information. The latter is an
+          embedder-provided argument which is passed to SymbolizeAsync().
+      inlines: when True, the ELFSymbolInfo will contain also the details about
+          the outer inlining functions. When False, only the innermost function
+          will be provided.
+      max_concurrent_jobs: Max number of addr2line instances spawned.
+          Parallelize responsibly, addr2line is a memory and I/O monster.
+      max_queue_size: Max number of outstanding requests per addr2line instance.
+      addr2line_timeout: Max time (in seconds) to wait for a addr2line response.
+          After the timeout, the instance will be considered hung and respawned.
+      source_root_path: In some toolchains only the name of the source file is
+          output, without any path information; disambiguation searches
+          through the source directory specified by |source_root_path| argument
+          for files whose name matches, adding the full path information to the
+          output. For example, if the toolchain outputs "unicode.cc" and there
+          is a file called "unicode.cc" located under |source_root_path|/foo,
+          the tool will replace "unicode.cc" with
+          "|source_root_path|/foo/unicode.cc". If there are multiple files with
+          the same name, disambiguation will fail because the tool cannot
+          determine which of the files was the source of the symbol.
+      strip_base_path: Rebases the symbols' source paths onto
+          |source_root_path| (i.e. replaces |strip_base_path| with
+          |source_root_path|).
+    """
+    assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+    self.elf_file_path = elf_file_path
+    self.addr2line_path = addr2line_path
+    self.callback = callback
+    self.inlines = inlines
+    self.max_concurrent_jobs = (max_concurrent_jobs or
+                                min(multiprocessing.cpu_count(), 4))
+    self.max_queue_size = max_queue_size
+    self.addr2line_timeout = addr2line_timeout
+    self.requests_counter = 0  # For generating monotonic request IDs.
+    self._a2l_instances = []  # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+    # If necessary, create disambiguation lookup table
+    self.disambiguate = source_root_path is not None
+    self.disambiguation_table = {}
+    self.strip_base_path = strip_base_path
+    if self.disambiguate:
+      self.source_root_path = os.path.abspath(source_root_path)
+      self._CreateDisambiguationTable()
+
+    # Create one addr2line instance. More instances will be created on demand
+    # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+    self._CreateNewA2LInstance()
+
+  def SymbolizeAsync(self, addr, callback_arg=None):
+    """Requests symbolization of a given address.
+
+    This method is not guaranteed to return immediately. It generally does, but
+    in some scenarios (e.g. all addr2line instances have full queues) it can
+    block to create back-pressure.
+
+    Args:
+      addr: address to symbolize.
+      callback_arg: optional argument which will be passed to the |callback|."""
+    assert isinstance(addr, int)
+
+    # Process all the symbols that have been resolved in the meanwhile.
+    # Essentially, this drains all the addr2line(s) out queues.
+    for a2l_to_purge in self._a2l_instances:
+      a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+      a2l_to_purge.RecycleIfNecessary()
+
+    # Find the best instance according to this logic:
+    # 1. Find an existing instance with the shortest queue.
+    # 2. If all instances' queues are full, but there is room in the pool,
+    #    (i.e. < |max_concurrent_jobs|) create a new instance.
+    # 3. If there were already |max_concurrent_jobs| instances and all of them
+    #    had full queues, apply back-pressure.
+
+    # 1.
+    def _SortByQueueSizeAndReqID(a2l):
+      return (a2l.queue_size, a2l.first_request_id)
+    a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+    # 2.
+    if (a2l.queue_size >= self.max_queue_size and
+        len(self._a2l_instances) < self.max_concurrent_jobs):
+      a2l = self._CreateNewA2LInstance()
+
+    # 3.
+    if a2l.queue_size >= self.max_queue_size:
+      a2l.WaitForNextSymbolInQueue()
+
+    a2l.EnqueueRequest(addr, callback_arg)
+
+  def WaitForIdle(self):
+    """Waits for all the outstanding requests to complete."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+
+  def Join(self):
+    """Waits for all the outstanding requests to complete and terminates."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+      a2l.Terminate()
+
+  def _CreateNewA2LInstance(self):
+    assert len(self._a2l_instances) < self.max_concurrent_jobs
+    a2l = ELFSymbolizer.Addr2Line(self)
+    self._a2l_instances.append(a2l)
+    return a2l
+
+  def _CreateDisambiguationTable(self):
+    """ Non-unique file names will result in None entries"""
+    start_time = time.time()
+    logging.info('Collecting information about available source files...')
+    self.disambiguation_table = {}
+
+    for root, _, filenames in os.walk(self.source_root_path):
+      for f in filenames:
+        self.disambiguation_table[f] = os.path.join(root, f) if (f not in
+                                       self.disambiguation_table) else None
+    logging.info('Finished collecting information about '
+                 'possible files (took %.1f s).',
+                 (time.time() - start_time))
+
+
+  class Addr2Line(object):
+    """A python wrapper around an addr2line instance.
+
+    The communication with the addr2line process looks as follows:
+      [STDIN]         [STDOUT]  (from addr2line's viewpoint)
+    > f001111
+    > f002222
+                    < Symbol::Name(foo, bar) for f001111
+                    < /path/to/source/file.c:line_number
+    > f003333
+                    < Symbol::Name2() for f002222
+                    < /path/to/source/file.c:line_number
+                    < Symbol::Name3() for f003333
+                    < /path/to/source/file.c:line_number
+    """
+
+    SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+    def __init__(self, symbolizer):
+      self._symbolizer = symbolizer
+      self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+      # The request queue (i.e. addresses pushed to addr2line's stdin and not
+      # yet retrieved on stdout)
+      self._request_queue = collections.deque()
+
+      # This is essentially len(self._request_queue). It is kept in a separate
+      # field because it turned out to be a performance hot-spot.
+      self.queue_size = 0
+
+      # Keep track of the number of symbols a process has processed to
+      # avoid a single process growing too big and using all the memory.
+      self._processed_symbols_count = 0
+
+      # Objects required to handle the addr2line subprocess.
+      self._proc = None  # subprocess.Popen(...) instance.
+      self._thread = None  # threading.Thread instance.
+      self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+      self._RestartAddr2LineProcess()
+
+    def EnqueueRequest(self, addr, callback_arg):
+      """Pushes an address to addr2line's stdin (and keeps track of it)."""
+      self._symbolizer.requests_counter += 1  # For global "age" of requests.
+      req_idx = self._symbolizer.requests_counter
+      self._request_queue.append((addr, callback_arg, req_idx))
+      self.queue_size += 1
+      self._WriteToA2lStdin(addr)
+
+    def WaitForIdle(self):
+      """Waits until all the pending requests have been symbolized."""
+      while self.queue_size > 0:
+        self.WaitForNextSymbolInQueue()
+
+    def WaitForNextSymbolInQueue(self):
+      """Waits for the next pending request to be symbolized."""
+      if not self.queue_size:
+        return
+
+      # This outer loop guards against a2l hanging (detecting stdout timeout).
+      while True:
+        start_time = datetime.datetime.now()
+        timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+        # The inner loop guards against a2l crashing (checking if it exited).
+        while datetime.datetime.now() - start_time < timeout:
+          # poll() returns non-None if the process exited. a2l should
+          # never exit on its own.
+          if self._proc.poll():
+            logging.warning('addr2line crashed, respawning (lib: %s).',
+                            self._lib_file_name)
+            self._RestartAddr2LineProcess()
+            # TODO(primiano): the best thing to do in this case would be
+            # shrinking the pool size as, very likely, addr2line is crashed
+            # due to low memory (and the respawned one will die again soon).
+
+          try:
+            lines = self._out_queue.get(block=True, timeout=0.25)
+          except Queue.Empty:
+            # On timeout (1/4 s.) repeat the inner loop, checking whether the
+            # addr2line process crashed or we have been waiting on its output
+            # for too long.
+            continue
+
+          # In nominal conditions, we get straight to this point.
+          self._ProcessSymbolOutput(lines)
+          return
+
+        # If this point is reached, we waited more than |addr2line_timeout|.
+        logging.warning('Hung addr2line process, respawning (lib: %s).',
+                        self._lib_file_name)
+        self._RestartAddr2LineProcess()
+
+    def ProcessAllResolvedSymbolsInQueue(self):
+      """Consumes all the addr2line output lines produced (without blocking)."""
+      if not self.queue_size:
+        return
+      while True:
+        try:
+          lines = self._out_queue.get_nowait()
+        except Queue.Empty:
+          break
+        self._ProcessSymbolOutput(lines)
+
+    def RecycleIfNecessary(self):
+      """Restarts the process if it has been used for too long.
+
+      A long running addr2line process will consume excessive amounts
+      of memory without any gain in performance."""
+      if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+        self._RestartAddr2LineProcess()
+
+    def Terminate(self):
+      """Kills the underlying addr2line process.
+
+      The poller |_thread| will terminate as well due to the broken pipe."""
+      try:
+        self._proc.kill()
+        self._proc.communicate()  # Essentially wait() without risking deadlock.
+      except Exception: # pylint: disable=broad-except
+        # The process may already be gone; ignore any error while killing it.
+        pass
+      self._proc = None
+
+    def _WriteToA2lStdin(self, addr):
+      self._proc.stdin.write('%s\n' % hex(addr))
+      if self._symbolizer.inlines:
+        # In the case of inlines we output an extra blank line, which causes
+        # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+        self._proc.stdin.write('\n')
+      self._proc.stdin.flush()
+
+    def _ProcessSymbolOutput(self, lines):
+      """Parses an addr2line symbol output and triggers the client callback."""
+      (_, callback_arg, _) = self._request_queue.popleft()
+      self.queue_size -= 1
+
+      innermost_sym_info = None
+      sym_info = None
+      for (line1, line2) in lines:
+        prev_sym_info = sym_info
+        name = line1 if not line1.startswith('?') else None
+        source_path = None
+        source_line = None
+        m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+        if m:
+          if not m.group(1).startswith('?'):
+            source_path = m.group(1)
+            if not m.group(2).startswith('?'):
+              source_line = int(m.group(2))
+        else:
+          logging.warning('Got invalid symbol path from addr2line: %s', line2)
+
+        # Apply disambiguation, if enabled and needed.
+        was_ambiguous = False
+        disambiguated = False
+        if self._symbolizer.disambiguate:
+          if source_path and not posixpath.isabs(source_path):
+            path = self._symbolizer.disambiguation_table.get(source_path)
+            was_ambiguous = True
+            disambiguated = path is not None
+            source_path = path if disambiguated else source_path
+
+          # Use absolute paths (so that paths are consistent, as disambiguation
+          # uses absolute paths)
+          if source_path and not was_ambiguous:
+            source_path = os.path.abspath(source_path)
+
+        if source_path and self._symbolizer.strip_base_path:
+          # Strip the base path
+          source_path = re.sub('^' + self._symbolizer.strip_base_path,
+              self._symbolizer.source_root_path or '', source_path)
+
+        sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+                                 disambiguated)
+        if prev_sym_info:
+          prev_sym_info.inlined_by = sym_info
+        if not innermost_sym_info:
+          innermost_sym_info = sym_info
+
+      self._processed_symbols_count += 1
+      self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+    def _RestartAddr2LineProcess(self):
+      if self._proc:
+        self.Terminate()
+
+      # The only reason this Queue (and the corresponding Thread below) exists
+      # is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+      self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line-buffered mode.
+      cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+             '--exe=' + self._symbolizer.elf_file_path]
+      if self._symbolizer.inlines:
+        cmd += ['--inlines']
+      self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+                                    stdin=subprocess.PIPE, stderr=sys.stderr,
+                                    close_fds=True)
+
+      # Start the poller thread, which simply moves atomically the lines read
+      # from the addr2line's stdout to the |_out_queue|.
+      self._thread = threading.Thread(
+          target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+          args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+      self._thread.daemon = True  # Don't prevent early process exit.
+      self._thread.start()
+
+      self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (this matters only when
+      # a hung addr2line timed out with requests still in flight).
+      for (addr, _, _) in self._request_queue:
+        self._WriteToA2lStdin(addr)
+
+    @staticmethod
+    def StdoutReaderThread(process_pipe, queue, inlines):
+      """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+      This is the only piece of code not running on the main thread. It merely
+      writes to a Queue, which is thread-safe. In the case of inlines, it
+      detects the ??,??:0 marker and sends the lines atomically, such that the
+      main thread always receives all the lines corresponding to one symbol in
+      one shot."""
+      try:
+        lines_for_one_symbol = []
+        while True:
+          line1 = process_pipe.readline().rstrip('\r\n')
+          line2 = process_pipe.readline().rstrip('\r\n')
+          if not line1 or not line2:
+            break
+          inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+                                  (line1 != '??' and line2 != '??:0'))
+          if not inlines or inline_has_more_lines:
+            lines_for_one_symbol += [(line1, line2)]
+          if inline_has_more_lines:
+            continue
+          queue.put(lines_for_one_symbol)
+          lines_for_one_symbol = []
+        process_pipe.close()
+
+      # Every addr2line process will die at some point; let it do so silently.
+      except (IOError, OSError):
+        pass
+
+    @property
+    def first_request_id(self):
+      """Returns the request_id of the oldest pending request in the queue."""
+      return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+  """The result of the symbolization passed as first arg. of each callback."""
+
+  def __init__(self, name, source_path, source_line, was_ambiguous=False,
+               disambiguated=False):
+    """All the fields here can be None (if addr2line replies with '??')."""
+    self.name = name
+    self.source_path = source_path
+    self.source_line = source_line
+    # In the case of |inlines|=True, the |inlined_by| points to the outer
+    # function inlining the current one (and so on, to form a chain).
+    self.inlined_by = None
+    self.disambiguated = disambiguated
+    self.was_ambiguous = was_ambiguous
+
+  def __str__(self):
+    return '%s [%s:%d]' % (
+        self.name or '??', self.source_path or '??', self.source_line or 0)
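+
+
+# A minimal usage sketch (illustrative only; the addr2line and library paths
+# below are placeholders, not part of this module):
+#
+#   def _PrintSymbol(sym_info, addr):
+#     print('%08x -> %s' % (addr, sym_info))
+#
+#   symbolizer = ELFSymbolizer(elf_file_path='/path/to/libfoo.so',
+#                              addr2line_path='/path/to/addr2line',
+#                              callback=_PrintSymbol,
+#                              max_concurrent_jobs=4)
+#   for address in (0x1000, 0x2000, 0x3000):
+#     symbolizer.SymbolizeAsync(address, callback_arg=address)
+#   symbolizer.Join()  # Blocks until every callback has fired, then cleans up.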
diff --git a/src/build/android/pylib/symbols/elf_symbolizer_unittest.py b/src/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000..765b598
--- /dev/null
+++ b/src/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import unittest
+
+from pylib.symbols import elf_symbolizer
+from pylib.symbols import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+  def setUp(self):
+    self._callback = functools.partial(
+        ELFSymbolizerTest._SymbolizeCallback, self)
+    self._resolved_addresses = set()
+    # Mute warnings; we expect them due to the crash/hang tests.
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testParallelism1(self):
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+  def testParallelism4(self):
+    self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+  def testParallelism8(self):
+    self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+  def testCrash(self):
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+  def testHang(self):
+    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+  def testInlines(self):
+    """Stimulate the inline processing logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        inlines=True,
+        max_concurrent_jobs=4)
+
+    for addr in xrange(1000):
+      exp_inline = False
+      exp_unknown = False
+
+      # First 100 addresses with inlines.
+      if addr < 100:
+        addr += _INLINE_MOCK_ADDR
+        exp_inline = True
+
+      # Followed by 100 without inlines.
+      elif addr < 200:
+        pass
+
+      # Followed by 100 interleaved inlines and not inlines.
+      elif addr < 300:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+
+      # Followed by 100 interleaved inlines and unknowns.
+      elif addr < 400:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+        else:
+          addr += _UNKNOWN_MOCK_ADDR
+          exp_unknown = True
+
+      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+      exp_source_line = addr if not exp_unknown else None
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testIncompleteSyminfo(self):
+    """Stimulate the symbol-not-resolved logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testWaitForIdle(self):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+    symbolizer.WaitForIdle()
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+    symbolizer.Join()
+
+  def _RunTest(self, max_concurrent_jobs, num_symbols):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=max_concurrent_jobs,
+        addr2line_timeout=0.5)
+
+    for addr in xrange(num_symbols):
+      exp_name = 'mock_sym_for_addr_%d' % addr
+      exp_source_path = 'mock_src/mock_lib1.so.c'
+      exp_source_line = addr
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+    # Check that all the expected callbacks have been received.
+    for addr in xrange(num_symbols):
+      self.assertIn(addr, self._resolved_addresses)
+      self._resolved_addresses.remove(addr)
+
+    # Check for unexpected callbacks.
+    self.assertEqual(len(self._resolved_addresses), 0)
+
+  def _SymbolizeCallback(self, sym_info, cb_arg):
+    self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
+    self.assertTrue(isinstance(cb_arg, tuple))
+    self.assertEqual(len(cb_arg), 5)
+
+    # Unpack expectations from the callback extra argument.
+    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+    if exp_name is None:
+      self.assertIsNone(sym_info.name)
+    else:
+      self.assertTrue(sym_info.name.startswith(exp_name))
+    self.assertEqual(sym_info.source_path, exp_source_path)
+    self.assertEqual(sym_info.source_line, exp_source_line)
+
+    if exp_inlines:
+      self.assertEqual(sym_info.name, exp_name + '_inner')
+      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+      self.assertEqual(sym_info.inlined_by.inlined_by.name,
+                       exp_name + '_outer')
+
+    # Check against duplicate callbacks.
+    self.assertNotIn(addr, self._resolved_addresses)
+    self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/symbols/mock_addr2line/__init__.py b/src/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000..8b2a723
--- /dev/null
+++ b/src/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+from __future__ import print_function
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
+  # Silently swallow the other unnecessary arguments.
+  parser.add_option('-C', '--demangle', action='store_true')
+  parser.add_option('-f', '--functions', action='store_true')
+  parser.add_option('-i', '--inlines', action='store_true')
+  options, _ = parser.parse_args(argv[1:])
+  lib_file_name = posixpath.basename(options.exe)
+  processed_sym_count = 0
+  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+    line = sys.stdin.readline().rstrip('\r')
+    if not line:
+      break
+
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
+    if line == '\n':
+      print('??')
+      print('??:0')
+      sys.stdout.flush()
+      continue
+
+    addr = int(line, 16)
+    processed_sym_count += 1
+    if crash_every and processed_sym_count % crash_every == 0:
+      sys.exit(1)
+    if hang_every and processed_sym_count % hang_every == 0:
+      time.sleep(1)
+
+    # Addresses < 1M will return good mock symbol information.
+    if addr < 1024 * 1024:
+      print('mock_sym_for_addr_%d' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+
+    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+    elif addr < 2 * 1024 * 1024:
+      print('mock_sym_for_addr_%d' % addr)
+      print('??:0')
+
+    # Addresses 2M <= x < 3M will return unknown symbol information.
+    elif addr < 3 * 1024 * 1024:
+      print('??')
+      print('??')
+
+    # Addresses 3M <= x < 4M will return inlines.
+    elif addr < 4 * 1024 * 1024:
+      print('mock_sym_for_addr_%d_inner' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+      print('mock_sym_for_addr_%d_middle' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+      print('mock_sym_for_addr_%d_outer' % addr)
+      print('mock_src/%s.c:%d' % (lib_file_name, addr))
+
+    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
\ No newline at end of file
diff --git a/src/build/android/pylib/symbols/stack_symbolizer.py b/src/build/android/pylib/symbols/stack_symbolizer.py
new file mode 100644
index 0000000..4173741
--- /dev/null
+++ b/src/build/android/pylib/symbols/stack_symbolizer.py
@@ -0,0 +1,86 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+import time
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+_STACK_TOOL = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..',
+                           'third_party', 'android_platform', 'development',
+                           'scripts', 'stack')
+ABI_REG = re.compile(r"ABI: '(.+?)'")
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant to find the more specific match
+  # (e.g., arm64) before the less specific (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
+class Symbolizer(object):
+  """A helper class to symbolize stack."""
+
+  def __init__(self, apk_under_test=None):
+    self._apk_under_test = apk_under_test
+    self._time_spent_symbolizing = 0
+
+
+  def __del__(self):
+    self.CleanUp()
+
+
+  def CleanUp(self):
+    """Clean up the temporary directory of apk libs."""
+    if self._time_spent_symbolizing > 0:
+      logging.info(
+          'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing)
+
+
+  def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
+                                         device_abi, include_stack=True):
+    """Run the stack tool for given input.
+
+    Args:
+      data_to_symbolize: a list of strings to symbolize.
+      device_abi: the default ABI of the device which generated the tombstone.
+      include_stack: boolean whether to include stack data in output.
+
+    Yields:
+      A string for each line of resolved stack output.
+    """
+    if not os.path.exists(_STACK_TOOL):
+      logging.warning('%s missing. Unable to resolve native stack traces.',
+                      _STACK_TOOL)
+      return
+
+    arch = _DeviceAbiToArch(device_abi)
+    if not arch:
+      logging.warning('No device_abi can be found.')
+      return
+
+    cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
+           constants.GetOutDirectory(), '--more-info']
+    env = dict(os.environ)
+    env['PYTHONDONTWRITEBYTECODE'] = '1'
+    with tempfile.NamedTemporaryFile() as f:
+      f.write('\n'.join(data_to_symbolize))
+      f.flush()
+      start = time.time()
+      try:
+        _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env)
+      finally:
+        self._time_spent_symbolizing += time.time() - start
+    for line in output.splitlines():
+      if not include_stack and 'Stack Data:' in line:
+        break
+      yield line
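+
+
+# A minimal usage sketch (illustrative only; |tombstone_lines| stands for the
+# raw tombstone/logcat lines to symbolize):
+#
+#   symbolizer = Symbolizer()
+#   for line in symbolizer.ExtractAndResolveNativeStackTraces(
+#       tombstone_lines, device_abi='arm64-v8a'):
+#     print(line)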
diff --git a/src/build/android/pylib/symbols/symbol_utils.py b/src/build/android/pylib/symbols/symbol_utils.py
new file mode 100644
index 0000000..dea3c63
--- /dev/null
+++ b/src/build/android/pylib/symbols/symbol_utils.py
@@ -0,0 +1,814 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import bisect
+import collections
+import logging
+import os
+import re
+
+from pylib.constants import host_paths
+from pylib.symbols import elf_symbolizer
+
+
+def _AndroidAbiToCpuArch(android_abi):
+  """Return the Chromium CPU architecture name for a given Android ABI."""
+  _ARCH_MAP = {
+    'armeabi': 'arm',
+    'armeabi-v7a': 'arm',
+    'arm64-v8a': 'arm64',
+    'x86_64': 'x64',
+  }
+  return _ARCH_MAP.get(android_abi, android_abi)
+
+
+def _HexAddressRegexpFor(android_abi):
+  """Return a regexp matching hexadecimal addresses for a given Android ABI."""
+  if android_abi in ['x86_64', 'arm64-v8a', 'mips64']:
+    width = 16
+  else:
+    width = 8
+  return '[0-9a-f]{%d}' % width
+
+
+class HostLibraryFinder(object):
+  """Translate device library path to matching host unstripped library path.
+
+  Usage is the following:
+    1) Create instance.
+    2) Call AddSearchDir() once or more times to add host directory path to
+       look for unstripped native libraries.
+    3) Call Find(device_libpath) repeatedly to translate a device-specific
+       library path into the corresponding host path to the unstripped
+       version.
+  """
+  def __init__(self):
+    """Initialize instance."""
+    self._search_dirs = []
+    self._lib_map = {}        # Map of library name to host file paths.
+
+  def AddSearchDir(self, lib_dir):
+    """Add a directory to the search path for host native shared libraries.
+
+    Args:
+      lib_dir: host path containing native libraries.
+    """
+    if not os.path.exists(lib_dir):
+      logging.warning('Ignoring missing host library directory: %s', lib_dir)
+      return
+    if not os.path.isdir(lib_dir):
+      logging.warning('Ignoring invalid host library directory: %s', lib_dir)
+      return
+    self._search_dirs.append(lib_dir)
+    self._lib_map = {}  # Reset the map.
+
+  def Find(self, device_libpath):
+    """Find the host file path matching a specific device library path.
+
+    Args:
+      device_libpath: device-specific file path to library or executable.
+    Returns:
+      host file path to the unstripped version of the library, or None.
+    """
+    host_lib_path = None
+    lib_name = os.path.basename(device_libpath)
+    host_lib_path = self._lib_map.get(lib_name)
+    if not host_lib_path:
+      for search_dir in self._search_dirs:
+        lib_path = os.path.join(search_dir, lib_name)
+        if os.path.exists(lib_path):
+          host_lib_path = lib_path
+          break
+
+      if not host_lib_path:
+        logging.debug('Could not find host library for: %s', lib_name)
+      self._lib_map[lib_name] = host_lib_path
+
+    return host_lib_path
+
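+# A minimal usage sketch (illustrative only; the directory and library names
+# are placeholders):
+#
+#   finder = HostLibraryFinder()
+#   finder.AddSearchDir('/out/Release/lib.unstripped')
+#   host_path = finder.Find('/data/app/com.example.app-1/lib/arm/libfoo.so')
+#   # |host_path| is now '/out/Release/lib.unstripped/libfoo.so', or None if
+#   # no matching unstripped library exists in the search dirs.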
+
+
+class SymbolResolver(object):
+  """A base class for objets that can symbolize library (path, offset)
+     pairs into symbol information strings. Usage is the following:
+
+     1) Create new instance (by calling the constructor of a derived
+        class, since this is only the base one).
+
+     2) Call SetAndroidAbi() before any call to FindSymbolInfo() in order
+        to set the Android CPU ABI used for symbolization.
+
+     3) Before the first call to FindSymbolInfo(), one can call
+        AddLibraryOffset(), or AddLibraryOffsets() to record a set of offsets
+        that you will want to symbolize later through FindSymbolInfo(). Doing
+        so allows some SymbolResolver derived classes to work faster (e.g. the
+        one that invokes the 'addr2line' program, since the latter works faster
+        if the offsets provided as inputs are sorted in increasing order).
+
+     4) Call FindSymbolInfo(path, offset) to return the corresponding
+        symbol information string, or None if this doesn't correspond
+        to anything the instance can handle.
+
+        Note that whether the path is specific to the device or to the
+        host depends on the derived class implementation.
+  """
+  def __init__(self):
+    self._android_abi = None
+    self._lib_offsets_map = collections.defaultdict(set)
+
+  def SetAndroidAbi(self, android_abi):
+    """Set the Android ABI value for this instance.
+
+    Calling this function before FindSymbolInfo() is required by some
+    derived class implementations.
+
+    Args:
+      android_abi: Native Android CPU ABI name (e.g. 'armeabi-v7a').
+    Raises:
+      Exception if the ABI was already set with a different value.
+    """
+    if self._android_abi and self._android_abi != android_abi:
+      raise Exception('Cannot reset Android ABI to new value %s, already set '
+                      'to %s' % (android_abi, self._android_abi))
+
+    self._android_abi = android_abi
+
+  def AddLibraryOffset(self, lib_path, offset):
+    """Associate a single offset to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      lib_path: A library path.
+      offset: An integer offset within the corresponding library that will be
+        symbolized by future calls to FindSymbolInfo.
+    """
+    self._lib_offsets_map[lib_path].add(offset)
+
+  def AddLibraryOffsets(self, lib_path, lib_offsets):
+    """Associate a set of wanted offsets to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      lib_path: A library path.
+      lib_offsets: An iterable of integer offsets within the corresponding
+        library that will be symbolized by future calls to FindSymbolInfo.
+    """
+    self._lib_offsets_map[lib_path].update(lib_offsets)
+
+  # pylint: disable=unused-argument,no-self-use
+  def FindSymbolInfo(self, lib_path, lib_offset):
+    """Symbolize a device library path and offset.
+
+    Args:
+      lib_path: Library path (device or host specific, depending on the
+        derived class implementation).
+      lib_offset: Integer offset within the library.
+    Returns:
+      Corresponding symbol information string, or None.
+    """
+    # The base implementation cannot symbolize anything.
+    return None
+  # pylint: enable=unused-argument,no-self-use
+
+
+class ElfSymbolResolver(SymbolResolver):
+  """A SymbolResolver that can symbolize host path + offset values using
+     an elf_symbolizer.ELFSymbolizer instance.
+  """
+  def __init__(self, addr2line_path_for_tests=None):
+    super(ElfSymbolResolver, self).__init__()
+    self._addr2line_path = addr2line_path_for_tests
+
+    # Used to cache one ELFSymbolizer instance per library path.
+    self._elf_symbolizer_cache = {}
+
+    # Used to cache FindSymbolInfo() results. Maps host library paths
+    # to (offset -> symbol info string) dictionaries.
+    self._symbol_info_cache = collections.defaultdict(dict)
+    self._allow_symbolizer = True
+
+  def _CreateSymbolizerFor(self, host_path):
+    """Create the ELFSymbolizer instance associated with a given lib path."""
+    addr2line_path = self._addr2line_path
+    if not addr2line_path:
+      if not self._android_abi:
+        raise Exception(
+            'Android CPU ABI must be set before calling FindSymbolInfo!')
+
+      cpu_arch = _AndroidAbiToCpuArch(self._android_abi)
+      self._addr2line_path = host_paths.ToolPath('addr2line', cpu_arch)
+
+    return elf_symbolizer.ELFSymbolizer(
+        elf_file_path=host_path, addr2line_path=self._addr2line_path,
+        callback=ElfSymbolResolver._Callback, inlines=True)
+
+  def DisallowSymbolizerForTesting(self):
+    """Disallow FindSymbolInfo() from using a symbolizer.
+
+    This is used during unit-testing to ensure that the offsets that were
+    recorded via AddLibraryOffset()/AddLibraryOffsets() are properly
+    symbolized, but not anything else.
+    """
+    self._allow_symbolizer = False
+
+  def FindSymbolInfo(self, host_path, offset):
+    """Override SymbolResolver.FindSymbolInfo.
+
+    Args:
+      host_path: Host-specific path to the native shared library.
+      offset: Integer offset within the native library.
+    Returns:
+      A symbol info string, or None.
+    """
+    offset_map = self._symbol_info_cache[host_path]
+    symbol_info = offset_map.get(offset)
+    if symbol_info:
+      return symbol_info
+
+    # Create symbolizer on demand.
+    symbolizer = self._elf_symbolizer_cache.get(host_path)
+    if not symbolizer:
+      symbolizer = self._CreateSymbolizerFor(host_path)
+      self._elf_symbolizer_cache[host_path] = symbolizer
+
+      # If there are pre-recorded offsets for this path, symbolize them now.
+      offsets = self._lib_offsets_map.get(host_path)
+      if offsets:
+        offset_map = {}
+        for pre_offset in offsets:
+          symbolizer.SymbolizeAsync(
+              pre_offset, callback_arg=(offset_map, pre_offset))
+        symbolizer.WaitForIdle()
+        self._symbol_info_cache[host_path] = offset_map
+
+        symbol_info = offset_map.get(offset)
+        if symbol_info:
+          return symbol_info
+
+    if not self._allow_symbolizer:
+      return None
+
+    # Symbolize single offset. Slower if addresses are not provided in
+    # increasing order to addr2line.
+    symbolizer.SymbolizeAsync(offset,
+                              callback_arg=(offset_map, offset))
+    symbolizer.WaitForIdle()
+    return offset_map.get(offset)
+
+  @staticmethod
+  def _Callback(sym_info, callback_arg):
+    offset_map, offset = callback_arg
+    offset_map[offset] = str(sym_info)
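+
+# A minimal usage sketch (illustrative only; the unstripped library path is a
+# placeholder):
+#
+#   resolver = ElfSymbolResolver()
+#   resolver.SetAndroidAbi('armeabi-v7a')
+#   # Optional: pre-record offsets so they are symbolized in a single batch.
+#   resolver.AddLibraryOffsets('/out/Release/lib.unstripped/libfoo.so',
+#                              [0x1000, 0x2000])
+#   info = resolver.FindSymbolInfo('/out/Release/lib.unstripped/libfoo.so',
+#                                  0x1000)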
+
+
+class DeviceSymbolResolver(SymbolResolver):
+  """A SymbolResolver instance that accepts device-specific path.
+
+  Usage is the following:
+    1) Create new instance, passing a parent SymbolResolver instance that
+       accepts host-specific paths, and a HostLibraryFinder instance.
+
+    2) Optional: call AddApkOffsets() to add offsets from within an APK
+       that contains uncompressed native shared libraries.
+
+    3) Use it as any SymbolResolver instance.
+  """
+  def __init__(self, host_resolver, host_lib_finder):
+    """Initialize instance.
+
+    Args:
+      host_resolver: A parent SymbolResolver instance that will be used
+        to resolve symbols from host library paths.
+      host_lib_finder: A HostLibraryFinder instance used to locate
+        unstripped libraries on the host.
+    """
+    super(DeviceSymbolResolver, self).__init__()
+    self._host_lib_finder = host_lib_finder
+    self._bad_device_lib_paths = set()
+    self._host_resolver = host_resolver
+
+  def SetAndroidAbi(self, android_abi):
+    super(DeviceSymbolResolver, self).SetAndroidAbi(android_abi)
+    self._host_resolver.SetAndroidAbi(android_abi)
+
+  def AddLibraryOffsets(self, device_lib_path, lib_offsets):
+    """Associate a set of wanted offsets to a given device library.
+
+    This must be called before FindSymbolInfo(), otherwise its input arguments
+    will be ignored.
+
+    Args:
+      device_lib_path: A device-specific library path.
+      lib_offsets: An iterable of integer offsets within the corresponding
+        library that will be symbolized by future calls to FindSymbolInfo.
+    """
+    if device_lib_path in self._bad_device_lib_paths:
+      return
+
+    host_lib_path = self._host_lib_finder.Find(device_lib_path)
+    if not host_lib_path:
+      # NOTE: self._bad_device_lib_paths is only used to print this
+      #       warning once per bad library.
+      logging.warning('Could not find host library matching device path: %s',
+                      device_lib_path)
+      self._bad_device_lib_paths.add(device_lib_path)
+      return
+
+    self._host_resolver.AddLibraryOffsets(host_lib_path, lib_offsets)
+
+  def AddApkOffsets(self, device_apk_path, apk_offsets, apk_translator):
+    """Associate a set of wanted offsets to a given device APK path.
+
+    This converts the APK-relative offsets into offsets relative to the
+    uncompressed libraries it contains, then calls AddLibraryOffsets()
+    for each one of the libraries.
+
+    Must be called before FindSymbolInfo() as well, otherwise input arguments
+    will be ignored.
+
+    Args:
+      device_apk_path: Device-specific APK path.
+      apk_offsets: Iterable of offsets within the APK file.
+      apk_translator: An ApkLibraryPathTranslator instance used to extract
+        library paths from the APK.
+    """
+    libraries_map = collections.defaultdict(set)
+    for offset in apk_offsets:
+      lib_path, lib_offset = apk_translator.TranslatePath(device_apk_path,
+                                                          offset)
+      libraries_map[lib_path].add(lib_offset)
+
+    for lib_path, lib_offsets in libraries_map.iteritems():
+      self.AddLibraryOffsets(lib_path, lib_offsets)
+
+  def FindSymbolInfo(self, device_path, offset):
+    """Overrides SymbolResolver.FindSymbolInfo.
+
+    Args:
+      device_path: Device-specific library path (e.g.
+        '/data/app/com.example.app-1/lib/x86/libfoo.so')
+      offset: Offset in device library path.
+    Returns:
+      Corresponding symbol information string, or None.
+    """
+    host_path = self._host_lib_finder.Find(device_path)
+    if not host_path:
+      return None
+
+    return self._host_resolver.FindSymbolInfo(host_path, offset)
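+
+# Composition sketch (illustrative only): a DeviceSymbolResolver wraps a host
+# resolver and a library finder, so callers can use device paths directly.
+#
+#   finder = HostLibraryFinder()
+#   finder.AddSearchDir('/out/Release/lib.unstripped')
+#   device_resolver = DeviceSymbolResolver(ElfSymbolResolver(), finder)
+#   device_resolver.SetAndroidAbi('armeabi-v7a')
+#   info = device_resolver.FindSymbolInfo(
+#       '/data/app/com.example.app-1/lib/arm/libfoo.so', 0x1000)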
+
+
+class MemoryMap(object):
+  """Models the memory map of a given process. Usage is:
+
+    1) Create new instance, passing the Android ABI.
+
+    2) Call TranslateLine() whenever you want to detect and translate any
+       memory map input line.
+
+    3) Otherwise, it is possible to parse the whole memory map input with
+       ParseLines(), then call FindSectionForAddress() repeatedly in order
+       to translate a memory address into the corresponding mapping and
+       file information tuple (e.g. to symbolize stack entries).
+  """
+
+  # A named tuple describing interesting memory map line items.
+  # Fields:
+  #   addr_start: Mapping start address in memory.
+  #   file_offset: Corresponding file offset.
+  #   file_size: Corresponding mapping size in bytes.
+  #   file_path: Input file path.
+  #   match: Corresponding regular expression match object.
+  LineTuple = collections.namedtuple('MemoryMapLineTuple',
+                                     'addr_start,file_offset,file_size,'
+                                     'file_path,match')
+
+  # A named tuple describing a memory map section.
+  # Fields:
+  #   address: Memory address.
+  #   size: Size in bytes in memory.
+  #   offset: Starting file offset.
+  #   path: Input file path.
+  SectionTuple = collections.namedtuple('MemoryMapSection',
+                                        'address,size,offset,path')
+
+  def __init__(self, android_abi):
+    """Initializes instance.
+
+    Args:
+      android_abi: Android CPU ABI name (e.g. 'armeabi-v7a')
+    """
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # pylint: disable=line-too-long
+    # A regular expression used to match memory map entries which look like:
+    #    b278c000-b2790fff r--   4fda000      5000  /data/app/com.google.android.apps.chrome-2/base.apk
+    # pylint: enable=line-too-long
+    self._re_map_section = re.compile(
+        r'\s*(?P<addr_start>' + hex_addr + r')-(?P<addr_end>' + hex_addr + ')' +
+        r'\s+' +
+        r'(?P<perm>...)\s+' +
+        r'(?P<file_offset>[0-9a-f]+)\s+' +
+        r'(?P<file_size>[0-9a-f]+)\s*' +
+        r'(?P<file_path>[^ \t]+)?')
+
+    self._addr_map = []  # Sorted list of (address, size, path, offset) tuples.
+    self._sorted_addresses = []  # Sorted list of address fields in _addr_map.
+    self._in_section = False
+
+  def TranslateLine(self, line, apk_path_translator):
+    """Try to translate a memory map input line, if detected.
+
+    This only takes care of converting mapped APK file path and offsets
+    into a corresponding uncompressed native library file path + new offsets,
+    e.g. '..... <offset> <size> /data/.../base.apk' gets
+    translated into '.... <new-offset> <size> /data/.../base.apk!lib/libfoo.so'
+
+    This function should always work, even if ParseLines() was not called
+    previously.
+
+    Args:
+      line: Input memory map / tombstone line.
+      apk_path_translator: An ApkLibraryPathTranslator instance, used to map
+        APK offsets into uncompressed native libraries + new offsets.
+    Returns:
+      Translated memory map line, if relevant, or unchanged input line
+      otherwise.
+    """
+    t = self._ParseLine(line.rstrip())
+    if not t:
+      return line
+
+    new_path, new_offset = apk_path_translator.TranslatePath(
+        t.file_path, t.file_offset)
+
+    if new_path == t.file_path:
+      return line
+
+    pos = t.match.start('file_path')
+    return '%s%s (offset 0x%x)%s' % (line[0:pos], new_path, new_offset,
+                                     line[t.match.end('file_path'):])
+
+  def ParseLines(self, input_lines, in_section=False):
+    """Parse a list of input lines and extract the APK memory map out of it.
+
+    Args:
+      input_lines: list, or iterable, of input lines.
+      in_section: Optional. If true, considers that the input lines are
+        already part of the memory map. Otherwise, waits until the start of
+        the section appears in the input before trying to record data.
+    Returns:
+      True iff APK-related memory map entries were found. False otherwise.
+    """
+    addr_list = []  # list of (address, size, file_path, file_offset) tuples.
+    self._in_section = in_section
+    for line in input_lines:
+      t = self._ParseLine(line.rstrip())
+      if not t:
+        continue
+
+      addr_list.append(t)
+
+    self._addr_map = sorted(addr_list, key=lambda x: x.addr_start)
+    self._sorted_addresses = [e.addr_start for e in self._addr_map]
+    return bool(self._addr_map)
+
+  def _ParseLine(self, line):
+    """Used internally to recognized memory map input lines.
+
+    Args:
+      line: Input logcat or tomstone line.
+    Returns:
+      A LineTuple instance on success, or None on failure.
+    """
+    if not self._in_section:
+      self._in_section = line.startswith('memory map:')
+      return None
+
+    m = self._re_map_section.match(line)
+    if not m:
+      self._in_section = False  # End of memory map section
+      return None
+
+    # Only accept .apk and .so files that are not from the system partitions.
+    file_path = m.group('file_path')
+    if not file_path:
+      return None
+
+    if file_path.startswith('/system') or file_path.startswith('/vendor'):
+      return None
+
+    if not (file_path.endswith('.apk') or file_path.endswith('.so')):
+      return None
+
+    addr_start = int(m.group('addr_start'), 16)
+    file_offset = int(m.group('file_offset'), 16)
+    file_size = int(m.group('file_size'), 16)
+
+    return self.LineTuple(addr_start, file_offset, file_size, file_path, m)
+
+  def Dump(self):
+    """Print memory map for debugging."""
+    print('MEMORY MAP [')
+    for t in self._addr_map:
+      print('[%08x-%08x %08x %08x %s]' %
+            (t.addr_start, t.addr_start + t.file_size, t.file_size,
+             t.file_offset, t.file_path))
+    print('] MEMORY MAP')
+
+  def FindSectionForAddress(self, addr):
+    """Find the map section corresponding to a specific memory address.
+
+    Call this method only after ParseLines() has been called to extract
+    relevant information from the memory map.
+
+    Args:
+      addr: Memory address
+    Returns:
+      A SectionTuple instance on success, or None on failure.
+    """
+    pos = bisect.bisect_right(self._sorted_addresses, addr)
+    if pos > 0:
+      # All values in [0,pos) are <= addr, just ensure that the last
+      # one contains the address as well.
+      entry = self._addr_map[pos - 1]
+      if entry.addr_start + entry.file_size > addr:
+        return self.SectionTuple(entry.addr_start, entry.file_size,
+                                 entry.file_offset, entry.file_path)
+    return None
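+
+  # Worked example (illustrative only): with sections starting at 0x1000
+  # (size 0x2000) and 0x4000 (size 0x1000), FindSectionForAddress(0x2fff)
+  # returns the first section, while 0x3000 falls into the gap between the
+  # two mappings and yields None.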
+
+
+class BacktraceTranslator(object):
+  """Translates backtrace-related lines in a tombstone or crash report.
+
+  Usage is the following:
+    1) Create new instance with appropriate arguments.
+    2) If the tombstone / logcat input is available, one can call
+       FindLibraryOffsets() in order to detect which library offsets
+       will need to be symbolized during a future parse. Doing so helps
+       speed up the ELF symbolizer.
+    3) For each tombstone/logcat input line, call TranslateLine() to
+       try to detect and symbolize backtrace lines.
+  """
+
+  # A named tuple for relevant input backtrace lines.
+  # Fields:
+  #   rel_pc: Instruction pointer, relative to the library's load offset.
+  #   location: Library or APK file path.
+  #   offset: Load base of the executable code in the library or APK file.
+  #   match: The corresponding regular expression match object.
+  # Note:
+  #   The actual instruction pointer always matches the position at
+  #   |offset + rel_pc| in |location|.
+  LineTuple = collections.namedtuple('BacktraceLineTuple',
+                                     'rel_pc,location,offset,match')
+
+  def __init__(self, android_abi, apk_translator):
+    """Initialize instance.
+
+    Args:
+      android_abi: Android CPU ABI name (e.g. 'armeabi-v7a').
+      apk_translator: ApkLibraryPathTranslator instance used to convert
+        mapped APK file offsets into uncompressed library file paths with
+        new offsets.
+    """
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # A regular expression used to match backtrace lines.
+    self._re_backtrace = re.compile(
+        r'.*#(?P<frame>[0-9]{2})\s+' +
+        r'(..)\s+' +
+        r'(?P<rel_pc>' + hex_addr + r')\s+' +
+        r'(?P<location>[^ \t]+)' +
+        r'(\s+\(offset 0x(?P<offset>[0-9a-f]+)\))?')
+
+    # In certain cases, offset will be provided as <location>+0x<offset>
+    # instead of <location> (offset 0x<offset>). This is a regexp to detect
+    # this.
+    self._re_location_offset = re.compile(
+        r'.*\+0x(?P<offset>[0-9a-f]+)$')
+
+    self._apk_translator = apk_translator
+    self._in_section = False
+
+  def _ParseLine(self, line):
+    """Used internally to detect and decompose backtrace input lines.
+
+    Args:
+      line: input tombstone line.
+    Returns:
+      A LineTuple instance on success, None on failure.
+    """
+    if not self._in_section:
+      self._in_section = line.startswith('backtrace:')
+      return None
+
+    line = line.rstrip()
+    m = self._re_backtrace.match(line)
+    if not m:
+      self._in_section = False
+      return None
+
+    location = m.group('location')
+    offset = m.group('offset')
+    if not offset:
+      m2 = self._re_location_offset.match(location)
+      if m2:
+        offset = m2.group('offset')
+        location = location[0:m2.start('offset') - 3]
+
+    if not offset:
+      return None
+
+    offset = int(offset, 16)
+    rel_pc = int(m.group('rel_pc'), 16)
+
+    # Two cases to consider here:
+    #
+    # * If this is a library file directly mapped in memory, then |rel_pc|
+    #   is the direct offset within the library, and doesn't need any kind
+    #   of adjustment.
+    #
+    # * If this is a library mapped directly from an .apk file, then
+    #   |rel_pc| is the offset in the APK, and |offset| happens to be the
+    #   load base of the corresponding library.
+    #
+    if location.endswith('.so'):
+      # For a native library directly mapped from the file system,
+      return self.LineTuple(rel_pc, location, offset, m)
+
+    if location.endswith('.apk'):
+      # For a native library inside a memory-mapped APK file,
+      new_location, new_offset = self._apk_translator.TranslatePath(
+          location, offset)
+
+      return self.LineTuple(rel_pc, new_location, new_offset, m)
+
+    # Ignore anything else (e.g. .oat or .odex files).
+    return None
+
+  def FindLibraryOffsets(self, input_lines, in_section=False):
+    """Parse a tombstone's backtrace section and find all library offsets in it.
+
+    Args:
+      input_lines: List, or iterable, of input tombstone lines.
+      in_section: Optional. If True, considers that the stack section has
+        already started.
+    Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+    """
+    self._in_section = in_section
+    result = collections.defaultdict(set)
+    for line in input_lines:
+      t = self._ParseLine(line)
+      if not t:
+        continue
+
+      result[t.location].add(t.offset + t.rel_pc)
+    return result
+
+  def TranslateLine(self, line, symbol_resolver):
+    """Symbolize backtrace line if recognized.
+
+    Args:
+      line: input backtrace line.
+      symbol_resolver: symbol resolver instance to use. This method will
+        call its FindSymbolInfo(device_lib_path, lib_offset) method to
+        convert offsets into symbol information strings.
+    Returns:
+      Translated line (unchanged if not recognized as a backtrace).
+    """
+    t = self._ParseLine(line)
+    if not t:
+      return line
+
+    symbol_info = symbol_resolver.FindSymbolInfo(t.location,
+                                                 t.offset + t.rel_pc)
+    if not symbol_info:
+      symbol_info = 'offset 0x%x' % t.offset
+
+    pos = t.match.start('location')
+    pos2 = t.match.end('offset') + 1
+    if pos2 <= 0:
+      pos2 = t.match.end('location')
+    return '%s%s (%s)%s' % (line[:pos], t.location, symbol_info, line[pos2:])
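+
+# A minimal usage sketch (illustrative only; |abi|, |apk_translator|,
+# |resolver| and |tombstone_lines| are assumed to exist):
+#
+#   translator = BacktraceTranslator(abi, apk_translator)
+#   # Pre-record the offsets so the ELF symbolizer can batch them efficiently.
+#   for lib_path, offsets in translator.FindLibraryOffsets(
+#       tombstone_lines).iteritems():
+#     resolver.AddLibraryOffsets(lib_path, offsets)
+#   for line in tombstone_lines:
+#     print(translator.TranslateLine(line, resolver))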
+
+
+class StackTranslator(object):
+  """Translates stack-related lines in a tombstone or crash report."""
+
+  # A named tuple describing relevant stack input lines.
+  # Fields:
+  #  address: Address as it appears in the stack.
+  #  lib_path: Library path where |address| is mapped.
+  #  lib_offset: Library load base offset for |lib_path|.
+  #  match: Corresponding regular expression match object.
+  LineTuple = collections.namedtuple('StackLineTuple',
+                                     'address, lib_path, lib_offset, match')
+
+  def __init__(self, android_abi, memory_map, apk_translator):
+    """Initialize instance."""
+    hex_addr = _HexAddressRegexpFor(android_abi)
+
+    # pylint: disable=line-too-long
+    # A regular expression used to recognize stack entries like:
+    #
+    #    #05  bf89a180  bf89a1e4  [stack]
+    #         bf89a1c8  a0c01c51  /data/app/com.google.android.apps.chrome-2/base.apk
+    #         bf89a080  00000000
+    #         ........  ........
+    # pylint: enable=line-too-long
+    self._re_stack_line = re.compile(
+        r'\s+(?P<frame_number>#[0-9]+)?\s*' +
+        r'(?P<stack_addr>' + hex_addr + r')\s+' +
+        r'(?P<stack_value>' + hex_addr + r')' +
+        r'(\s+(?P<location>[^ \t]+))?')
+
+    self._re_stack_abbrev = re.compile(r'\s+[.]+\s+[.]+')
+
+    self._memory_map = memory_map
+    self._apk_translator = apk_translator
+    self._in_section = False
+
+  def _ParseLine(self, line):
+    """Check a given input line for a relevant _re_stack_line match.
+
+    Args:
+      line: input tombstone line.
+    Returns:
+      A LineTuple instance on success, None on failure.
+    """
+    line = line.rstrip()
+    if not self._in_section:
+      self._in_section = line.startswith('stack:')
+      return None
+
+    m = self._re_stack_line.match(line)
+    if not m:
+      if not self._re_stack_abbrev.match(line):
+        self._in_section = False
+      return None
+
+    location = m.group('location')
+    if not location:
+      return None
+
+    if not location.endswith('.apk') and not location.endswith('.so'):
+      return None
+
+    addr = int(m.group('stack_value'), 16)
+    t = self._memory_map.FindSectionForAddress(addr)
+    if t is None:
+      return None
+
+    lib_path = t.path
+    lib_offset = t.offset + (addr - t.address)
+
+    if lib_path.endswith('.apk'):
+      lib_path, lib_offset = self._apk_translator.TranslatePath(
+          lib_path, lib_offset)
+
+    return self.LineTuple(addr, lib_path, lib_offset, m)
+
+  def FindLibraryOffsets(self, input_lines, in_section=False):
+    """Parse a tombstone's stack section and find all library offsets in it.
+
+    Args:
+      input_lines: List, or iterable, of input tombstone lines.
+      in_section: Optional. If True, considers that the stack section has
+        already started.
+    Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+    """
+    result = collections.defaultdict(set)
+    self._in_section = in_section
+    for line in input_lines:
+      t = self._ParseLine(line)
+      if t:
+        result[t.lib_path].add(t.lib_offset)
+    return result
+
+  def TranslateLine(self, line, symbol_resolver=None):
+    """Try to translate a line of the stack dump."""
+    t = self._ParseLine(line)
+    if not t or symbol_resolver is None:
+      return line
+
+    symbol_info = symbol_resolver.FindSymbolInfo(t.lib_path, t.lib_offset)
+    if not symbol_info:
+      return line
+
+    pos = t.match.start('location')
+    pos2 = t.match.end('location')
+    return '%s%s (%s)%s' % (line[:pos], t.lib_path, symbol_info, line[pos2:])
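+
+
+# End-to-end sketch for stack sections (illustrative only; |apk_translator|,
+# |resolver| and |tombstone_lines| are assumed to exist):
+#
+#   memory_map = MemoryMap('armeabi-v7a')
+#   memory_map.ParseLines(tombstone_lines)
+#   stack_translator = StackTranslator('armeabi-v7a', memory_map,
+#                                      apk_translator)
+#   for line in tombstone_lines:
+#     print(stack_translator.TranslateLine(line, resolver))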
diff --git a/src/build/android/pylib/symbols/symbol_utils_unittest.py b/src/build/android/pylib/symbols/symbol_utils_unittest.py
new file mode 100644
index 0000000..ed87f9e
--- /dev/null
+++ b/src/build/android/pylib/symbols/symbol_utils_unittest.py
@@ -0,0 +1,942 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import logging
+import os
+import re
+import shutil
+import tempfile
+import unittest
+
+from pylib.symbols import apk_native_libs_unittest
+from pylib.symbols import mock_addr2line
+from pylib.symbols import symbol_utils
+
+_MOCK_ELF_DATA = apk_native_libs_unittest.MOCK_ELF_DATA
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+
+
+# pylint: disable=line-too-long
+
+# list of (start_offset, end_offset, size, libpath) tuples corresponding
+# to the content of base.apk. This was taken from an x86 ChromeModern.apk
+# component build.
+_TEST_APK_LIBS = [
+  (0x01331000, 0x013696bc, 0x000386bc, 'libaccessibility.cr.so'),
+  (0x0136a000, 0x013779c4, 0x0000d9c4, 'libanimation.cr.so'),
+  (0x01378000, 0x0137f7e8, 0x000077e8, 'libapdu.cr.so'),
+  (0x01380000, 0x0155ccc8, 0x001dccc8, 'libbase.cr.so'),
+  (0x0155d000, 0x015ab98c, 0x0004e98c, 'libbase_i18n.cr.so'),
+  (0x015ac000, 0x015dff4c, 0x00033f4c, 'libbindings.cr.so'),
+  (0x015e0000, 0x015f5a54, 0x00015a54, 'libbindings_base.cr.so'),
+  (0x0160e000, 0x01731960, 0x00123960, 'libblink_common.cr.so'),
+  (0x01732000, 0x0174ce54, 0x0001ae54, 'libblink_controller.cr.so'),
+  (0x0174d000, 0x0318c528, 0x01a3f528, 'libblink_core.cr.so'),
+  (0x0318d000, 0x03191700, 0x00004700, 'libblink_mojom_broadcastchannel_bindings_shared.cr.so'),
+  (0x03192000, 0x03cd7918, 0x00b45918, 'libblink_modules.cr.so'),
+  (0x03cd8000, 0x03d137d0, 0x0003b7d0, 'libblink_mojo_bindings_shared.cr.so'),
+  (0x03d14000, 0x03d2670c, 0x0001270c, 'libblink_offscreen_canvas_mojo_bindings_shared.cr.so'),
+  (0x03d27000, 0x046c7054, 0x009a0054, 'libblink_platform.cr.so'),
+  (0x046c8000, 0x0473fbfc, 0x00077bfc, 'libbluetooth.cr.so'),
+  (0x04740000, 0x04878f40, 0x00138f40, 'libboringssl.cr.so'),
+  (0x04879000, 0x0498466c, 0x0010b66c, 'libc++_shared.so'),
+  (0x04985000, 0x0498d93c, 0x0000893c, 'libcaptive_portal.cr.so'),
+  (0x0498e000, 0x049947cc, 0x000067cc, 'libcapture_base.cr.so'),
+  (0x04995000, 0x04b39f18, 0x001a4f18, 'libcapture_lib.cr.so'),
+  (0x04b3a000, 0x04b488ec, 0x0000e8ec, 'libcbor.cr.so'),
+  (0x04b49000, 0x04e9ea5c, 0x00355a5c, 'libcc.cr.so'),
+  (0x04e9f000, 0x04ed6404, 0x00037404, 'libcc_animation.cr.so'),
+  (0x04ed7000, 0x04ef5ab4, 0x0001eab4, 'libcc_base.cr.so'),
+  (0x04ef6000, 0x04fd9364, 0x000e3364, 'libcc_blink.cr.so'),
+  (0x04fda000, 0x04fe2758, 0x00008758, 'libcc_debug.cr.so'),
+  (0x04fe3000, 0x0500ae0c, 0x00027e0c, 'libcc_ipc.cr.so'),
+  (0x0500b000, 0x05078f38, 0x0006df38, 'libcc_paint.cr.so'),
+  (0x05079000, 0x0507e734, 0x00005734, 'libcdm_manager.cr.so'),
+  (0x0507f000, 0x06f4d744, 0x01ece744, 'libchrome.cr.so'),
+  (0x06f54000, 0x06feb830, 0x00097830, 'libchromium_sqlite3.cr.so'),
+  (0x06fec000, 0x0706f554, 0x00083554, 'libclient.cr.so'),
+  (0x07070000, 0x0708da60, 0x0001da60, 'libcloud_policy_proto_generated_compile.cr.so'),
+  (0x0708e000, 0x07121f28, 0x00093f28, 'libcodec.cr.so'),
+  (0x07122000, 0x07134ab8, 0x00012ab8, 'libcolor_space.cr.so'),
+  (0x07135000, 0x07138614, 0x00003614, 'libcommon.cr.so'),
+  (0x07139000, 0x0717c938, 0x00043938, 'libcompositor.cr.so'),
+  (0x0717d000, 0x0923d78c, 0x020c078c, 'libcontent.cr.so'),
+  (0x0923e000, 0x092ae87c, 0x0007087c, 'libcontent_common_mojo_bindings_shared.cr.so'),
+  (0x092af000, 0x092be718, 0x0000f718, 'libcontent_public_common_mojo_bindings_shared.cr.so'),
+  (0x092bf000, 0x092d9a20, 0x0001aa20, 'libcrash_key.cr.so'),
+  (0x092da000, 0x092eda58, 0x00013a58, 'libcrcrypto.cr.so'),
+  (0x092ee000, 0x092f16e0, 0x000036e0, 'libdevice_base.cr.so'),
+  (0x092f2000, 0x092fe8d8, 0x0000c8d8, 'libdevice_event_log.cr.so'),
+  (0x092ff000, 0x093026a4, 0x000036a4, 'libdevice_features.cr.so'),
+  (0x09303000, 0x093f1220, 0x000ee220, 'libdevice_gamepad.cr.so'),
+  (0x093f2000, 0x09437f54, 0x00045f54, 'libdevice_vr_mojo_bindings.cr.so'),
+  (0x09438000, 0x0954c168, 0x00114168, 'libdevice_vr_mojo_bindings_blink.cr.so'),
+  (0x0954d000, 0x0955d720, 0x00010720, 'libdevice_vr_mojo_bindings_shared.cr.so'),
+  (0x0955e000, 0x0956b9c0, 0x0000d9c0, 'libdevices.cr.so'),
+  (0x0956c000, 0x0957cae8, 0x00010ae8, 'libdiscardable_memory_client.cr.so'),
+  (0x0957d000, 0x09588854, 0x0000b854, 'libdiscardable_memory_common.cr.so'),
+  (0x09589000, 0x0959cbb4, 0x00013bb4, 'libdiscardable_memory_service.cr.so'),
+  (0x0959d000, 0x095b6b90, 0x00019b90, 'libdisplay.cr.so'),
+  (0x095b7000, 0x095be930, 0x00007930, 'libdisplay_types.cr.so'),
+  (0x095bf000, 0x095c46c4, 0x000056c4, 'libdisplay_util.cr.so'),
+  (0x095c5000, 0x095f54a4, 0x000304a4, 'libdomain_reliability.cr.so'),
+  (0x095f6000, 0x0966fe08, 0x00079e08, 'libembedder.cr.so'),
+  (0x09670000, 0x096735f8, 0x000035f8, 'libembedder_switches.cr.so'),
+  (0x09674000, 0x096a3460, 0x0002f460, 'libevents.cr.so'),
+  (0x096a4000, 0x096b6d40, 0x00012d40, 'libevents_base.cr.so'),
+  (0x096b7000, 0x0981a778, 0x00163778, 'libffmpeg.cr.so'),
+  (0x0981b000, 0x09945c94, 0x0012ac94, 'libfido.cr.so'),
+  (0x09946000, 0x09a330dc, 0x000ed0dc, 'libfingerprint.cr.so'),
+  (0x09a34000, 0x09b53170, 0x0011f170, 'libfreetype_harfbuzz.cr.so'),
+  (0x09b54000, 0x09bc5c5c, 0x00071c5c, 'libgcm.cr.so'),
+  (0x09bc6000, 0x09cc8584, 0x00102584, 'libgeolocation.cr.so'),
+  (0x09cc9000, 0x09cdc8d4, 0x000138d4, 'libgeometry.cr.so'),
+  (0x09cdd000, 0x09cec8b4, 0x0000f8b4, 'libgeometry_skia.cr.so'),
+  (0x09ced000, 0x09d10e14, 0x00023e14, 'libgesture_detection.cr.so'),
+  (0x09d11000, 0x09d7595c, 0x0006495c, 'libgfx.cr.so'),
+  (0x09d76000, 0x09d7d7cc, 0x000077cc, 'libgfx_ipc.cr.so'),
+  (0x09d7e000, 0x09d82708, 0x00004708, 'libgfx_ipc_buffer_types.cr.so'),
+  (0x09d83000, 0x09d89748, 0x00006748, 'libgfx_ipc_color.cr.so'),
+  (0x09d8a000, 0x09d8f6f4, 0x000056f4, 'libgfx_ipc_geometry.cr.so'),
+  (0x09d90000, 0x09d94754, 0x00004754, 'libgfx_ipc_skia.cr.so'),
+  (0x09d95000, 0x09d9869c, 0x0000369c, 'libgfx_switches.cr.so'),
+  (0x09d99000, 0x09dba0ac, 0x000210ac, 'libgin.cr.so'),
+  (0x09dbb000, 0x09e0a8cc, 0x0004f8cc, 'libgl_in_process_context.cr.so'),
+  (0x09e0b000, 0x09e17a18, 0x0000ca18, 'libgl_init.cr.so'),
+  (0x09e18000, 0x09ee34e4, 0x000cb4e4, 'libgl_wrapper.cr.so'),
+  (0x09ee4000, 0x0a1a2e00, 0x002bee00, 'libgles2.cr.so'),
+  (0x0a1a3000, 0x0a24556c, 0x000a256c, 'libgles2_implementation.cr.so'),
+  (0x0a246000, 0x0a267038, 0x00021038, 'libgles2_utils.cr.so'),
+  (0x0a268000, 0x0a3288e4, 0x000c08e4, 'libgpu.cr.so'),
+  (0x0a329000, 0x0a3627ec, 0x000397ec, 'libgpu_ipc_service.cr.so'),
+  (0x0a363000, 0x0a388a18, 0x00025a18, 'libgpu_util.cr.so'),
+  (0x0a389000, 0x0a506d8c, 0x0017dd8c, 'libhost.cr.so'),
+  (0x0a507000, 0x0a6f0ec0, 0x001e9ec0, 'libicui18n.cr.so'),
+  (0x0a6f1000, 0x0a83b4c8, 0x0014a4c8, 'libicuuc.cr.so'),
+  (0x0a83c000, 0x0a8416e4, 0x000056e4, 'libinterfaces_shared.cr.so'),
+  (0x0a842000, 0x0a87e2a0, 0x0003c2a0, 'libipc.cr.so'),
+  (0x0a87f000, 0x0a88c98c, 0x0000d98c, 'libipc_mojom.cr.so'),
+  (0x0a88d000, 0x0a8926e4, 0x000056e4, 'libipc_mojom_shared.cr.so'),
+  (0x0a893000, 0x0a8a1e18, 0x0000ee18, 'libkeyed_service_content.cr.so'),
+  (0x0a8a2000, 0x0a8b4a30, 0x00012a30, 'libkeyed_service_core.cr.so'),
+  (0x0a8b5000, 0x0a930a80, 0x0007ba80, 'libleveldatabase.cr.so'),
+  (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'),
+  (0x0a9b4000, 0x0aea9bb4, 0x004f5bb4, 'libmedia.cr.so'),
+  (0x0aeaa000, 0x0b08cb88, 0x001e2b88, 'libmedia_blink.cr.so'),
+  (0x0b08d000, 0x0b0a4728, 0x00017728, 'libmedia_devices_mojo_bindings_shared.cr.so'),
+  (0x0b0a5000, 0x0b1943ec, 0x000ef3ec, 'libmedia_gpu.cr.so'),
+  (0x0b195000, 0x0b2d07d4, 0x0013b7d4, 'libmedia_mojo_services.cr.so'),
+  (0x0b2d1000, 0x0b2d4760, 0x00003760, 'libmessage_center.cr.so'),
+  (0x0b2d5000, 0x0b2e0938, 0x0000b938, 'libmessage_support.cr.so'),
+  (0x0b2e1000, 0x0b2f3ad0, 0x00012ad0, 'libmetrics_cpp.cr.so'),
+  (0x0b2f4000, 0x0b313bb8, 0x0001fbb8, 'libmidi.cr.so'),
+  (0x0b314000, 0x0b31b848, 0x00007848, 'libmojo_base_lib.cr.so'),
+  (0x0b31c000, 0x0b3329f8, 0x000169f8, 'libmojo_base_mojom.cr.so'),
+  (0x0b333000, 0x0b34b98c, 0x0001898c, 'libmojo_base_mojom_blink.cr.so'),
+  (0x0b34c000, 0x0b354700, 0x00008700, 'libmojo_base_mojom_shared.cr.so'),
+  (0x0b355000, 0x0b3608b0, 0x0000b8b0, 'libmojo_base_shared_typemap_traits.cr.so'),
+  (0x0b361000, 0x0b3ad454, 0x0004c454, 'libmojo_edk.cr.so'),
+  (0x0b3ae000, 0x0b3c4a20, 0x00016a20, 'libmojo_edk_ports.cr.so'),
+  (0x0b3c5000, 0x0b3d38a0, 0x0000e8a0, 'libmojo_mojom_bindings.cr.so'),
+  (0x0b3d4000, 0x0b3da6e8, 0x000066e8, 'libmojo_mojom_bindings_shared.cr.so'),
+  (0x0b3db000, 0x0b3e27f0, 0x000077f0, 'libmojo_public_system.cr.so'),
+  (0x0b3e3000, 0x0b3fa9fc, 0x000179fc, 'libmojo_public_system_cpp.cr.so'),
+  (0x0b3fb000, 0x0b407728, 0x0000c728, 'libmojom_core_shared.cr.so'),
+  (0x0b408000, 0x0b421744, 0x00019744, 'libmojom_platform_shared.cr.so'),
+  (0x0b422000, 0x0b43451c, 0x0001251c, 'libnative_theme.cr.so'),
+  (0x0b435000, 0x0baaa1bc, 0x006751bc, 'libnet.cr.so'),
+  (0x0bac4000, 0x0bb74670, 0x000b0670, 'libnetwork_cpp.cr.so'),
+  (0x0bb75000, 0x0bbaee8c, 0x00039e8c, 'libnetwork_cpp_base.cr.so'),
+  (0x0bbaf000, 0x0bd21844, 0x00172844, 'libnetwork_service.cr.so'),
+  (0x0bd22000, 0x0bd256e4, 0x000036e4, 'libnetwork_session_configurator.cr.so'),
+  (0x0bd26000, 0x0bd33734, 0x0000d734, 'libonc.cr.so'),
+  (0x0bd34000, 0x0bd9ce18, 0x00068e18, 'libperfetto.cr.so'),
+  (0x0bd9d000, 0x0bda4854, 0x00007854, 'libplatform.cr.so'),
+  (0x0bda5000, 0x0bec5ce4, 0x00120ce4, 'libpolicy_component.cr.so'),
+  (0x0bec6000, 0x0bf5ab58, 0x00094b58, 'libpolicy_proto.cr.so'),
+  (0x0bf5b000, 0x0bf86fbc, 0x0002bfbc, 'libprefs.cr.so'),
+  (0x0bf87000, 0x0bfa5d74, 0x0001ed74, 'libprinting.cr.so'),
+  (0x0bfa6000, 0x0bfe0e80, 0x0003ae80, 'libprotobuf_lite.cr.so'),
+  (0x0bfe1000, 0x0bff0a18, 0x0000fa18, 'libproxy_config.cr.so'),
+  (0x0bff1000, 0x0c0f6654, 0x00105654, 'libpublic.cr.so'),
+  (0x0c0f7000, 0x0c0fa6a4, 0x000036a4, 'librange.cr.so'),
+  (0x0c0fb000, 0x0c118058, 0x0001d058, 'libraster.cr.so'),
+  (0x0c119000, 0x0c133d00, 0x0001ad00, 'libresource_coordinator_cpp.cr.so'),
+  (0x0c134000, 0x0c1396a0, 0x000056a0, 'libresource_coordinator_cpp_base.cr.so'),
+  (0x0c13a000, 0x0c1973b8, 0x0005d3b8, 'libresource_coordinator_public_mojom.cr.so'),
+  (0x0c198000, 0x0c2033e8, 0x0006b3e8, 'libresource_coordinator_public_mojom_blink.cr.so'),
+  (0x0c204000, 0x0c219744, 0x00015744, 'libresource_coordinator_public_mojom_shared.cr.so'),
+  (0x0c21a000, 0x0c21e700, 0x00004700, 'libsandbox.cr.so'),
+  (0x0c21f000, 0x0c22f96c, 0x0001096c, 'libsandbox_services.cr.so'),
+  (0x0c230000, 0x0c249d58, 0x00019d58, 'libseccomp_bpf.cr.so'),
+  (0x0c24a000, 0x0c24e714, 0x00004714, 'libseccomp_starter_android.cr.so'),
+  (0x0c24f000, 0x0c4ae9f0, 0x0025f9f0, 'libservice.cr.so'),
+  (0x0c4af000, 0x0c4c3ae4, 0x00014ae4, 'libservice_manager_cpp.cr.so'),
+  (0x0c4c4000, 0x0c4cb708, 0x00007708, 'libservice_manager_cpp_types.cr.so'),
+  (0x0c4cc000, 0x0c4fbe30, 0x0002fe30, 'libservice_manager_mojom.cr.so'),
+  (0x0c4fc000, 0x0c532e78, 0x00036e78, 'libservice_manager_mojom_blink.cr.so'),
+  (0x0c533000, 0x0c53669c, 0x0000369c, 'libservice_manager_mojom_constants.cr.so'),
+  (0x0c537000, 0x0c53e85c, 0x0000785c, 'libservice_manager_mojom_constants_blink.cr.so'),
+  (0x0c53f000, 0x0c542668, 0x00003668, 'libservice_manager_mojom_constants_shared.cr.so'),
+  (0x0c543000, 0x0c54d700, 0x0000a700, 'libservice_manager_mojom_shared.cr.so'),
+  (0x0c54e000, 0x0c8fc6ec, 0x003ae6ec, 'libsessions.cr.so'),
+  (0x0c8fd000, 0x0c90a924, 0x0000d924, 'libshared_memory_support.cr.so'),
+  (0x0c90b000, 0x0c9148ec, 0x000098ec, 'libshell_dialogs.cr.so'),
+  (0x0c915000, 0x0cf8de70, 0x00678e70, 'libskia.cr.so'),
+  (0x0cf8e000, 0x0cf978bc, 0x000098bc, 'libsnapshot.cr.so'),
+  (0x0cf98000, 0x0cfb7d9c, 0x0001fd9c, 'libsql.cr.so'),
+  (0x0cfb8000, 0x0cfbe744, 0x00006744, 'libstartup_tracing.cr.so'),
+  (0x0cfbf000, 0x0d19b4e4, 0x001dc4e4, 'libstorage_browser.cr.so'),
+  (0x0d19c000, 0x0d2a773c, 0x0010b73c, 'libstorage_common.cr.so'),
+  (0x0d2a8000, 0x0d2ac6fc, 0x000046fc, 'libsurface.cr.so'),
+  (0x0d2ad000, 0x0d2baa98, 0x0000da98, 'libtracing.cr.so'),
+  (0x0d2bb000, 0x0d2f36b0, 0x000386b0, 'libtracing_cpp.cr.so'),
+  (0x0d2f4000, 0x0d326e70, 0x00032e70, 'libtracing_mojom.cr.so'),
+  (0x0d327000, 0x0d33270c, 0x0000b70c, 'libtracing_mojom_shared.cr.so'),
+  (0x0d333000, 0x0d46d804, 0x0013a804, 'libui_android.cr.so'),
+  (0x0d46e000, 0x0d4cb3f8, 0x0005d3f8, 'libui_base.cr.so'),
+  (0x0d4cc000, 0x0d4dbc40, 0x0000fc40, 'libui_base_ime.cr.so'),
+  (0x0d4dc000, 0x0d4e58d4, 0x000098d4, 'libui_data_pack.cr.so'),
+  (0x0d4e6000, 0x0d51d1e0, 0x000371e0, 'libui_devtools.cr.so'),
+  (0x0d51e000, 0x0d52b984, 0x0000d984, 'libui_message_center_cpp.cr.so'),
+  (0x0d52c000, 0x0d539a48, 0x0000da48, 'libui_touch_selection.cr.so'),
+  (0x0d53a000, 0x0d55bc60, 0x00021c60, 'liburl.cr.so'),
+  (0x0d55c000, 0x0d55f6b4, 0x000036b4, 'liburl_ipc.cr.so'),
+  (0x0d560000, 0x0d5af110, 0x0004f110, 'liburl_matcher.cr.so'),
+  (0x0d5b0000, 0x0d5e2fac, 0x00032fac, 'libuser_manager.cr.so'),
+  (0x0d5e3000, 0x0d5e66e4, 0x000036e4, 'libuser_prefs.cr.so'),
+  (0x0d5e7000, 0x0e3e1cc8, 0x00dfacc8, 'libv8.cr.so'),
+  (0x0e3e2000, 0x0e400ae0, 0x0001eae0, 'libv8_libbase.cr.so'),
+  (0x0e401000, 0x0e4d91d4, 0x000d81d4, 'libviz_common.cr.so'),
+  (0x0e4da000, 0x0e4df7e4, 0x000057e4, 'libviz_resource_format.cr.so'),
+  (0x0e4e0000, 0x0e5b7120, 0x000d7120, 'libweb_dialogs.cr.so'),
+  (0x0e5b8000, 0x0e5c7a18, 0x0000fa18, 'libwebdata_common.cr.so'),
+  (0x0e5c8000, 0x0e61bfe4, 0x00053fe4, 'libwtf.cr.so'),
+]
+
+
+# A small memory map fragment extracted from a tombstone for a process that
+# had loaded the APK corresponding to _TEST_APK_LIBS above.
+_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw-         0     cb000  /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw-     cb000    400000  /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff ---    4cb000  1fb35000  /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw-         0      1000  /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff ---      1000  1ffff000  /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw-         0    9d9000  /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r--         0   1eb2000  /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x   1eb2000   1cfc000  /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw-   3bae000      1000  /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw-         0     dc000  /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw-         0      1000  /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw-      1000     11000  /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff ---     12000   3b13000  /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw-   3b25000    3ff000  /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r--    8a9000    18f000  /data/app/com.example.app-2/base.apk
+92539000-9255bfff r--         0     23000  /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r--    213000     38000  /data/app/com.example.app-2/base.apk
+92594000-925c0fff r--    87d000     2d000  /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r--    a37000    213000  /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r--    24a000    634000  /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r--   a931000     30000  /data/app/com.example.app-2/base.apk
+92e38000-92e86fff r-x   a961000     4f000  /data/app/com.example.app-2/base.apk
+92e87000-92e8afff rw-   a9b0000      4000  /data/app/com.example.app-2/base.apk
+92e8b000-92e8bfff rw-         0      1000
+92e8c000-92e9dfff r--   d5b0000     12000  /data/app/com.example.app-2/base.apk
+92e9e000-92ebcfff r-x   d5c2000     1f000  /data/app/com.example.app-2/base.apk
+92ebd000-92ebefff rw-   d5e1000      2000  /data/app/com.example.app-2/base.apk
+92ebf000-92ebffff rw-         0      1000
+'''
+
+# List of (address, size, path, offset) tuples that must appear in
+# _TEST_MEMORY_MAP. Not all sections need to be listed.
+_TEST_MEMORY_MAP_SECTIONS = [
+  (0x923aa000, 0x18f000, '/data/app/com.example.app-2/base.apk', 0x8a9000),
+  (0x9255c000, 0x038000, '/data/app/com.example.app-2/base.apk', 0x213000),
+  (0x92594000, 0x02d000, '/data/app/com.example.app-2/base.apk', 0x87d000),
+  (0x925c1000, 0x213000, '/data/app/com.example.app-2/base.apk', 0xa37000),
+]
+
+_EXPECTED_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw-         0     cb000  /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw-     cb000    400000  /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff ---    4cb000  1fb35000  /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw-         0      1000  /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff ---      1000  1ffff000  /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw-         0    9d9000  /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r--         0   1eb2000  /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x   1eb2000   1cfc000  /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw-   3bae000      1000  /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw-         0     dc000  /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw-         0      1000  /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw-      1000     11000  /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff ---     12000   3b13000  /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw-   3b25000    3ff000  /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r--    8a9000    18f000  /data/app/com.example.app-2/base.apk
+92539000-9255bfff r--         0     23000  /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r--    213000     38000  /data/app/com.example.app-2/base.apk
+92594000-925c0fff r--    87d000     2d000  /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r--    a37000    213000  /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r--    24a000    634000  /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r--   a931000     30000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x0)
+92e38000-92e86fff r-x   a961000     4f000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x30000)
+92e87000-92e8afff rw-   a9b0000      4000  /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x7f000)
+92e8b000-92e8bfff rw-         0      1000
+92e8c000-92e9dfff r--   d5b0000     12000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x0)
+92e9e000-92ebcfff r-x   d5c2000     1f000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x12000)
+92ebd000-92ebefff rw-   d5e1000      2000  /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x31000)
+92ebf000-92ebffff rw-         0      1000
+'''
+
+# Example stack section, taken from the same tombstone that _TEST_MEMORY_MAP
+# was extracted from.
+_TEST_STACK = r'''stack:
+        bf89a070  b7439468  /system/lib/libc.so
+        bf89a074  bf89a1e4  [stack]
+        bf89a078  932d4000  /data/app/com.example.app-2/base.apk
+        bf89a07c  b73bfbc9  /system/lib/libc.so (pthread_mutex_lock+65)
+        bf89a080  00000000
+        bf89a084  4000671c  /dev/ashmem/dalvik-main space 1 (deleted)
+        bf89a088  932d1d86  /data/app/com.example.app-2/base.apk
+        bf89a08c  b743671c  /system/lib/libc.so
+        bf89a090  b77f8c00  /system/bin/linker
+        bf89a094  b743cc90
+        bf89a098  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a09c  b73bf271  /system/lib/libc.so (__pthread_internal_find(long)+65)
+        bf89a0a0  b743cc90
+        bf89a0a4  bf89a0b0  [stack]
+        bf89a0a8  bf89a0b8  [stack]
+        bf89a0ac  00000008
+        ........  ........
+  #00  bf89a0b0  00000006
+        bf89a0b4  00000002
+        bf89a0b8  b743671c  /system/lib/libc.so
+        bf89a0bc  b73bf5d9  /system/lib/libc.so (pthread_kill+71)
+  #01  bf89a0c0  00006937
+        bf89a0c4  00006937
+        bf89a0c8  00000006
+        bf89a0cc  b77fd3a9  /system/bin/app_process32 (sigprocmask+141)
+        bf89a0d0  00000002
+        bf89a0d4  bf89a0ec  [stack]
+        bf89a0d8  00000000
+        bf89a0dc  b743671c  /system/lib/libc.so
+        bf89a0e0  bf89a12c  [stack]
+        bf89a0e4  bf89a1e4  [stack]
+        bf89a0e8  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a0ec  b7365206  /system/lib/libc.so (raise+37)
+  #02  bf89a0f0  b77f8c00  /system/bin/linker
+        bf89a0f4  00000006
+        bf89a0f8  b7439468  /system/lib/libc.so
+        bf89a0fc  b743671c  /system/lib/libc.so
+        bf89a100  bf89a12c  [stack]
+        bf89a104  b743671c  /system/lib/libc.so
+        bf89a108  bf89a12c  [stack]
+        bf89a10c  b735e9e5  /system/lib/libc.so (abort+81)
+  #03  bf89a110  00000006
+        bf89a114  bf89a12c  [stack]
+        bf89a118  00000000
+        bf89a11c  b55a3d3b  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+        bf89a120  b7439468  /system/lib/libc.so
+        bf89a124  b55ba38d  /system/lib/libprotobuf-cpp-lite.so
+        bf89a128  b55ba408  /system/lib/libprotobuf-cpp-lite.so
+        bf89a12c  ffffffdf
+        bf89a130  0000003d
+        bf89a134  adfedf00  [anon:libc_malloc]
+        bf89a138  bf89a158  [stack]
+  #04  bf89a13c  a0cee7f0  /data/app/com.example.app-2/base.apk
+        bf89a140  b55c1cb0  /system/lib/libprotobuf-cpp-lite.so
+        bf89a144  bf89a1e4  [stack]
+'''
+
+# Expected value of _TEST_STACK after translation of addresses in the APK
+# into offsets into libraries.
+_EXPECTED_STACK = r'''stack:
+        bf89a070  b7439468  /system/lib/libc.so
+        bf89a074  bf89a1e4  [stack]
+        bf89a078  932d4000  /data/app/com.example.app-2/base.apk
+        bf89a07c  b73bfbc9  /system/lib/libc.so (pthread_mutex_lock+65)
+        bf89a080  00000000
+        bf89a084  4000671c  /dev/ashmem/dalvik-main space 1 (deleted)
+        bf89a088  932d1d86  /data/app/com.example.app-2/base.apk
+        bf89a08c  b743671c  /system/lib/libc.so
+        bf89a090  b77f8c00  /system/bin/linker
+        bf89a094  b743cc90
+        bf89a098  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a09c  b73bf271  /system/lib/libc.so (__pthread_internal_find(long)+65)
+        bf89a0a0  b743cc90
+        bf89a0a4  bf89a0b0  [stack]
+        bf89a0a8  bf89a0b8  [stack]
+        bf89a0ac  00000008
+        ........  ........
+  #00  bf89a0b0  00000006
+        bf89a0b4  00000002
+        bf89a0b8  b743671c  /system/lib/libc.so
+        bf89a0bc  b73bf5d9  /system/lib/libc.so (pthread_kill+71)
+  #01  bf89a0c0  00006937
+        bf89a0c4  00006937
+        bf89a0c8  00000006
+        bf89a0cc  b77fd3a9  /system/bin/app_process32 (sigprocmask+141)
+        bf89a0d0  00000002
+        bf89a0d4  bf89a0ec  [stack]
+        bf89a0d8  00000000
+        bf89a0dc  b743671c  /system/lib/libc.so
+        bf89a0e0  bf89a12c  [stack]
+        bf89a0e4  bf89a1e4  [stack]
+        bf89a0e8  932d1d4a  /data/app/com.example.app-2/base.apk
+        bf89a0ec  b7365206  /system/lib/libc.so (raise+37)
+  #02  bf89a0f0  b77f8c00  /system/bin/linker
+        bf89a0f4  00000006
+        bf89a0f8  b7439468  /system/lib/libc.so
+        bf89a0fc  b743671c  /system/lib/libc.so
+        bf89a100  bf89a12c  [stack]
+        bf89a104  b743671c  /system/lib/libc.so
+        bf89a108  bf89a12c  [stack]
+        bf89a10c  b735e9e5  /system/lib/libc.so (abort+81)
+  #03  bf89a110  00000006
+        bf89a114  bf89a12c  [stack]
+        bf89a118  00000000
+        bf89a11c  b55a3d3b  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+        bf89a120  b7439468  /system/lib/libc.so
+        bf89a124  b55ba38d  /system/lib/libprotobuf-cpp-lite.so
+        bf89a128  b55ba408  /system/lib/libprotobuf-cpp-lite.so
+        bf89a12c  ffffffdf
+        bf89a130  0000003d
+        bf89a134  adfedf00  [anon:libc_malloc]
+        bf89a138  bf89a158  [stack]
+  #04  bf89a13c  a0cee7f0  /data/app/com.example.app-2/base.apk
+        bf89a140  b55c1cb0  /system/lib/libprotobuf-cpp-lite.so
+        bf89a144  bf89a1e4  [stack]
+'''
+
+_TEST_BACKTRACE = r'''backtrace:
+    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
+    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
+    #02 pc 00027205  /system/lib/libc.so (raise+36)
+    #03 pc 000209e4  /system/lib/libc.so (abort+80)
+    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x961e000)
+    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
+    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
+    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
+_EXPECTED_BACKTRACE = r'''backtrace:
+    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
+    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
+    #02 pc 00027205  /system/lib/libc.so (raise+36)
+    #03 pc 000209e4  /system/lib/libc.so (abort+80)
+    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so (offset 0x28000)
+    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
+    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
+    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
+_EXPECTED_BACKTRACE_OFFSETS_MAP = {
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so':
+      set([
+          0x1c000 + 0x111a9,
+          0x1c000 + 0x13228,
+          0x1c000 + 0x131de,
+      ]),
+
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so':
+      set([
+          0x90e000 + 0x7cd236,
+          0x90e000 + 0x7cd2d8,
+          0x90e000 + 0x7cd956,
+          0x90e000 + 0x7c2d4a,
+          0x90e000 + 0x9fc9f1,
+          0x90e000 + 0x9fc8ea,
+          0x90e000 + 0x561c63,
+          0x90e000 + 0x106fbdb,
+          0x90e000 + 0x4d7371,
+          0x90e000 + 0x4d8159,
+          0x90e000 + 0x4d7b96,
+          0x90e000 + 0x4da4b6,
+          0x90e000 + 0xcae8,
+      ]),
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so':
+      set([
+          0xc2d000 + 0x5ab66c,
+          0xc2d000 + 0x5afca2,
+          0xc2d000 + 0xce864f,
+          0xc2d000 + 0xce8dfa,
+          0xc2d000 + 0xce74c6,
+          0xc2d000 + 0xce8215,
+      ]),
+  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so':
+      set([
+          0x28000 + 0x4616,
+      ])
+}
+
+# pylint: enable=line-too-long
+
+_ONE_MB = 1024 * 1024
+_TEST_SYMBOL_DATA = {
+  # Regular symbols
+  0: 'mock_sym_for_addr_0 [mock_src/libmock1.so.c:0]',
+  0x1000: 'mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]',
+
+  # Symbols without source file path.
+  _ONE_MB: 'mock_sym_for_addr_1048576 [??:0]',
+  _ONE_MB + 0x8234: 'mock_sym_for_addr_1081908 [??:0]',
+
+  # Unknown symbol.
+  2 * _ONE_MB: '?? [??:0]',
+
+  # Inlined symbol.
+  3 * _ONE_MB:
+    'mock_sym_for_addr_3145728_inner [mock_src/libmock1.so.c:3145728]',
+}
+
+
+@contextlib.contextmanager
+def _TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
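+
+
+# Note: _TempDir() mirrors Python 3's tempfile.TemporaryDirectory(), which is
+# not available in the Python 2 runtime these tests target.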
+
+
+def _TouchFile(path):
+  # Create parent directories.
+  try:
+    os.makedirs(os.path.dirname(path))
+  except OSError:
+    pass
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+class MockApkTranslator(object):
+  """A mock ApkLibraryPathTranslator object used for testing."""
+
+  # Regex that matches the content of APK native library map files generated
+  # with apk_lib_dump.py.
+  _RE_MAP_FILE = re.compile(
+      r'0x(?P<file_start>[0-9a-f]+)\s+' +
+      r'0x(?P<file_end>[0-9a-f]+)\s+' +
+      r'0x(?P<file_size>[0-9a-f]+)\s+' +
+      r'(?P<lib_path>\S+)')
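+  # For example, a matching map-file line (using the first _TEST_APK_LIBS
+  # entry) would look like:
+  #   0x01331000 0x013696bc 0x000386bc libaccessibility.cr.so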
+
+  def __init__(self, test_apk_libs=None):
+    """Initialize instance.
+
+    Args:
+      test_apk_libs: Optional list of (file_start, file_end, size, lib_path)
+        tuples, like _TEST_APK_LIBS for example. This will be used to
+        implement TranslatePath().
+    """
+    self._apk_libs = []
+    if test_apk_libs:
+      self._AddLibEntries(test_apk_libs)
+
+  def _AddLibEntries(self, entries):
+    self._apk_libs = sorted(self._apk_libs + entries,
+                            key=lambda x: x[0])
+
+  def ReadMapFile(self, file_path):
+    """Read an .apk.native-libs file that was produced with apk_lib_dump.py.
+
+    Args:
+      file_path: input path to .apk.native-libs file. Its format is
+        essentially: 0x<start>  0x<end> 0x<size> <library-path>
+    """
+    new_libs = []
+    with open(file_path) as f:
+      for line in f.readlines():
+        m = MockApkTranslator._RE_MAP_FILE.match(line)
+        if m:
+          file_start = int(m.group('file_start'), 16)
+          file_end = int(m.group('file_end'), 16)
+          file_size = int(m.group('file_size'), 16)
+          lib_path = m.group('lib_path')
+          # Sanity check
+          if file_start + file_size != file_end:
+            logging.warning('%s: Inconsistent (start, end, size) values '
+                            '(0x%x, 0x%x, 0x%x)',
+                            file_path, file_start, file_end, file_size)
+          else:
+            new_libs.append((file_start, file_end, file_size, lib_path))
+
+    self._AddLibEntries(new_libs)
+
+  def TranslatePath(self, lib_path, lib_offset):
+    """Translate an APK file path + offset into a library path + offset."""
+    min_pos = 0
+    max_pos = len(self._apk_libs)
+    while min_pos < max_pos:
+      mid_pos = (min_pos + max_pos) // 2
+      mid_entry = self._apk_libs[mid_pos]
+      mid_offset = mid_entry[0]
+      mid_size = mid_entry[2]
+      if lib_offset < mid_offset:
+        max_pos = mid_pos
+      elif lib_offset >= mid_offset + mid_size:
+        min_pos = mid_pos + 1
+      else:
+        # Found it
+        new_path = '%s!lib/%s' % (lib_path, mid_entry[3])
+        new_offset = lib_offset - mid_offset
+        return (new_path, new_offset)
+
+    return lib_path, lib_offset
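+
+  # For example, with the _TEST_APK_LIBS entries loaded, an offset of
+  # 0x01331000 into a (hypothetical) /data/app/test-1/base.apk falls inside
+  # libaccessibility.cr.so, so TranslatePath() would return
+  # ('/data/app/test-1/base.apk!lib/libaccessibility.cr.so', 0).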
+
+
+class HostLibraryFinderTest(unittest.TestCase):
+
+  def testEmpty(self):
+    finder = symbol_utils.HostLibraryFinder()
+    self.assertIsNone(finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+    self.assertIsNone(
+        finder.Find('/data/data/com.example.app-1/base.apk!lib/libfoo.so'))
+
+
+  def testSimpleDirectory(self):
+    finder = symbol_utils.HostLibraryFinder()
+    with _TempDir() as tmp_dir:
+      host_libfoo_path = os.path.join(tmp_dir, 'libfoo.so')
+      host_libbar_path = os.path.join(tmp_dir, 'libbar.so')
+      _TouchFile(host_libfoo_path)
+      _TouchFile(host_libbar_path)
+
+      finder.AddSearchDir(tmp_dir)
+
+      # Regular library path (extracted at installation by the PackageManager).
+      # Note that the extraction path has changed between Android releases,
+      # i.e. it can be /data/app/, /data/data/ or /data/app-lib/ depending
+      # on the system.
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/app-lib/com.example.app-1/lib/libfoo.so'))
+
+      # Verify that the path doesn't really matter
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/whatever/what.apk!lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libbar_path,
+          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+      self.assertIsNone(
+          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+  def testMultipleDirectories(self):
+    with _TempDir() as tmp_dir:
+      # Create the following files:
+      #   <tmp_dir>/aaa/
+      #      libfoo.so
+      #   <tmp_dir>/bbb/
+      #      libbar.so
+      #      libfoo.so    (this one should never be seen because 'aaa'
+      #                    shall be first in the search path list).
+      #
+      aaa_dir = os.path.join(tmp_dir, 'aaa')
+      bbb_dir = os.path.join(tmp_dir, 'bbb')
+      os.makedirs(aaa_dir)
+      os.makedirs(bbb_dir)
+
+      host_libfoo_path = os.path.join(aaa_dir, 'libfoo.so')
+      host_libbar_path = os.path.join(bbb_dir, 'libbar.so')
+      host_libfoo2_path = os.path.join(bbb_dir, 'libfoo.so')
+
+      _TouchFile(host_libfoo_path)
+      _TouchFile(host_libbar_path)
+      _TouchFile(host_libfoo2_path)
+
+      finder = symbol_utils.HostLibraryFinder()
+      finder.AddSearchDir(aaa_dir)
+      finder.AddSearchDir(bbb_dir)
+
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libfoo_path,
+          finder.Find('/data/whatever/base.apk!lib/libfoo.so'))
+
+      self.assertEqual(
+          host_libbar_path,
+          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+      self.assertIsNone(
+          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+class ElfSymbolResolverTest(unittest.TestCase):
+
+  def testCreation(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    self.assertTrue(resolver)
+
+  def testWithSimpleOffsets(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    resolver.SetAndroidAbi('ignored-abi')
+
+    for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+      self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr),
+                       expected_sym)
+
+  def testWithPreResolvedSymbols(self):
+    resolver = symbol_utils.ElfSymbolResolver(
+        addr2line_path_for_tests=_MOCK_A2L_PATH)
+    resolver.SetAndroidAbi('ignored-abi')
+    resolver.AddLibraryOffsets('/some/path/libmock1.so',
+                               _TEST_SYMBOL_DATA.keys())
+
+    resolver.DisallowSymbolizerForTesting()
+
+    for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+      sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr)
+      self.assertIsNotNone(sym_info, 'None symbol info for addr %x' % addr)
+      self.assertEqual(
+          sym_info, expected_sym,
+          'Invalid symbol info for addr %x [%s] expected [%s]' % (
+              addr, sym_info, expected_sym))
+
+
+class MemoryMapTest(unittest.TestCase):
+
+  def testCreation(self):
+    mem_map = symbol_utils.MemoryMap('test-abi32')
+    self.assertIsNone(mem_map.FindSectionForAddress(0))
+
+  def testParseLines(self):
+    mem_map = symbol_utils.MemoryMap('test-abi32')
+    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+    for exp_addr, exp_size, exp_path, exp_offset in _TEST_MEMORY_MAP_SECTIONS:
+      text = '(addr:%x, size:%x, path:%s, offset=%x)' % (
+          exp_addr, exp_size, exp_path, exp_offset)
+
+      t = mem_map.FindSectionForAddress(exp_addr)
+      self.assertTrue(t, 'Could not find %s' % text)
+      self.assertEqual(t.address, exp_addr)
+      self.assertEqual(t.size, exp_size)
+      self.assertEqual(t.offset, exp_offset)
+      self.assertEqual(t.path, exp_path)
+
+  def testTranslateLine(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    mem_map = symbol_utils.MemoryMap(android_abi)
+    for line, expected_line in zip(_TEST_MEMORY_MAP.splitlines(),
+                                   _EXPECTED_TEST_MEMORY_MAP.splitlines()):
+      self.assertEqual(mem_map.TranslateLine(line, apk_translator),
+                       expected_line)
+
+
+class StackTranslatorTest(unittest.TestCase):
+
+  def testSimpleStack(self):
+    android_abi = 'test-abi32'
+    mem_map = symbol_utils.MemoryMap(android_abi)
+    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    stack_translator = symbol_utils.StackTranslator(android_abi, mem_map,
+                                                    apk_translator)
+    input_stack = _TEST_STACK.splitlines()
+    expected_stack = _EXPECTED_STACK.splitlines()
+    self.assertEqual(len(input_stack), len(expected_stack))
+    for stack_line, expected_line in zip(input_stack, expected_stack):
+      new_line = stack_translator.TranslateLine(stack_line)
+      self.assertEqual(new_line, expected_line)
+
+
+class MockSymbolResolver(symbol_utils.SymbolResolver):
+
+  # A regex matching a symbol definition as it appears in a test symbol file.
+  # Format is: <hex-offset> <whitespace> <symbol-string>
+  _RE_SYMBOL_DEFINITION = re.compile(
+      r'(?P<offset>[0-9a-f]+)\s+(?P<symbol>.*)')
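+  # For example, a matching line (illustrative symbol, hexadecimal offset):
+  #   1f3a  MyClass::MyMethod() [src/foo.cc:42]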
+
+  def __init__(self):
+    super(MockSymbolResolver, self).__init__()
+    self._map = collections.defaultdict(dict)
+
+  def AddTestLibrarySymbols(self, lib_name, offsets_map):
+    """Add a new test entry for a given library name.
+
+    Args:
+      lib_name: Library name (e.g. 'libfoo.so')
+      offsets_map: A mapping from offsets to symbol info strings.
+    """
+    self._map[lib_name] = offsets_map
+
+  def ReadTestFile(self, file_path, lib_name):
+    """Read a single test symbol file, matching a given library.
+
+    Args:
+      file_path: Input file path.
+      lib_name: Library name these symbols correspond to (e.g. 'libfoo.so')
+    """
+    with open(file_path) as f:
+      for line in f.readlines():
+        line = line.rstrip()
+        m = MockSymbolResolver._RE_SYMBOL_DEFINITION.match(line)
+        if m:
+          offset = int(m.group('offset'), 16)
+          symbol = m.group('symbol')
+          self._map[lib_name][offset] = symbol
+
+  def ReadTestFilesInDir(self, dir_path, file_suffix):
+    """Read all symbol test files in a given directory.
+
+    Args:
+      dir_path: Directory path.
+      file_suffix: File suffix used to detect test symbol files.
+    """
+    for filename in os.listdir(dir_path):
+      if filename.endswith(file_suffix):
+        lib_name = filename[:-len(file_suffix)]
+        self.ReadTestFile(os.path.join(dir_path, filename), lib_name)
+
+  def FindSymbolInfo(self, device_path, device_offset):
+    """Implement SymbolResolver.FindSymbolInfo."""
+    lib_name = os.path.basename(device_path)
+    offsets = self._map.get(lib_name)
+    if not offsets:
+      return None
+
+    return offsets.get(device_offset)
+
+
+class BacktraceTranslatorTest(unittest.TestCase):
+
+  def testEmpty(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator()
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    self.assertTrue(backtrace_translator)
+
+  def testFindLibraryOffsets(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    input_backtrace = _EXPECTED_BACKTRACE.splitlines()
+    expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP
+    offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace)
+    for lib_path, offsets in offset_map.iteritems():
+      self.assertTrue(lib_path in expected_lib_offsets_map,
+                      '%s is not in expected library-offsets map!' % lib_path)
+      sorted_offsets = sorted(offsets)
+      sorted_expected_offsets = sorted(expected_lib_offsets_map[lib_path])
+      self.assertEqual(sorted_offsets, sorted_expected_offsets,
+                       '%s has invalid offsets %s expected %s' % (
+                          lib_path, sorted_offsets, sorted_expected_offsets))
+
+  def testTranslateLine(self):
+    android_abi = 'test-abi'
+    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+                                                            apk_translator)
+    input_backtrace = _TEST_BACKTRACE.splitlines()
+    expected_backtrace = _EXPECTED_BACKTRACE.splitlines()
+    self.assertEqual(len(input_backtrace), len(expected_backtrace))
+    for trace_line, expected_line in zip(input_backtrace, expected_backtrace):
+      line = backtrace_translator.TranslateLine(trace_line,
+                                                MockSymbolResolver())
+      self.assertEqual(line, expected_line)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/__init__.py b/src/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/android/pylib/utils/__init__.py
diff --git a/src/build/android/pylib/utils/app_bundle_utils.py b/src/build/android/pylib/utils/app_bundle_utils.py
new file mode 100644
index 0000000..b2e9927
--- /dev/null
+++ b/src/build/android/pylib/utils/app_bundle_utils.py
@@ -0,0 +1,165 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+
+from util import build_utils
+from util import md5_check
+from util import resource_utils
+import bundletool
+
+# List of valid modes for GenerateBundleApks()
+BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
+                        'TEXTURE_COMPRESSION_FORMAT')
+_SYSTEM_MODES = ('system_compressed', 'system')
+
+_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
+
+
+def _CreateDeviceSpec(bundle_path, sdk_version, locales):
+  if not sdk_version:
+    manifest_data = bundletool.RunBundleTool(
+        ['dump', 'manifest', '--bundle', bundle_path])
+    sdk_version = int(
+        re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
+
+  # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
+  # files from being created within the .apks file.
+  return {
+      'screenDensity': 1000,  # Ignored since we don't split on density.
+      'sdkVersion': sdk_version,
+      'supportedAbis': _ALL_ABIS,  # Our .aab files are already split on abi.
+      'supportedLocales': locales,
+  }
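+
+# A sketch of the spec returned above, assuming minSdkVersion=21 and a
+# single 'hi' locale (values illustrative):
+#   {'screenDensity': 1000, 'sdkVersion': 21,
+#    'supportedAbis': ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'],
+#    'supportedLocales': ['hi']}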
+
+
+def GenerateBundleApks(bundle_path,
+                       bundle_apks_path,
+                       aapt2_path,
+                       keystore_path,
+                       keystore_password,
+                       keystore_alias,
+                       mode=None,
+                       minimal=False,
+                       minimal_sdk_version=None,
+                       check_for_noop=True,
+                       system_image_locales=None,
+                       optimize_for=None):
+  """Generate an .apks archive from a an app bundle if needed.
+
+  Args:
+    bundle_path: Input bundle file path.
+    bundle_apks_path: Output bundle .apks archive path. Name must end with
+      '.apks' or this operation will fail.
+    aapt2_path: Path to aapt2 build tool.
+    keystore_path: Path to keystore.
+    keystore_password: Keystore password, as a string.
+    keystore_alias: Keystore signing key alias.
+    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+    minimal: Create the minimal set of apks possible (English-only).
+    minimal_sdk_version: Use this sdkVersion when |minimal| or
+      |system_image_locales| args are present.
+    check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+    system_image_locales: Locales to package in the APK when mode is "system"
+      or "system_compressed".
+    optimize_for: Overrides split configuration, which must be None or
+      one of OPTIMIZE_FOR_OPTIONS.
+  """
+  device_spec = None
+  if minimal_sdk_version:
+    assert minimal or system_image_locales, (
+        'minimal_sdk_version is only used when minimal or system_image_locales '
+        'is specified')
+  if minimal:
+    # Measure with one language split installed. Use Hindi because it is
+    # popular. resource_size.py looks for splits/base-hi.apk.
+    # Note: English is always included since it's in base-master.apk.
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
+  elif mode in _SYSTEM_MODES:
+    if not system_image_locales:
+      raise Exception('system modes require system_image_locales')
+    # Bundletool doesn't seem to understand device specs with locales in the
+    # form of "<lang>-r<region>", so just provide the language code instead.
+    locales = [
+        resource_utils.ToAndroidLocaleName(l).split('-')[0]
+        for l in system_image_locales
+    ]
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
+
+  def rebuild():
+    logging.info('Building %s', bundle_apks_path)
+    with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
+      cmd_args = [
+          'build-apks',
+          '--aapt2=%s' % aapt2_path,
+          '--output=%s' % tmp_apks_file.name,
+          '--bundle=%s' % bundle_path,
+          '--ks=%s' % keystore_path,
+          '--ks-pass=pass:%s' % keystore_password,
+          '--ks-key-alias=%s' % keystore_alias,
+          '--overwrite',
+      ]
+
+      if mode is not None:
+        if mode not in BUILD_APKS_MODES:
+          raise Exception('Invalid mode parameter %s (should be in %s)' %
+                          (mode, BUILD_APKS_MODES))
+        cmd_args += ['--mode=' + mode]
+
+      if optimize_for:
+        if optimize_for not in OPTIMIZE_FOR_OPTIONS:
+          raise Exception('Invalid optimize_for parameter %s '
+                          '(should be in %s)' %
+                          (optimize_for, OPTIMIZE_FOR_OPTIONS))
+        cmd_args += ['--optimize-for=' + optimize_for]
+
+      with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as spec_file:
+        if device_spec:
+          json.dump(device_spec, spec_file)
+          spec_file.flush()
+          cmd_args += ['--device-spec=' + spec_file.name]
+        bundletool.RunBundleTool(cmd_args)
+
+      # Make the resulting .apks file hermetic.
+      with build_utils.TempDir() as temp_dir, \
+        build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
+        files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
+        build_utils.DoZip(files, f, base_dir=temp_dir)
+
+  if check_for_noop:
+    # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
+    # input_paths, to speed up MD5 computations by about 400ms (the .jar file
+    # contains thousands of class files which are checked independently,
+    # resulting in an .md5.stamp of more than 60000 lines!).
+    input_paths = [bundle_path, aapt2_path, keystore_path]
+    input_strings = [
+        keystore_password,
+        keystore_alias,
+        bundletool.BUNDLETOOL_JAR_PATH,
+        # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
+        # it's simpler to assume that this may not be the case in the future.
+        bundletool.BUNDLETOOL_VERSION,
+        device_spec,
+    ]
+    if mode is not None:
+      input_strings.append(mode)
+
+    # Avoid rebuilding (saves ~20s) when the input files have not changed. This
+    # is essential when calling the apk_operations.py script multiple times with
+    # the same bundle (e.g. out/Debug/bin/monochrome_public_bundle run).
+    md5_check.CallAndRecordIfStale(
+        rebuild,
+        input_paths=input_paths,
+        input_strings=input_strings,
+        output_paths=[bundle_apks_path])
+  else:
+    rebuild()
diff --git a/src/build/android/pylib/utils/argparse_utils.py b/src/build/android/pylib/utils/argparse_utils.py
new file mode 100644
index 0000000..06544a2
--- /dev/null
+++ b/src/build/android/pylib/utils/argparse_utils.py
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+
+
+class CustomHelpAction(argparse.Action):
+  '''Allows defining custom help actions.
+
+  Help actions can run even when the parser would otherwise fail on missing
+  arguments. The first help or custom help command mentioned on the command
+  line will have its help text displayed.
+
+  Usage:
+      parser = argparse.ArgumentParser(...)
+      CustomHelpAction.EnableFor(parser)
+      parser.add_argument('--foo-help',
+                          action='custom_help',
+                          custom_help_text='this is the help message',
+                          help='What this helps with')
+  '''
+  # Derived from argparse._HelpAction from
+  # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+  # pylint: disable=redefined-builtin
+  # (complains about 'help' being redefined)
+  def __init__(self,
+               option_strings,
+               dest=argparse.SUPPRESS,
+               default=argparse.SUPPRESS,
+               custom_help_text=None,
+               help=None):
+    super(CustomHelpAction, self).__init__(option_strings=option_strings,
+                                           dest=dest,
+                                           default=default,
+                                           nargs=0,
+                                           help=help)
+
+    if not custom_help_text:
+      raise ValueError('custom_help_text is required')
+    self._help_text = custom_help_text
+
+  def __call__(self, parser, namespace, values, option_string=None):
+    print(self._help_text)
+    parser.exit()
+
+  @staticmethod
+  def EnableFor(parser):
+    parser.register('action', 'custom_help', CustomHelpAction)
diff --git a/src/build/android/pylib/utils/chrome_proxy_utils.py b/src/build/android/pylib/utils/chrome_proxy_utils.py
new file mode 100644
index 0000000..149d0b9
--- /dev/null
+++ b/src/build/android/pylib/utils/chrome_proxy_utils.py
@@ -0,0 +1,171 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for setting up and tear down WPR and TsProxy service."""
+
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+from devil.android import forwarder
+
+PROXY_HOST_IP = '127.0.0.1'
+# From Catapult/WebPageReplay document.
+IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I='
+PROXY_SERVER = 'socks5://localhost'
+DEFAULT_DEVICE_PORT = 1080
+DEFAULT_ROUND_TRIP_LATENCY_MS = 100
+DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000
+DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000
+
+
+class WPRServer(object):
+  """Utils to set up a webpagereplay_go_server instance."""
+
+  def __init__(self):
+    self._archive_path = None
+    self._host_http_port = 0
+    self._host_https_port = 0
+    self._record_mode = False
+    self._server = None
+
+  def StartServer(self, wpr_archive_path):
+    """Starts a webpagereplay_go_server instance."""
+    if wpr_archive_path == self._archive_path and self._server:
+      # Reuse existing webpagereplay_go_server instance.
+      return
+
+    if self._server:
+      self.StopServer()
+
+    replay_options = []
+    if self._record_mode:
+      replay_options.append('--record')
+
+    ports = {}
+    if not self._server:
+      self._server = webpagereplay_go_server.ReplayServer(
+          wpr_archive_path,
+          PROXY_HOST_IP,
+          http_port=self._host_http_port,
+          https_port=self._host_https_port,
+          replay_options=replay_options)
+      self._archive_path = wpr_archive_path
+      ports = self._server.StartServer()
+
+    self._host_http_port = ports['http']
+    self._host_https_port = ports['https']
+
+  def StopServer(self):
+    """Stops the webpagereplay_go_server instance and resets archive."""
+    self._server.StopServer()
+    self._server = None
+    self._host_http_port = 0
+    self._host_https_port = 0
+
+  @staticmethod
+  def SetServerBinaryPath(go_binary_path):
+    """Sets the go_binary_path for webpagereplay_go_server.ReplayServer."""
+    webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path)
+
+  @property
+  def record_mode(self):
+    return self._record_mode
+
+  @record_mode.setter
+  def record_mode(self, value):
+    self._record_mode = value
+
+  @property
+  def http_port(self):
+    return self._host_http_port
+
+  @property
+  def https_port(self):
+    return self._host_https_port
+
+  @property
+  def archive_path(self):
+    return self._archive_path
+
+
+class ChromeProxySession(object):
+  """Utils to help set up a Chrome Proxy."""
+
+  def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT):
+    self._device_proxy_port = device_proxy_port
+    self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP)
+    self._wpr_server = WPRServer()
+
+  @property
+  def wpr_record_mode(self):
+    """Returns whether this proxy session was running in record mode."""
+    return self._wpr_server.record_mode
+
+  @wpr_record_mode.setter
+  def wpr_record_mode(self, value):
+    self._wpr_server.record_mode = value
+
+  @property
+  def wpr_replay_mode(self):
+    """Returns whether this proxy session was running in replay mode."""
+    return not self._wpr_server.record_mode
+
+  @property
+  def wpr_archive_path(self):
+    """Returns the wpr archive file path used in this proxy session."""
+    return self._wpr_server.archive_path
+
+  @property
+  def device_proxy_port(self):
+    return self._device_proxy_port
+
+  def GetFlags(self):
+    """Gets the chrome command line flags to be needed by ChromeProxySession."""
+    extra_flags = []
+
+    extra_flags.append('--ignore-certificate-errors-spki-list=%s' %
+                       IGNORE_CERT_ERROR_SPKI_LIST)
+    extra_flags.append('--proxy-server=%s:%s' %
+                       (PROXY_SERVER, self._device_proxy_port))
+    return extra_flags
+
+  @staticmethod
+  def SetWPRServerBinary(go_binary_path):
+    """Sets the WPR server go_binary_path."""
+    WPRServer.SetServerBinaryPath(go_binary_path)
+
+  def Start(self, device, wpr_archive_path):
+    """Starts the wpr_server as well as the ts_proxy server and setups env.
+
+    Args:
+      device: A DeviceUtils instance.
+      wpr_archive_path: A abs path to the wpr archive file.
+
+    """
+    self._wpr_server.StartServer(wpr_archive_path)
+    self._ts_proxy_server.StartServer()
+
+    # Maps device port to host port
+    forwarder.Forwarder.Map(
+        [(self._device_proxy_port, self._ts_proxy_server.port)], device)
+    # Maps tsProxy port to wpr http/https ports
+    self._ts_proxy_server.UpdateOutboundPorts(
+        http_port=self._wpr_server.http_port,
+        https_port=self._wpr_server.https_port)
+    self._ts_proxy_server.UpdateTrafficSettings(
+        round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS,
+        download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS,
+        upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS)
+
+  def Stop(self, device):
+    """Stops the wpr_server, and ts_proxy server and tears down env.
+
+    Note that Stop does not reset wpr_record_mode, wpr_replay_mode,
+    wpr_archive_path property.
+
+    Args:
+      device: A DeviceUtils instance.
+    """
+    self._wpr_server.StopServer()
+    self._ts_proxy_server.StopServer()
+    forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device)
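+
+
+# A minimal usage sketch (the binary and archive paths are illustrative, and
+# |device| is assumed to be an existing DeviceUtils instance):
+#
+#   ChromeProxySession.SetWPRServerBinary('/path/to/wpr_go_binary')
+#   session = ChromeProxySession()
+#   session.wpr_record_mode = False  # replay an existing archive
+#   session.Start(device, '/path/to/archive.wprgo')
+#   chrome_flags = session.GetFlags()  # add these to Chrome's command line
+#   ...
+#   session.Stop(device)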
diff --git a/src/build/android/pylib/utils/chrome_proxy_utils_test.py b/src/build/android/pylib/utils/chrome_proxy_utils_test.py
new file mode 100755
index 0000000..b38b268
--- /dev/null
+++ b/src/build/android/pylib/utils/chrome_proxy_utils_test.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for chrome_proxy_utils."""
+
+#pylint: disable=protected-access
+
+import os
+import unittest
+
+from pylib.utils import chrome_proxy_utils
+
+from devil.android import forwarder
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+import mock  # pylint: disable=import-error
+
+
+def _DeviceUtilsMock(test_serial, is_ready=True):
+  """Returns a DeviceUtils instance based on given serial."""
+  adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+  adb.__str__ = mock.Mock(return_value=test_serial)
+  adb.GetDeviceSerial.return_value = test_serial
+  adb.is_ready = is_ready
+  return device_utils.DeviceUtils(adb)
+
+
+class ChromeProxySessionTest(unittest.TestCase):
+  """Unittest for ChromeProxySession."""
+
+  #pylint: disable=no-self-use
+
+  @mock.patch.object(forwarder.Forwarder, 'Map')
+  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings')
+  @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port',
+              new_callable=mock.PropertyMock)
+  def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock,
+                 start_server_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._host_http_port = 1
+    chrome_proxy._wpr_server._host_https_port = 2
+    port_mock.return_value = 3
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.Start(device, 'abc')
+
+    forwarder_mock.assert_called_once_with([(4, 3)], device)
+    wpr_mock.assert_called_once_with('abc')
+    start_server_mock.assert_called_once()
+    outboundport_mock.assert_called_once_with(http_port=1, https_port=2)
+    traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000,
+                                                 round_trip_latency_ms=100,
+                                                 upload_bandwidth_kbps=72000)
+    port_mock.assert_called_once()
+
+  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+  def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.wpr_record_mode = True
+    chrome_proxy._wpr_server._archive_path = 'abc'
+    chrome_proxy.Stop(device)
+
+    forwarder_mock.assert_called_once_with(4, device)
+    wpr_mock.assert_called_once_with()
+    ts_proxy_mock.assert_called_once_with()
+
+  #pylint: enable=no-self-use
+
+  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+  def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, [])
+    chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__)
+    device = _DeviceUtilsMock('01234')
+    chrome_proxy.wpr_record_mode = True
+    chrome_proxy.Stop(device)
+
+    forwarder_mock.assert_called_once_with(4, device)
+    wpr_mock.assert_called_once_with()
+    ts_proxy_mock.assert_called_once_with()
+    self.assertFalse(chrome_proxy.wpr_replay_mode)
+    self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__))
+
+  def test_SetWPRRecordMode(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy.wpr_record_mode = True
+    self.assertTrue(chrome_proxy._wpr_server.record_mode)
+    self.assertTrue(chrome_proxy.wpr_record_mode)
+    self.assertFalse(chrome_proxy.wpr_replay_mode)
+
+    chrome_proxy.wpr_record_mode = False
+    self.assertFalse(chrome_proxy._wpr_server.record_mode)
+    self.assertFalse(chrome_proxy.wpr_record_mode)
+    self.assertTrue(chrome_proxy.wpr_replay_mode)
+
+  def test_SetWPRArchivePath(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+    chrome_proxy._wpr_server._archive_path = 'abc'
+    self.assertEquals(chrome_proxy.wpr_archive_path, 'abc')
+
+  def test_UseDefaultDeviceProxyPort(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+    expected_flags = [
+        '--ignore-certificate-errors-spki-list='
+        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+        '--proxy-server=socks5://localhost:1080'
+    ]
+    self.assertEquals(chrome_proxy.device_proxy_port, 1080)
+    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+  def test_UseNewDeviceProxyPort(self):
+    chrome_proxy = chrome_proxy_utils.ChromeProxySession(1)
+    expected_flags = [
+        '--ignore-certificate-errors-spki-list='
+        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+        '--proxy-server=socks5://localhost:1'
+    ]
+    self.assertEquals(chrome_proxy.device_proxy_port, 1)
+    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+
+class WPRServerTest(unittest.TestCase):
+  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_replaymode(self, wpr_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_archive_file = os.path.abspath(__file__)
+    wpr_server.StartServer(wpr_archive_file)
+
+    wpr_mock.assert_called_once_with(wpr_archive_file,
+                                     '127.0.0.1',
+                                     http_port=0,
+                                     https_port=0,
+                                     replay_options=[])
+
+    self.assertEqual(wpr_server._archive_path, wpr_archive_file)
+    self.assertTrue(wpr_server._server)
+
+  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_recordmode(self, wpr_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server.record_mode = True
+    wpr_server.StartServer(os.path.abspath(__file__))
+    wpr_archive_file = os.path.abspath(__file__)
+
+    wpr_mock.assert_called_once_with(wpr_archive_file,
+                                     '127.0.0.1',
+                                     http_port=0,
+                                     https_port=0,
+                                     replay_options=['--record'])
+
+    self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+    self.assertTrue(wpr_server._server)
+
+  #pylint: disable=no-self-use
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_recordmode(self, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    start_server_mock.return_value = {'http': 1, 'https': 2}
+    wpr_server.StartServer(os.path.abspath(__file__))
+
+    start_server_mock.assert_called_once()
+    self.assertEqual(wpr_server._host_http_port, 1)
+    self.assertEqual(wpr_server._host_https_port, 2)
+    self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+    self.assertTrue(wpr_server._server)
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_reuseServer(self, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server._archive_path = os.path.abspath(__file__)
+    wpr_server.StartServer(os.path.abspath(__file__))
+    start_server_mock.assert_not_called()
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server._archive_path = ''
+    wpr_server.StartServer(os.path.abspath(__file__))
+    start_server_mock.assert_called_once()
+    stop_server_mock.assert_called_once()
+
+  #pylint: enable=no-self-use
+
+  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StopServer(self, stop_server_mock):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server._server = webpagereplay_go_server.ReplayServer(
+        os.path.abspath(__file__),
+        chrome_proxy_utils.PROXY_HOST_IP,
+        http_port=0,
+        https_port=0,
+        replay_options=[])
+    wpr_server.StopServer()
+    stop_server_mock.assert_called_once()
+    self.assertFalse(wpr_server._server)
+    self.assertFalse(wpr_server._archive_path)
+    self.assertFalse(wpr_server.http_port)
+    self.assertFalse(wpr_server.https_port)
+
+  def test_SetWPRRecordMode(self):
+    wpr_server = chrome_proxy_utils.WPRServer()
+    wpr_server.record_mode = True
+    self.assertTrue(wpr_server.record_mode)
+    wpr_server.record_mode = False
+    self.assertFalse(wpr_server.record_mode)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/decorators.py b/src/build/android/pylib/utils/decorators.py
new file mode 100644
index 0000000..8eec1d1
--- /dev/null
+++ b/src/build/android/pylib/utils/decorators.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+
+
+def Memoize(f):
+  """Decorator to cache return values of function."""
+  memoize_dict = {}
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    key = repr((args, kwargs))
+    if key not in memoize_dict:
+      memoize_dict[key] = f(*args, **kwargs)
+    return memoize_dict[key]
+  return wrapper
+
+
+def NoRaiseException(default_return_value=None, exception_message=''):
+  """Returns decorator that catches and logs uncaught Exceptions.
+
+  Args:
+    default_return_value: Value to return in the case of uncaught Exception.
+    exception_message: Message for uncaught exceptions.
+  """
+  def decorator(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+      try:
+        return f(*args, **kwargs)
+      except Exception:  # pylint: disable=broad-except
+        logging.exception(exception_message)
+        return default_return_value
+    return wrapper
+  return decorator
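+
+
+# Usage sketch (the decorated functions below are hypothetical):
+#
+#   @Memoize
+#   def GetAbi(serial):
+#     ...  # expensive lookup, computed once per distinct argument
+#
+#   @NoRaiseException(default_return_value='', exception_message='upload failed')
+#   def Upload(path):
+#     ...  # any uncaught exception is logged and '' is returned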
diff --git a/src/build/android/pylib/utils/decorators_test.py b/src/build/android/pylib/utils/decorators_test.py
new file mode 100755
index 0000000..73a9f0d
--- /dev/null
+++ b/src/build/android/pylib/utils/decorators_test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for decorators.py."""
+
+import unittest
+
+from pylib.utils import decorators
+
+
+class NoRaiseExceptionDecoratorTest(unittest.TestCase):
+
+  def testFunctionDoesNotRaiseException(self):
+    """Tests that the |NoRaiseException| decorator catches exception."""
+
+    @decorators.NoRaiseException()
+    def raiseException():
+      raise Exception()
+
+    try:
+      raiseException()
+    except Exception:  # pylint: disable=broad-except
+      self.fail('Exception was not caught by |NoRaiseException| decorator')
+
+  def testFunctionReturnsCorrectValues(self):
+    """Tests that the |NoRaiseException| decorator returns correct values."""
+
+    @decorators.NoRaiseException(default_return_value=111)
+    def raiseException():
+      raise Exception()
+
+    @decorators.NoRaiseException(default_return_value=111)
+    def doesNotRaiseException():
+      return 999
+
+    self.assertEquals(raiseException(), 111)
+    self.assertEquals(doesNotRaiseException(), 999)
+
+
+class MemoizeDecoratorTest(unittest.TestCase):
+
+  def testFunctionExceptionNotMemoized(self):
+    """Tests that |Memoize| decorator does not cache exception results."""
+
+    class ExceptionType1(Exception):
+      pass
+
+    class ExceptionType2(Exception):
+      pass
+
+    @decorators.Memoize
+    def raiseExceptions():
+      if raiseExceptions.count == 0:
+        raiseExceptions.count += 1
+        raise ExceptionType1()
+
+      if raiseExceptions.count == 1:
+        raise ExceptionType2()
+    raiseExceptions.count = 0
+
+    with self.assertRaises(ExceptionType1):
+      raiseExceptions()
+    with self.assertRaises(ExceptionType2):
+      raiseExceptions()
+
+  def testFunctionResultMemoized(self):
+    """Tests that |Memoize| decorator caches results."""
+
+    @decorators.Memoize
+    def memoized():
+      memoized.count += 1
+      return memoized.count
+    memoized.count = 0
+
+    def notMemoized():
+      notMemoized.count += 1
+      return notMemoized.count
+    notMemoized.count = 0
+
+    self.assertEquals(memoized(), 1)
+    self.assertEquals(memoized(), 1)
+    self.assertEquals(memoized(), 1)
+
+    self.assertEquals(notMemoized(), 1)
+    self.assertEquals(notMemoized(), 2)
+    self.assertEquals(notMemoized(), 3)
+
+  def testFunctionMemoizedBasedOnArgs(self):
+    """Tests that |Memoize| caches results based on args and kwargs."""
+
+    @decorators.Memoize
+    def returnValueBasedOnArgsKwargs(a, k=0):
+      return a + k
+
+    self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2)
+    self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3)
+    self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3)
+    self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/device_dependencies.py b/src/build/android/pylib/utils/device_dependencies.py
new file mode 100644
index 0000000..9cb5bd8
--- /dev/null
+++ b/src/build/android/pylib/utils/device_dependencies.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+from pylib import constants
+
+
+_EXCLUSIONS = [
+    re.compile(r'.*OWNERS'),  # Should never be included.
+    re.compile(r'.*\.crx'),  # Chrome extension zip files.
+    re.compile(os.path.join('.*',
+                            r'\.git.*')),  # Any '.git*' directories/files.
+    re.compile(r'.*\.so'),  # Libraries packed into .apk.
+    re.compile(r'.*Mojo.*manifest\.json'),  # Some source_set()s pull these in.
+    re.compile(r'.*\.py'),  # Some test_support targets include python deps.
+    re.compile(r'.*\.apk'),  # Should be installed separately.
+    re.compile(r'.*lib.java/.*'),  # Never need java intermediates.
+
+    # Test filter files:
+    re.compile(r'.*/testing/buildbot/filters/.*'),
+
+    # Chrome external extensions config file.
+    re.compile(r'.*external_extensions\.json'),
+
+    # Exists just to test the compile, not to be run.
+    re.compile(r'.*jni_generator_tests'),
+
+    # v8's blobs and icu data get packaged into APKs.
+    re.compile(r'.*snapshot_blob.*\.bin'),
+    re.compile(r'.*icudtl.bin'),
+
+    # Scripts that are needed by swarming, but not on devices:
+    re.compile(r'.*llvm-symbolizer'),
+    re.compile(r'.*md5sum_bin'),
+    re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+
+    # Required for java deobfuscation on the host:
+    re.compile(r'.*build/android/stacktrace/.*'),
+    re.compile(r'.*third_party/jdk/.*'),
+    re.compile(r'.*third_party/proguard/.*'),
+
+    # Build artifacts:
+    re.compile(r'.*\.stamp'),
+    re.compile(r'.*\.pak\.info'),
+    re.compile(r'.*\.incremental\.json'),
+]
+
+
+def _FilterDataDeps(abs_host_files):
+  exclusions = _EXCLUSIONS + [
+      re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))
+  ]
+  return [p for p in abs_host_files if not any(r.match(p) for r in exclusions)]
+
+
+def DevicePathComponentsFor(host_path, output_directory):
+  """Returns the device path components for a given host path.
+
+  This returns the device path as a list of joinable path components,
+  with None as the first element to indicate that the path should be
+  rooted at $EXTERNAL_STORAGE.
+
+  e.g., given
+
+    '$RUNTIME_DEPS_ROOT_DIR/foo/bar/baz.txt'
+
+  this would return
+
+    [None, 'foo', 'bar', 'baz.txt']
+
+  This handles a couple of classes of paths differently than it otherwise
+  would:
+    - All .pak files get mapped to the top-level paks/ directory.
+    - All other dependencies get mapped to the top-level directory:
+        - If a file is not in the output directory, then its relative path to
+          the output directory will start with '..' components, which are
+          removed before the path is mapped to the top-level directory.
+        - If a file is in the output directory, then its relative path to the
+          output directory is mapped to the top-level directory.
+
+  e.g. given
+
+    '$RUNTIME_DEPS_ROOT_DIR/out/Release/icu_fake_dir/icudtl.dat'
+
+  this would return
+
+    [None, 'icu_fake_dir', 'icudtl.dat']
+
+  Args:
+    host_path: The absolute path to the host file.
+    output_directory: The absolute path to the output directory.
+  Returns:
+    A list of device path components.
+  """
+  if (host_path.startswith(output_directory) and
+      os.path.splitext(host_path)[1] == '.pak'):
+    return [None, 'paks', os.path.basename(host_path)]
+
+  rel_host_path = os.path.relpath(host_path, output_directory)
+
+  device_path_components = [None]
+  p = rel_host_path
+  while p:
+    p, d = os.path.split(p)
+    # The relative path from the output directory to a file under the runtime
+    # deps root directory may start with multiple .. strings, so they need to
+    # be skipped.
+    if d and d != os.pardir:
+      device_path_components.insert(1, d)
+  return device_path_components
+
+
+def GetDataDependencies(runtime_deps_path):
+  """Returns a list of device data dependencies.
+
+  Args:
+    runtime_deps_path: A str path to the .runtime_deps file.
+  Returns:
+    A list of (host_path, device_path) tuples.
+  """
+  if not runtime_deps_path:
+    return []
+
+  with open(runtime_deps_path, 'r') as runtime_deps_file:
+    rel_host_files = [l.strip() for l in runtime_deps_file if l.strip()]
+
+  output_directory = constants.GetOutDirectory()
+  abs_host_files = [
+      os.path.abspath(os.path.join(output_directory, r))
+      for r in rel_host_files]
+  filtered_abs_host_files = _FilterDataDeps(abs_host_files)
+  # TODO(crbug.com/752610): Filter out host executables, and investigate
+  # whether other files could be filtered as well.
+  return [(f, DevicePathComponentsFor(f, output_directory))
+          for f in filtered_abs_host_files]
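+
+
+# Usage sketch (the .runtime_deps filename is illustrative): each host file is
+# paired with the device path components produced by DevicePathComponentsFor().
+#
+#   deps_path = os.path.join(constants.GetOutDirectory(),
+#                            'base_unittests.runtime_deps')
+#   for host_path, device_components in GetDataDependencies(deps_path):
+#     print(host_path, device_components)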
diff --git a/src/build/android/pylib/utils/device_dependencies_test.py b/src/build/android/pylib/utils/device_dependencies_test.py
new file mode 100755
index 0000000..b2da5a7
--- /dev/null
+++ b/src/build/android/pylib/utils/device_dependencies_test.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from pylib import constants
+from pylib.utils import device_dependencies
+
+
+class DevicePathComponentsForTest(unittest.TestCase):
+
+  def testCheckedInFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'foo', 'bar', 'baz.txt'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectoryFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'icudtl.dat')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'icudtl.dat'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectorySubdirFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'test_dir', 'icudtl.dat')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'test_dir', 'icudtl.dat'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+  def testOutputDirectoryPakFile(self):
+    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+                             'foo.pak')
+    output_directory = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+    self.assertEquals(
+        [None, 'paks', 'foo.pak'],
+        device_dependencies.DevicePathComponentsFor(
+            test_path, output_directory))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/dexdump.py b/src/build/android/pylib/utils/dexdump.py
new file mode 100644
index 0000000..f81ac60
--- /dev/null
+++ b/src/build/android/pylib/utils/dexdump.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import shutil
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+from util import build_utils
+
+DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+
+def Dump(apk_path):
+  """Dumps class and method information from a APK into a dict via dexdump.
+
+  Args:
+    apk_path: An absolute path to an APK file to dump.
+  Returns:
+    A list of dicts, one per classes*.dex file in the APK, each in the
+    following format:
+      {
+        <package_name>: {
+          'classes': {
+            <class_name>: {
+              'methods': [<method_1>, <method_2>],
+              'superclass': <superclass_name>
+            }
+          }
+        }
+      }
+  """
+  try:
+    dexfile_dir = tempfile.mkdtemp()
+    parsed_dex_files = []
+    for dex_file in build_utils.ExtractAll(apk_path,
+                                           dexfile_dir,
+                                           pattern='*classes*.dex'):
+      output_xml = cmd_helper.GetCmdOutput(
+          [DEXDUMP_PATH, '-l', 'xml', dex_file])
+      # Dexdump doesn't escape its XML output very well; decode it as utf-8 with
+      # invalid sequences replaced, then remove forbidden characters and
+      # re-encode it (as etree expects a byte string as input so it can figure
+      # out the encoding itself from the XML declaration)
+      BAD_XML_CHARS = re.compile(
+          u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' +
+          u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
+      if sys.version_info[0] < 3:
+        decoded_xml = output_xml.decode('utf-8', 'replace')
+        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml)
+      else:
+        # Line duplicated to avoid pylint redefined-variable-type error.
+        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml)
+      parsed_dex_files.append(
+          _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8'))))
+    return parsed_dex_files
+  finally:
+    shutil.rmtree(dexfile_dir)
+
+
+def _ParseRootNode(root):
+  """Parses the XML output of dexdump. This output is in the following format.
+
+  This is a subset of the information contained within dexdump output.
+
+  <api>
+    <package name="foo.bar">
+      <class name="Class" extends="foo.bar.SuperClass">
+        <field name="Field">
+        </field>
+        <constructor name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </constructor>
+        <method name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </method>
+      </class>
+    </package>
+  </api>
+  """
+  results = {}
+  for child in root:
+    if child.tag == 'package':
+      package_name = child.attrib['name']
+      parsed_node = _ParsePackageNode(child)
+      if package_name in results:
+        results[package_name]['classes'].update(parsed_node['classes'])
+      else:
+        results[package_name] = parsed_node
+  return results
+
+
+def _ParsePackageNode(package_node):
+  """Parses a <package> node from the dexdump xml output.
+
+  Returns:
+    A dict in the format:
+      {
+        'classes': {
+          <class_1>: {
+            'methods': [<method_1>, <method_2>]
+          },
+          <class_2>: {
+            'methods': [<method_1>, <method_2>]
+          },
+        }
+      }
+  """
+  classes = {}
+  for child in package_node:
+    if child.tag == 'class':
+      classes[child.attrib['name']] = _ParseClassNode(child)
+  return {'classes': classes}
+
+
+def _ParseClassNode(class_node):
+  """Parses a <class> node from the dexdump xml output.
+
+  Returns:
+    A dict in the format:
+      {
+        'methods': [<method_1>, <method_2>]
+      }
+  """
+  methods = []
+  for child in class_node:
+    if child.tag == 'method':
+      methods.append(child.attrib['name'])
+  return {'methods': methods, 'superclass': class_node.attrib['extends']}
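+
+
+# Usage sketch (the APK path is illustrative): walk the parsed structure
+# returned by Dump(), one dict per classes*.dex file.
+#
+#   for dex in Dump('/path/to/Example.apk'):
+#     for package_name, package_info in dex.items():
+#       for class_name, class_info in package_info['classes'].items():
+#         print(package_name, class_name, class_info['methods'])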
diff --git a/src/build/android/pylib/utils/dexdump_test.py b/src/build/android/pylib/utils/dexdump_test.py
new file mode 100755
index 0000000..3197853
--- /dev/null
+++ b/src/build/android/pylib/utils/dexdump_test.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from xml.etree import ElementTree
+
+from pylib.utils import dexdump
+
+# pylint: disable=protected-access
+
+
+class DexdumpXMLParseTest(unittest.TestCase):
+
+  def testParseRootXmlNode(self):
+    example_xml_string = (
+        '<api>'
+        '<package name="com.foo.bar1">'
+        '<class'
+        '  name="Class1"'
+        '  extends="java.lang.Object"'
+        '  abstract="false"'
+        '  static="false"'
+        '  final="true"'
+        '  visibility="public">'
+        '<method'
+        '  name="class1Method1"'
+        '  return="java.lang.String"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '<method'
+        '  name="class1Method2"'
+        '  return="viod"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '</class>'
+        '<class'
+        '  name="Class2"'
+        '  extends="java.lang.Object"'
+        '  abstract="false"'
+        '  static="false"'
+        '  final="true"'
+        '  visibility="public">'
+        '<method'
+        '  name="class2Method1"'
+        '  return="java.lang.String"'
+        '  abstract="false"'
+        '  native="false"'
+        '  synchronized="false"'
+        '  static="false"'
+        '  final="false"'
+        '  visibility="public">'
+        '</method>'
+        '</class>'
+        '</package>'
+        '<package name="com.foo.bar2">'
+        '</package>'
+        '<package name="com.foo.bar3">'
+        '</package>'
+        '</api>')
+
+    actual = dexdump._ParseRootNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'com.foo.bar1' : {
+        'classes': {
+          'Class1': {
+            'methods': ['class1Method1', 'class1Method2'],
+            'superclass': 'java.lang.Object',
+          },
+          'Class2': {
+            'methods': ['class2Method1'],
+            'superclass': 'java.lang.Object',
+          }
+        },
+      },
+      'com.foo.bar2' : {'classes': {}},
+      'com.foo.bar3' : {'classes': {}},
+    }
+    self.assertEquals(expected, actual)
+
+  def testParsePackageNode(self):
+    example_xml_string = (
+        '<package name="com.foo.bar">'
+        '<class name="Class1" extends="java.lang.Object">'
+        '</class>'
+        '<class name="Class2" extends="java.lang.Object">'
+        '</class>'
+        '</package>')
+
+
+    actual = dexdump._ParsePackageNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'classes': {
+        'Class1': {
+          'methods': [],
+          'superclass': 'java.lang.Object',
+        },
+        'Class2': {
+          'methods': [],
+          'superclass': 'java.lang.Object',
+        },
+      },
+    }
+    self.assertEquals(expected, actual)
+
+  def testParseClassNode(self):
+    example_xml_string = (
+        '<class name="Class1" extends="java.lang.Object">'
+        '<method name="method1">'
+        '</method>'
+        '<method name="method2">'
+        '</method>'
+        '</class>')
+
+    actual = dexdump._ParseClassNode(
+        ElementTree.fromstring(example_xml_string))
+
+    expected = {
+      'methods': ['method1', 'method2'],
+      'superclass': 'java.lang.Object',
+    }
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/gold_utils.py b/src/build/android/pylib/utils/gold_utils.py
new file mode 100644
index 0000000..0b79a6d
--- /dev/null
+++ b/src/build/android/pylib/utils/gold_utils.py
@@ -0,0 +1,78 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""//build/android implementations of //testing/skia_gold_common.
+
+Used for interacting with the Skia Gold image diffing service.
+"""
+
+import os
+import shutil
+
+from devil.utils import cmd_helper
+from pylib.base.output_manager import Datatype
+from pylib.constants import host_paths
+from pylib.utils import repo_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+  from skia_gold_common import skia_gold_session
+  from skia_gold_common import skia_gold_session_manager
+  from skia_gold_common import skia_gold_properties
+
+
+class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+  def _StoreDiffLinks(self, image_name, output_manager, output_dir):
+    """See SkiaGoldSession._StoreDiffLinks for general documentation.
+
+    |output_manager| must be a build.android.pylib.base.OutputManager instance.
+    """
+    given_path = closest_path = diff_path = None
+    # The directory should contain "input-<hash>.png", "closest-<hash>.png",
+    # and "diff.png".
+    for f in os.listdir(output_dir):
+      filepath = os.path.join(output_dir, f)
+      if f.startswith('input-'):
+        given_path = filepath
+      elif f.startswith('closest-'):
+        closest_path = filepath
+      elif f == 'diff.png':
+        diff_path = filepath
+    results = self._comparison_results.setdefault(image_name,
+                                                  self.ComparisonResults())
+    if given_path:
+      with output_manager.ArchivedTempfile('given_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as given_file:
+        shutil.move(given_path, given_file.name)
+      results.local_diff_given_image = given_file.Link()
+    if closest_path:
+      with output_manager.ArchivedTempfile('closest_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as closest_file:
+        shutil.move(closest_path, closest_file.name)
+      results.local_diff_closest_image = closest_file.Link()
+    if diff_path:
+      with output_manager.ArchivedTempfile('diff_%s.png' % image_name,
+                                           'gold_local_diffs',
+                                           Datatype.PNG) as diff_file:
+        shutil.move(diff_path, diff_file.name)
+      results.local_diff_diff_image = diff_file.Link()
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd):
+    rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd,
+                                                          merge_stderr=True)
+    return rc, stdout
+
+
+class AndroidSkiaGoldSessionManager(
+    skia_gold_session_manager.SkiaGoldSessionManager):
+  @staticmethod
+  def GetSessionClass():
+    return AndroidSkiaGoldSession
+
+
+class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties):
+  @staticmethod
+  def _GetGitOriginMasterHeadSha1():
+    return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT)
diff --git a/src/build/android/pylib/utils/gold_utils_test.py b/src/build/android/pylib/utils/gold_utils_test.py
new file mode 100755
index 0000000..2d3cc5c
--- /dev/null
+++ b/src/build/android/pylib/utils/gold_utils_test.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for gold_utils."""
+
+#pylint: disable=protected-access
+
+import contextlib
+import os
+import tempfile
+import unittest
+
+from pylib.constants import host_paths
+from pylib.utils import gold_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+  from skia_gold_common import unittest_utils
+
+import mock  # pylint: disable=import-error
+from pyfakefs import fake_filesystem_unittest  # pylint: disable=import-error
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+def assertArgWith(test, arg_list, arg, value):
+  i = arg_list.index(arg)
+  test.assertEqual(arg_list[i + 1], value)
+
+
+class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = gold_utils.AndroidSkiaGoldProperties(args)
+    session = gold_utils.AndroidSkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                'corpus',
+                                                instance='instance')
+    session.Diff('name', 'png_file', None)
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('diff', call_args)
+    assertArgWith(self, call_args, '--corpus', 'corpus')
+    # TODO(skbug.com/10610): Remove the -public once we go back to using the
+    # non-public instance, or add a second test for testing that the correct
+    # instance is chosen if we decide to support both depending on what the
+    # user is authenticated for.
+    assertArgWith(self, call_args, '--instance', 'instance-public')
+    assertArgWith(self, call_args, '--input', 'png_file')
+    assertArgWith(self, call_args, '--test', 'name')
+    # TODO(skbug.com/10611): Re-add this assert and remove the check for the
+    # absence of the directory once we switch back to using the proper working
+    # directory.
+    # assertArgWith(self, call_args, '--work-dir', self._working_dir)
+    self.assertNotIn(self._working_dir, call_args)
+    i = call_args.index('--out-dir')
+    # The output directory should be a subdirectory of the working directory.
+    self.assertIn(self._working_dir, call_args[i + 1])
+
+
+class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase):
+  class FakeArchivedFile(object):
+    def __init__(self, path):
+      self.name = path
+
+    def Link(self):
+      return 'file://' + self.name
+
+  class FakeOutputManager(object):
+    def __init__(self):
+      self.output_dir = tempfile.mkdtemp()
+
+    @contextlib.contextmanager
+    def ArchivedTempfile(self, image_name, _, __):
+      filepath = os.path.join(self.output_dir, image_name)
+      yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath)
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  def test_outputManagerUsed(self):
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = gold_utils.AndroidSkiaGoldProperties(args)
+    session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f:
+      f.write('input')
+    with open(os.path.join(self._working_dir, 'closest-closesthash.png'),
+              'w') as f:
+      f.write('closest')
+    with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f:
+      f.write('diff')
+
+    output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager()
+    session._StoreDiffLinks('foo', output_manager, self._working_dir)
+
+    copied_input = os.path.join(output_manager.output_dir, 'given_foo.png')
+    copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png')
+    copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png')
+    with open(copied_input) as f:
+      self.assertEqual(f.read(), 'input')
+    with open(copied_closest) as f:
+      self.assertEqual(f.read(), 'closest')
+    with open(copied_diff) as f:
+      self.assertEqual(f.read(), 'diff')
+
+    self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input)
+    self.assertEqual(session.GetClosestImageLink('foo'),
+                     'file://' + copied_closest)
+    self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/android/pylib/utils/google_storage_helper.py b/src/build/android/pylib/utils/google_storage_helper.py
new file mode 100644
index 0000000..d184810
--- /dev/null
+++ b/src/build/android/pylib/utils/google_storage_helper.py
@@ -0,0 +1,126 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to Google Storage.
+
+Text data should be streamed to logdog using the |logdog_helper| module.
+Because logdog has no image or HTML viewer, such data should instead be
+uploaded directly to Google Storage using this module.
+"""
+
+import logging
+import os
+import sys
+import time
+import urlparse
+
+from pylib.constants import host_paths
+from pylib.utils import decorators
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+_GSUTIL_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult',
+    'third_party', 'gsutil', 'gsutil.py')
+_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
+_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
+
+
+@decorators.NoRaiseException(default_return_value='')
+def upload(name, filepath, bucket, gs_args=None, command_args=None,
+           content_type=None, authenticated_link=True):
+  """Uploads data to Google Storage.
+
+  Args:
+    name: Name of the file on Google Storage.
+    filepath: Path to the file you want to upload.
+    bucket: Bucket to upload the file to.
+    gs_args: Top-level gsutil options, inserted before the 'cp' command.
+    command_args: Extra arguments for the 'cp' command itself.
+    content_type: Content type to upload as. If not specified, Google storage
+        will attempt to infer content type from file extension.
+    authenticated_link: Whether to return a link that requires user to
+        authenticate with a Google account. Setting this to false will return
+        a link that does not require user to be signed into Google account but
+        will only work for completely public storage buckets.
+  Returns:
+    Web link to the item uploaded to the Google Storage bucket.
+  """
+  bucket = _format_bucket_name(bucket)
+
+  gs_path = 'gs://%s/%s' % (bucket, name)
+  logging.info('Uploading %s to %s', filepath, gs_path)
+
+  cmd = [_GSUTIL_PATH, '-q']
+  cmd.extend(gs_args or [])
+  if content_type:
+    cmd.extend(['-h', 'Content-Type:%s' % content_type])
+  cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
+
+  cmd_helper.RunCmd(cmd)
+
+  return get_url_link(name, bucket, authenticated_link)
+
+
+@decorators.NoRaiseException(default_return_value='')
+def read_from_link(link):
+  # Note that urlparse returns the path with an initial '/', so we only need to
+  # add one more after the 'gs:'.
+  gs_path = 'gs:/%s' % urlparse.urlparse(link).path
+  cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
+  return cmd_helper.GetCmdOutput(cmd)
+
+
+@decorators.NoRaiseException(default_return_value=False)
+def exists(name, bucket):
+  bucket = _format_bucket_name(bucket)
+  gs_path = 'gs://%s/%s' % (bucket, name)
+
+  cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
+  return_code = cmd_helper.RunCmd(cmd)
+  return return_code == 0
+
+
+# TODO(jbudorick): Delete this function. Only one user of it.
+def unique_name(basename, suffix='', timestamp=True, device=None):
+  """Helper function for creating a unique name for a file to store in GS.
+
+  Args:
+    basename: Base of the unique filename.
+    suffix: Suffix of the filename.
+    timestamp: Whether or not to add a timestamp to the name.
+    device: Device whose serial number should be appended to the name.
+  """
+  return '%s%s%s%s' % (
+      basename,
+      '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
+          if timestamp else '',
+      '_%s' % device.serial if device else '',
+      suffix)
+
+
+def get_url_link(name, bucket, authenticated_link=True):
+  """Get url link before/without uploading.
+
+  Args:
+    name: Name of the file on Google Storage.
+    bucket: Bucket to upload file to.
+    authenticated_link: Whether to return a link that requires user to
+        authenticate with a Google account. Setting this to false will return
+        a link that does not require user to be signed into Google account but
+        will only work for completely public storage buckets.
+  Returns:
+    Web link to the item to be uploaded to the Google Storage bucket.
+  """
+  bucket = _format_bucket_name(bucket)
+  url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
+  return os.path.join(url_template % bucket, name)
+
+
+def _format_bucket_name(bucket):
+  if bucket.startswith('gs://'):
+    bucket = bucket[len('gs://'):]
+  if bucket.endswith('/'):
+    bucket = bucket[:-1]
+  return bucket
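+
+
+# Usage sketch (the bucket and file names are illustrative):
+#
+#   link = upload(
+#       name=unique_name('screenshot', suffix='.png'),
+#       filepath='/tmp/screenshot.png',
+#       bucket='my-test-bucket',
+#       content_type='image/png')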
diff --git a/src/build/android/pylib/utils/instrumentation_tracing.py b/src/build/android/pylib/utils/instrumentation_tracing.py
new file mode 100644
index 0000000..f1d03a0
--- /dev/null
+++ b/src/build/android/pylib/utils/instrumentation_tracing.py
@@ -0,0 +1,204 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions to instrument all Python function calls.
+
+This generates a JSON file readable by Chrome's about:tracing. To use it,
+either call start_instrumenting and stop_instrumenting at the appropriate times,
+or use the Instrument context manager.
+
+A function is only traced if it is from a Python module that matches at least
+one regular expression object in to_include, and does not match any in
+to_exclude. In between the start and stop events, every function call of a
+function from such a module will be added to the trace.
+"""
+
+import contextlib
+import functools
+import inspect
+import os
+import re
+import sys
+import threading
+
+from py_trace_event import trace_event
+
+
+# Modules to exclude by default (to avoid problems like infinite loops)
+DEFAULT_EXCLUDE = [r'py_trace_event\..*']
+
+class _TraceArguments(object):
+  def __init__(self):
+    """Wraps a dictionary to ensure safe evaluation of repr()."""
+    self._arguments = {}
+
+  @staticmethod
+  def _safeStringify(item):
+    try:
+      item_str = repr(item)
+    except Exception: # pylint: disable=broad-except
+      try:
+        item_str = str(item)
+      except Exception: # pylint: disable=broad-except
+        item_str = "<ERROR>"
+    return item_str
+
+  def add(self, key, val):
+    key_str = _TraceArguments._safeStringify(key)
+    val_str = _TraceArguments._safeStringify(val)
+
+    self._arguments[key_str] = val_str
+
+  def __repr__(self):
+    return repr(self._arguments)
+
+
+saved_thread_ids = set()
+
+def _shouldTrace(frame, to_include, to_exclude, included, excluded):
+  """
+  Decides whether or not the function called in frame should be traced.
+
+  Args:
+    frame: The Python frame object of this function call.
+    to_include: Set of regex objects for modules which should be traced.
+    to_exclude: Set of regex objects for modules which should not be traced.
+    included: Set of module names we've determined should be traced.
+    excluded: Set of module names we've determined should not be traced.
+  """
+  if not inspect.getmodule(frame):
+    return False
+
+  module_name = inspect.getmodule(frame).__name__
+
+  if module_name in included:
+    includes = True
+  elif to_include:
+    includes = any([pattern.match(module_name) for pattern in to_include])
+  else:
+    includes = True
+
+  if includes:
+    included.add(module_name)
+  else:
+    return False
+
+  # Find the modules of every function in the stack trace.
+  frames = inspect.getouterframes(frame)
+  calling_module_names = [inspect.getmodule(fr[0]).__name__ for fr in frames]
+
+  # Return False for anything with an excluded module's function anywhere in the
+  # stack trace (even if the function itself is in an included module).
+  if to_exclude:
+    for calling_module in calling_module_names:
+      if calling_module in excluded:
+        return False
+      for pattern in to_exclude:
+        if pattern.match(calling_module):
+          excluded.add(calling_module)
+          return False
+
+  return True
+
+def _generate_trace_function(to_include, to_exclude):
+  to_include = {re.compile(item) for item in to_include}
+  to_exclude = {re.compile(item) for item in to_exclude}
+  to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
+
+  included = set()
+  excluded = set()
+
+  tracing_pid = os.getpid()
+
+  def traceFunction(frame, event, arg):
+    del arg
+
+    # Don't try to trace in subprocesses.
+    if os.getpid() != tracing_pid:
+      sys.settrace(None)
+      return None
+
+    # pylint: disable=unused-argument
+    if event not in ("call", "return"):
+      return None
+
+    function_name = frame.f_code.co_name
+    filename = frame.f_code.co_filename
+    line_number = frame.f_lineno
+
+    if _shouldTrace(frame, to_include, to_exclude, included, excluded):
+      if event == "call":
+        # This function is beginning; we save the thread name (if that hasn't
+        # been done), record the Begin event, and return this function to be
+        # used as the local trace function.
+
+        thread_id = threading.current_thread().ident
+
+        if thread_id not in saved_thread_ids:
+          thread_name = threading.current_thread().name
+
+          trace_event.trace_set_thread_name(thread_name)
+
+          saved_thread_ids.add(thread_id)
+
+        arguments = _TraceArguments()
+        # The function's argument values are stored in the frame's
+        # |co_varnames| as the first |co_argcount| elements. (Following that
+        # are local variables.)
+        for idx in range(frame.f_code.co_argcount):
+          arg_name = frame.f_code.co_varnames[idx]
+          arguments.add(arg_name, frame.f_locals[arg_name])
+        trace_event.trace_begin(function_name, arguments=arguments,
+                                module=inspect.getmodule(frame).__name__,
+                                filename=filename, line_number=line_number)
+
+        # Return this function, so it gets used as the "local trace function"
+        # within this function's frame (and in particular, gets called for this
+        # function's "return" event).
+        return traceFunction
+
+      if event == "return":
+        trace_event.trace_end(function_name)
+        return None
+
+  return traceFunction
+
+
+def no_tracing(f):
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    trace_func = sys.gettrace()
+    try:
+      sys.settrace(None)
+      threading.settrace(None)
+      return f(*args, **kwargs)
+    finally:
+      sys.settrace(trace_func)
+      threading.settrace(trace_func)
+  return wrapper
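+
+# Example usage (a minimal sketch): apply @no_tracing to code that must not
+# show up in the trace, e.g. a helper that would otherwise recurse into the
+# tracer:
+#
+#   @no_tracing
+#   def write_results(results):
+#     ...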
+
+
+def start_instrumenting(output_file, to_include=(), to_exclude=()):
+  """Enable tracing of all function calls (from specified modules)."""
+  trace_event.trace_enable(output_file)
+
+  traceFunc = _generate_trace_function(to_include, to_exclude)
+  sys.settrace(traceFunc)
+  threading.settrace(traceFunc)
+
+
+def stop_instrumenting():
+  trace_event.trace_disable()
+
+  sys.settrace(None)
+  threading.settrace(None)
+
+
+@contextlib.contextmanager
+def Instrument(output_file, to_include=(), to_exclude=()):
+  try:
+    start_instrumenting(output_file, to_include, to_exclude)
+    yield None
+  finally:
+    stop_instrumenting()
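+
+
+# Example usage (a minimal sketch; 'myapp' is hypothetical):
+#
+#   with Instrument('trace.json', to_include={r'^myapp(\.|$)'}):
+#     myapp.main()
+#
+# The resulting 'trace.json' is a trace-event file that can typically be
+# loaded in chrome://tracing, with a Begin/End pair per traced call.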
diff --git a/src/build/android/pylib/utils/local_utils.py b/src/build/android/pylib/utils/local_utils.py
new file mode 100644
index 0000000..027cca3
--- /dev/null
+++ b/src/build/android/pylib/utils/local_utils.py
@@ -0,0 +1,19 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for determining if a test is being run locally or not."""
+
+import os
+
+
+def IsOnSwarming():
+  """Determines whether we are on swarming or not.
+
+  Returns:
+    True if the test is being run on swarming, otherwise False.
+  """
+  # Look for the presence of the SWARMING_SERVER environment variable as a
+  # heuristic to determine whether we're running on a workstation or a bot.
+  # This should always be set on swarming, but would be strange to be set on
+  # a workstation.
+  return 'SWARMING_SERVER' in os.environ
diff --git a/src/build/android/pylib/utils/logdog_helper.py b/src/build/android/pylib/utils/logdog_helper.py
new file mode 100644
index 0000000..68a7ba5
--- /dev/null
+++ b/src/build/android/pylib/utils/logdog_helper.py
@@ -0,0 +1,94 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to logdog."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import decorators
+
+sys.path.insert(0, os.path.abspath(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client')))
+from libs.logdog import bootstrap # pylint: disable=import-error
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def text(name, data, content_type=None):
+  """Uploads text to logdog.
+
+  Args:
+    name: Name of the logdog stream.
+    data: String with data you want to upload.
+    content_type: The optional content type of the stream. If None, a
+      default content type will be chosen.
+
+  Returns:
+    Link to view uploaded text in logdog viewer.
+  """
+  logging.info('Writing text to logdog stream, %s', name)
+  with get_logdog_client().text(name, content_type=content_type) as stream:
+    stream.write(data)
+    return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value=None,
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def open_text(name):
+  """Returns a file like object which you can write to.
+
+  Args:
+    name: Name of the logdog stream.
+
+  Returns:
+    A file-like object. Call close() on it when done.
+  """
+  logging.info('Opening text logdog stream, %s', name)
+  return get_logdog_client().open_text(name)
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def binary(name, binary_path):
+  """Uploads binary to logdog.
+
+  Args:
+    name: Name of the logdog stream.
+    binary_path: Path to binary you want to upload.
+
+  Returns:
+    Link to view uploaded binary in logdog viewer.
+  """
+  logging.info('Writing binary to logdog stream, %s', name)
+  with get_logdog_client().binary(name) as stream:
+    with open(binary_path, 'rb') as f:
+      stream.write(f.read())
+      return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value='',
+                             exception_message=('Ignore this exception. '
+                                                'crbug.com/675666'))
+def get_viewer_url(name):
+  """Get Logdog viewer URL.
+
+  Args:
+    name: Name of the logdog stream.
+
+  Returns:
+    Link to view uploaded binary in logdog viewer.
+  """
+  return get_logdog_client().get_viewer_url(name)
+
+
+@decorators.Memoize
+def get_logdog_client():
+  logging.info('Getting logdog client.')
+  return bootstrap.ButlerBootstrap.probe().stream_client()
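+
+
+# Example usage (a minimal sketch; the stream name and data are hypothetical).
+# Note that these helpers swallow exceptions via decorators.NoRaiseException,
+# so callers should check for the default return values:
+#
+#   url = text('test_results/summary', 'all 42 tests passed')
+#   if url:
+#     logging.info('Uploaded results: %s', url)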
diff --git a/src/build/android/pylib/utils/logging_utils.py b/src/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000..9c4eae3
--- /dev/null
+++ b/src/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
+_COLORAMA_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
+
+with host_paths.SysPath(_COLORAMA_PATH, position=0):
+  import colorama
+
+BACK = colorama.Back
+FORE = colorama.Fore
+STYLE = colorama.Style
+
+
+class _ColorFormatter(logging.Formatter):
+  # pylint does not see members added dynamically in the constructor.
+  # pylint: disable=no-member
+  color_map = {
+    logging.DEBUG: (FORE.CYAN),
+    logging.WARNING: (FORE.YELLOW),
+    logging.ERROR: (FORE.RED),
+    logging.CRITICAL: (BACK.RED),
+  }
+
+  def __init__(self, wrapped_formatter=None):
+    """Wraps a |logging.Formatter| and adds color."""
+    super(_ColorFormatter, self).__init__()
+    self._wrapped_formatter = wrapped_formatter or logging.Formatter()
+
+  #override
+  def format(self, record):
+    message = self._wrapped_formatter.format(record)
+    return self.Colorize(message, record.levelno)
+
+  def Colorize(self, message, log_level):
+    try:
+      return (''.join(self.color_map[log_level]) + message +
+              colorama.Style.RESET_ALL)
+    except KeyError:
+      return message
+
+
+class ColorStreamHandler(logging.StreamHandler):
+  """Handler that can be used to colorize logging output.
+
+  Example using a specific logger:
+
+    logger = logging.getLogger('my_logger')
+    logger.addHandler(ColorStreamHandler())
+    logger.info('message')
+
+  Example using the root logger:
+
+    ColorStreamHandler.MakeDefault()
+    logging.info('message')
+
+  """
+  def __init__(self, force_color=False):
+    super(ColorStreamHandler, self).__init__()
+    self.force_color = force_color
+    self.setFormatter(logging.Formatter())
+
+  @property
+  def is_tty(self):
+    isatty = getattr(self.stream, 'isatty', None)
+    return isatty and isatty()
+
+  #override
+  def setFormatter(self, formatter):
+    if self.force_color or self.is_tty:
+      formatter = _ColorFormatter(formatter)
+    super(ColorStreamHandler, self).setFormatter(formatter)
+
+  @staticmethod
+  def MakeDefault(force_color=False):
+     """
+     Replaces the default logging handlers with a coloring handler. To use
+     a colorizing handler at the same time as others, either register them
+     after this call, or add the ColorStreamHandler on the logger using
+     Logger.addHandler()
+
+     Args:
+       force_color: Set to True to bypass the tty check and always colorize.
+     """
+     # If the existing handlers aren't removed, messages are duplicated
+     logging.getLogger().handlers = []
+     logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
+@contextlib.contextmanager
+def OverrideColor(level, color):
+  """Temporarily override the logging color for a specified level.
+
+  Args:
+    level: logging level whose color gets overridden.
+    color: tuple of formats to apply to log lines.
+  """
+  prev_colors = {}
+  for handler in logging.getLogger().handlers:
+    if isinstance(handler.formatter, _ColorFormatter):
+      prev_colors[handler.formatter] = handler.formatter.color_map[level]
+      handler.formatter.color_map[level] = color
+  try:
+    yield
+  finally:
+    for formatter, prev_color in prev_colors.iteritems():
+      formatter.color_map[level] = prev_color
+
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+  """Momentarilly suppress logging events from all loggers.
+
+  TODO(jbudorick): This is not thread safe. Log events from other threads might
+  also inadvertently disappear.
+
+  Example:
+
+    with logging_utils.SuppressLogging():
+      # all but CRITICAL logging messages are suppressed
+      logging.info('just doing some thing') # not shown
+      logging.critical('something really bad happened') # still shown
+
+  Args:
+    level: logging events with this or lower levels are suppressed.
+  """
+  logging.disable(level)
+  yield
+  logging.disable(logging.NOTSET)
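+
+
+# Example usage of OverrideColor (a minimal sketch): temporarily render
+# warnings with the CRITICAL background while a noisy step runs:
+#
+#   ColorStreamHandler.MakeDefault()
+#   with OverrideColor(logging.WARNING, (BACK.RED,)):
+#     logging.warning('retrying flaky step')  # shown with a red background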
diff --git a/src/build/android/pylib/utils/maven_downloader.py b/src/build/android/pylib/utils/maven_downloader.py
new file mode 100755
index 0000000..1dc1542
--- /dev/null
+++ b/src/build/android/pylib/utils/maven_downloader.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+import devil_chromium  # pylint: disable=unused-import
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+
+
+def _MakeDirsIfAbsent(path):
+  try:
+    os.makedirs(path)
+  except OSError as err:
+    if err.errno != errno.EEXIST or not os.path.isdir(path):
+      raise
+
+
+class MavenDownloader(object):
+  '''
+  Downloads and installs the requested artifacts from the Google Maven repo.
+  The artifacts are expected to be specified in the format
+  "group_id:artifact_id:version:file_type". The file type must be explicit
+  because Maven defaults to JAR, while most Android libraries are provided
+  as AARs and would otherwise fail to download. See Install().
+  '''
+
+  # Remote repository to download the artifacts from. The support library and
+  # Google Play services are only distributed there, but third-party libraries
+  # could use Maven Central or JCenter, for example. The default Maven remote
+  # is Maven Central.
+  _REMOTE_REPO = 'https://maven.google.com'
+
+  # Default Maven repository.
+  _DEFAULT_REPO_PATH = os.path.join(
+      os.path.expanduser('~'), '.m2', 'repository')
+
+  def __init__(self, debug=False):
+    self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
+    self._remote_url = MavenDownloader._REMOTE_REPO
+    self._debug = debug
+
+  def Install(self, target_repo, artifacts, include_poms=False):
+    logging.info('Installing %d artifacts...', len(artifacts))
+    downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
+                   for artifact in artifacts]
+    if self._debug:
+      for downloader in downloaders:
+        downloader.Run(include_poms)
+    else:
+      parallelizer.SyncParallelizer(downloaders).Run(include_poms)
+    logging.info('%d artifacts installed to %s', len(artifacts), target_repo)
+
+  @property
+  def repo_path(self):
+    return self._repo_path
+
+  @property
+  def remote_url(self):
+    return self._remote_url
+
+  @property
+  def debug(self):
+    return self._debug
+
+
+class _SingleArtifactDownloader(object):
+  '''Handles downloading and installing a single Maven artifact.'''
+
+  _POM_FILE_TYPE = 'pom'
+
+  def __init__(self, download_manager, artifact, target_repo):
+    self._download_manager = download_manager
+    self._artifact = artifact
+    self._target_repo = target_repo
+
+  def Run(self, include_pom=False):
+    parts = self._artifact.split(':')
+    if len(parts) != 4:
+      raise Exception('Artifacts expected as '
+                      '"group_id:artifact_id:version:file_type".')
+    group_id, artifact_id, version, file_type = parts
+    self._InstallArtifact(group_id, artifact_id, version, file_type)
+
+    if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
+      self._InstallArtifact(group_id, artifact_id, version,
+                            _SingleArtifactDownloader._POM_FILE_TYPE)
+
+  def _InstallArtifact(self, group_id, artifact_id, version, file_type):
+    logging.debug('Processing %s', self._artifact)
+
+    download_relpath = self._DownloadArtifact(
+        group_id, artifact_id, version, file_type)
+    logging.debug('Downloaded.')
+
+    install_path = self._ImportArtifact(download_relpath)
+    logging.debug('Installed %s', os.path.relpath(install_path))
+
+  def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
+    '''
+    Downloads the specified artifact using maven, to its standard location, see
+    MavenDownloader._DEFAULT_REPO_PATH.
+    '''
+    cmd = ['mvn',
+           'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
+           '-DremoteRepositories={}'.format(self._download_manager.remote_url),
+           '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
+                                           file_type)]
+
+    stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
+
+    try:
+      ret_code = cmd_helper.Call(cmd, stdout=stdout)
+      if ret_code != 0:
+        raise Exception('Command "{}" failed'.format(' '.join(cmd)))
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        raise Exception('mvn command not found. Please install Maven.')
+      raise
+
+    return os.path.join(os.path.join(*group_id.split('.')),
+                        artifact_id,
+                        version,
+                        '{}-{}.{}'.format(artifact_id, version, file_type))
+
+  def _ImportArtifact(self, artifact_path):
+    src_dir = os.path.join(self._download_manager.repo_path, artifact_path)
+    dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
+
+    _MakeDirsIfAbsent(dst_dir)
+    shutil.copy(src_dir, dst_dir)
+
+    return dst_dir
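+
+
+# Example usage (a minimal sketch; the artifact and target repo are
+# hypothetical):
+#
+#   downloader = MavenDownloader()
+#   downloader.Install(
+#       '/tmp/android_deps_repo',
+#       ['com.android.support:support-annotations:27.0.0:jar'],
+#       include_poms=True)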
diff --git a/src/build/android/pylib/utils/proguard.py b/src/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000..9d5bae2
--- /dev/null
+++ b/src/build/android/pylib/utils/proguard.py
@@ -0,0 +1,285 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+    r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+    r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
+_ELEMENT_PRIMITIVE = 0
+_ELEMENT_ARRAY = 1
+_ELEMENT_ANNOTATION = 2
+_PROGUARD_ELEMENT_RES = [
+  (_ELEMENT_PRIMITIVE,
+   re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
+  (_ELEMENT_ARRAY,
+   re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
+  (_ELEMENT_ANNOTATION,
+   re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
+]
+_PROGUARD_INDENT_WIDTH = 2
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$')
+
+
+def _GetProguardPath():
+  return os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'proguard',
+                      'lib', 'proguard603.jar')
+
+
+def Dump(jar_path):
+  """Dumps class and method information from a JAR into a dict via proguard.
+
+  Args:
+    jar_path: An absolute path to the JAR file to dump.
+  Returns:
+    A dict in the following format:
+      {
+        'classes': [
+          {
+            'class': '',
+            'superclass': '',
+            'annotations': {/* dict -- see below */},
+            'methods': [
+              {
+                'method': '',
+                'annotations': {/* dict -- see below */},
+              },
+              ...
+            ],
+          },
+          ...
+        ],
+      }
+
+    Annotations dict format:
+      {
+        'empty-annotation-class-name': None,
+        'annotation-class-name': {
+          'field': 'primitive-value',
+          'field': [ 'array-item-1', 'array-item-2', ... ],
+          'field': {
+            /* Object value */
+            'field': 'primitive-value',
+            'field': [ 'array-item-1', 'array-item-2', ... ],
+            'field': { /* Object value */ }
+          }
+        }
+      }
+
+    Note that for top-level annotations their class names are used for
+    identification, whereas for any nested annotations the corresponding
+    field names are used.
+
+    One drawback of this approach is that an array containing empty
+    annotation classes will be represented as an array of 'None' values,
+    thus it will not be possible to find out annotation class names.
+    On the other hand, storing both annotation class name and the field name
+    would produce a very complex JSON.
+  """
+
+  with tempfile.NamedTemporaryFile() as proguard_output:
+    cmd_helper.GetCmdStatusAndOutput([
+        'java',
+        '-jar', _GetProguardPath(),
+        '-injars', jar_path,
+        '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+        '-dump', proguard_output.name])
+    return Parse(proguard_output)
+
+class _AnnotationElement(object):
+  def __init__(self, name, ftype, depth):
+    self.ref = None
+    self.name = name
+    self.ftype = ftype
+    self.depth = depth
+
+class _ParseState(object):
+  _INITIAL_VALUES = (lambda: None, list, dict)
+  # Empty annotations are represented as 'None', not as an empty dictionary.
+  _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None)
+
+  def __init__(self):
+    self._class_result = None
+    self._method_result = None
+    self._parse_annotations = False
+    self._annotation_stack = []
+
+  def ResetPerSection(self, section_name):
+    self.InitMethod(None)
+    self._parse_annotations = (
+      section_name in ['Class file attributes', 'Methods'])
+
+  def ParseAnnotations(self):
+    return self._parse_annotations
+
+  def CreateAndInitClass(self, class_name):
+    self.InitMethod(None)
+    self._class_result = {
+      'class': class_name,
+      'superclass': '',
+      'annotations': {},
+      'methods': [],
+    }
+    return self._class_result
+
+  def HasCurrentClass(self):
+    return bool(self._class_result)
+
+  def SetSuperClass(self, superclass):
+    assert self.HasCurrentClass()
+    self._class_result['superclass'] = superclass
+
+  def InitMethod(self, method_name):
+    self._annotation_stack = []
+    if method_name:
+      self._method_result = {
+        'method': method_name,
+        'annotations': {},
+      }
+      self._class_result['methods'].append(self._method_result)
+    else:
+      self._method_result = None
+
+  def InitAnnotation(self, annotation, depth):
+    if not self._annotation_stack:
+      # Add a fake parent element comprising 'annotations' dictionary,
+      # so we can work uniformly with both top-level and nested annotations.
+      annotations = _AnnotationElement(
+        '<<<top level>>>', _ELEMENT_ANNOTATION, depth - 1)
+      if self._method_result:
+        annotations.ref = self._method_result['annotations']
+      else:
+        annotations.ref = self._class_result['annotations']
+      self._annotation_stack = [annotations]
+    self._BacktrackAnnotationStack(depth)
+    if not self.HasCurrentAnnotation():
+      self._annotation_stack.append(
+        _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
+    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def HasCurrentAnnotation(self):
+    return len(self._annotation_stack) > 1
+
+  def InitAnnotationField(self, field, field_type, depth):
+    self._BacktrackAnnotationStack(depth)
+    # Create the parent representation, if needed. E.g. annotations
+    # are represented with `None`, not with `{}` until they receive the first
+    # field.
+    self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
+    if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
+      # Nested arrays are not allowed in annotations.
+      assert field_type != _ELEMENT_ARRAY
+      # Use array index instead of bogus field name.
+      field = len(self._annotation_stack[-1].ref)
+    self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
+    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+  def UpdateCurrentAnnotationFieldValue(self, value, depth):
+    self._BacktrackAnnotationStack(depth)
+    self._InitOrUpdateCurrentField(value)
+
+  def _CreateAnnotationPlaceHolder(self, constructors):
+    assert self.HasCurrentAnnotation()
+    field = self._annotation_stack[-1]
+    if field.ref is None:
+      field.ref = constructors[field.ftype]()
+      self._InitOrUpdateCurrentField(field.ref)
+
+  def _BacktrackAnnotationStack(self, depth):
+    stack = self._annotation_stack
+    while stack and stack[-1].depth >= depth:
+      stack.pop()
+
+  def _InitOrUpdateCurrentField(self, value):
+    assert self.HasCurrentAnnotation()
+    parent = self._annotation_stack[-2]
+    assert parent.ref is not None
+    # There can be no nested constant element values.
+    assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
+    field = self._annotation_stack[-1]
+    if isinstance(value, str) and field.ftype != _ELEMENT_PRIMITIVE:
+      # The value comes from the output parser via
+      # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
+      # element. If it isn't, just skip it.
+      return
+    if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
+      parent.ref.append(value)
+    else:
+      parent.ref[field.name] = value
+
+
+def _GetDepth(prefix):
+  return len(prefix) // _PROGUARD_INDENT_WIDTH
+
+def Parse(proguard_output):
+  results = {
+    'classes': [],
+  }
+
+  state = _ParseState()
+
+  for line in proguard_output:
+    line = line.strip('\r\n')
+
+    m = _PROGUARD_CLASS_RE.match(line)
+    if m:
+      results['classes'].append(
+        state.CreateAndInitClass(m.group(1).replace('/', '.')))
+      continue
+
+    if not state.HasCurrentClass():
+      continue
+
+    m = _PROGUARD_SUPERCLASS_RE.match(line)
+    if m:
+      state.SetSuperClass(m.group(1).replace('/', '.'))
+      continue
+
+    m = _PROGUARD_SECTION_RE.match(line)
+    if m:
+      state.ResetPerSection(m.group(1))
+      continue
+
+    m = _PROGUARD_METHOD_RE.match(line)
+    if m:
+      state.InitMethod(m.group(1))
+      continue
+
+    if not state.ParseAnnotations():
+      continue
+
+    m = _PROGUARD_ANNOTATION_RE.match(line)
+    if m:
+      # Ignore the annotation package.
+      state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
+      continue
+
+    if state.HasCurrentAnnotation():
+      m = None
+      for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
+        m = element_re.match(line)
+        if m:
+          state.InitAnnotationField(
+            m.group(2), element_type, _GetDepth(m.group(1)))
+          break
+      if m:
+        continue
+      m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+      if m:
+        state.UpdateCurrentAnnotationFieldValue(
+          m.group(2), _GetDepth(m.group(1)))
+      else:
+        state.InitMethod(None)
+
+  return results
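+
+
+# Example usage (a minimal sketch; the jar path is hypothetical):
+#
+#   info = Dump('/path/to/Test.jar')
+#   for clazz in info['classes']:
+#     print clazz['class'], [m['method'] for m in clazz['methods']]
+#
+# Parse() also accepts any iterable of proguard -dump output lines, which is
+# how proguard_test.py exercises it without invoking java.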
diff --git a/src/build/android/pylib/utils/proguard_test.py b/src/build/android/pylib/utils/proguard_test.py
new file mode 100755
index 0000000..b11c299
--- /dev/null
+++ b/src/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,495 @@
+#! /usr/bin/env vpython
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
+class TestParse(unittest.TestCase):
+
+  def setUp(self):
+    self.maxDiff = None
+
+  def testClass(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       '  Superclass: java/lang/Object'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': 'java.lang.Object',
+          'annotations': {},
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethod(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       <init>()V'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': '<init>',
+              'annotations': {}
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotation(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/Annotation;]:',
+       '  - Annotation [Lorg/example/AnnotationWithValue;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+       '    - Constant element value [attr1 \'13\']',
+       '      - Utf8 [val1]',
+       '    - Constant element value [attr2 \'13\']',
+       '      - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'Annotation': None,
+            'AnnotationWithValue': {'attr': 'val'},
+            'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassAnnotationWithArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '  - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'AnnotationWithEmptyArray': {'arrayAttr': []},
+            'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+            'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testNestedClassAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 1):',
+       '  - Annotation [Lorg/example/OuterAnnotation;]:',
+       '    - Constant element value [outerAttr \'13\']',
+       '      - Utf8 [outerVal]',
+       '    - Array element value [outerArr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal2]',
+       '    - Annotation element value [emptyAnn]:',
+       '      - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '    - Annotation element value [ann]:',
+       '      - Annotation [Lorg/example/InnerAnnotation;]:',
+       '        - Constant element value [innerAttr \'13\']',
+       '          - Utf8 [innerVal]',
+       '        - Array element value [innerArr]:',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal1]',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal2]',
+       '        - Annotation element value [emptyInnerAnn]:',
+       '          - Annotation [Lorg/example/EmptyAnnotation;]:'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'OuterAnnotation': {
+              'outerAttr': 'outerVal',
+              'outerArr': ['outerArrVal1', 'outerArrVal2'],
+              'emptyAnn': None,
+              'ann': {
+                'innerAttr': 'innerVal',
+                'innerArr': ['innerArrVal1', 'innerArrVal2'],
+                'emptyInnerAnn': None
+              }
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testClassArraysOfAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 1):',
+       '   - Annotation [Lorg/example/OuterAnnotation;]:',
+       '     - Array element value [arrayWithEmptyAnnotations]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '     - Array element value [outerArray]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/InnerAnnotation;]:',
+       '           - Constant element value [innerAttr \'115\']',
+       '             - Utf8 [innerVal]',
+       '           - Array element value [arguments]:',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg1Attr \'115\']',
+       '                   - Utf8 [arg1Val]',
+       '                 - Array element value [arg1Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [11]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [12]',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg2Attr \'115\']',
+       '                   - Utf8 [arg2Val]',
+       '                 - Array element value [arg2Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [21]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [22]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'OuterAnnotation': {
+              'arrayWithEmptyAnnotations': [None, None],
+              'outerArray': [
+                {
+                  'innerAttr': 'innerVal',
+                  'arguments': [
+                    {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+                    {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+                  ]
+                }
+              ]
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testReadFullClassFileAttributes(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Class file attributes (count = 3):',
+       '  - Source file attribute:',
+       '    - Utf8 [Class.java]',
+       '  - Runtime visible annotations attribute:',
+       '    - Annotation [Lorg/example/IntValueAnnotation;]:',
+       '      - Constant element value [value \'73\']',
+       '        - Integer [19]',
+       '  - Inner classes attribute (count = 1)',
+       '    - InnerClassesInfo:',
+       '      Access flags:  0x9 = public static',
+       '      - Class [org/example/Class1]',
+       '      - Class [org/example/Class2]',
+       '      - Utf8 [OnPageFinishedHelper]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {
+            'IntValueAnnotation': {
+              'value': '19',
+            }
+          },
+          'methods': []
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotation(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/Annotation;]:',
+       '  - Annotation [Lorg/example/AnnotationWithValue;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+       '    - Constant element value [attr1 \'13\']',
+       '      - Utf8 [val1]',
+       '    - Constant element value [attr2 \'13\']',
+       '      - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'Annotation': None,
+                'AnnotationWithValue': {'attr': 'val'},
+                'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotationWithArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '  - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'AnnotationWithEmptyArray': {'arrayAttr': []},
+                'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+                'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodAnnotationWithPrimitivesAndArrays(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:',
+       '    - Array element value [arrayAttr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val]',
+       '    - Constant element value [attr \'13\']',
+       '      - Utf8 [val]',
+       '  - Annotation [Lorg/example/AnnotationTwoArrays;]:',
+       '    - Array element value [arrayAttr1]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val1]',
+       '    - Array element value [arrayAttr2]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [val2]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'AnnotationPrimitiveThenArray': {'attr': 'val',
+                                                 'arrayAttr': ['val']},
+                'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'],
+                                                 'attr': 'val'},
+                'AnnotationTwoArrays': {'arrayAttr1': ['val1'],
+                                        'arrayAttr2': ['val2']}
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testNestedMethodAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '  - Annotation [Lorg/example/OuterAnnotation;]:',
+       '    - Constant element value [outerAttr \'13\']',
+       '      - Utf8 [outerVal]',
+       '    - Array element value [outerArr]:',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal1]',
+       '      - Constant element value [(default) \'13\']',
+       '        - Utf8 [outerArrVal2]',
+       '    - Annotation element value [emptyAnn]:',
+       '      - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '    - Annotation element value [ann]:',
+       '      - Annotation [Lorg/example/InnerAnnotation;]:',
+       '        - Constant element value [innerAttr \'13\']',
+       '          - Utf8 [innerVal]',
+       '        - Array element value [innerArr]:',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal1]',
+       '          - Constant element value [(default) \'13\']',
+       '            - Utf8 [innerArrVal2]',
+       '        - Annotation element value [emptyInnerAnn]:',
+       '          - Annotation [Lorg/example/EmptyAnnotation;]:'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'OuterAnnotation': {
+                  'outerAttr': 'outerVal',
+                  'outerArr': ['outerArrVal1', 'outerArrVal2'],
+                  'emptyAnn': None,
+                  'ann': {
+                    'innerAttr': 'innerVal',
+                    'innerArr': ['innerArrVal1', 'innerArrVal2'],
+                    'emptyInnerAnn': None
+                  }
+                }
+              },
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+  def testMethodArraysOfAnnotations(self):
+    actual = proguard.Parse(
+      ['- Program class: org/example/Test',
+       'Methods (count = 1):',
+       '- Method:       Test()V',
+       '   - Annotation [Lorg/example/OuterAnnotation;]:',
+       '     - Array element value [arrayWithEmptyAnnotations]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/EmptyAnnotation;]:',
+       '     - Array element value [outerArray]:',
+       '       - Annotation element value [(default)]:',
+       '         - Annotation [Lorg/example/InnerAnnotation;]:',
+       '           - Constant element value [innerAttr \'115\']',
+       '             - Utf8 [innerVal]',
+       '           - Array element value [arguments]:',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg1Attr \'115\']',
+       '                   - Utf8 [arg1Val]',
+       '                 - Array element value [arg1Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [11]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [12]',
+       '             - Annotation element value [(default)]:',
+       '               - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+       '                 - Constant element value [arg2Attr \'115\']',
+       '                   - Utf8 [arg2Val]',
+       '                 - Array element value [arg2Array]:',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [21]',
+       '                   - Constant element value [(default) \'73\']',
+       '                     - Integer [22]'])
+    expected = {
+      'classes': [
+        {
+          'class': 'org.example.Test',
+          'superclass': '',
+          'annotations': {},
+          'methods': [
+            {
+              'method': 'Test',
+              'annotations': {
+                'OuterAnnotation': {
+                  'arrayWithEmptyAnnotations': [None, None],
+                  'outerArray': [
+                    {
+                      'innerAttr': 'innerVal',
+                      'arguments': [
+                        {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+                        {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+                      ]
+                    }
+                  ]
+                }
+              }
+            }
+          ]
+        }
+      ]
+    }
+    self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/android/pylib/utils/repo_utils.py b/src/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000..f9d300a
--- /dev/null
+++ b/src/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from devil.utils import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+  """Returns the git hash tag for the given directory.
+
+  Args:
+    in_directory: The directory where git is to be run.
+  """
+  command_line = ['git', 'log', '-1', '--pretty=format:%H']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output[0:40]
+
+
+def GetGitOriginMasterHeadSHA1(in_directory):
+  command_line = ['git', 'rev-parse', 'origin/master']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output.strip()
diff --git a/src/build/android/pylib/utils/shared_preference_utils.py b/src/build/android/pylib/utils/shared_preference_utils.py
new file mode 100644
index 0000000..ae0d31b
--- /dev/null
+++ b/src/build/android/pylib/utils/shared_preference_utils.py
@@ -0,0 +1,95 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for modifying an app's settings file using JSON."""
+
+import json
+import logging
+
+
+def UnicodeToStr(data):
+  """Recursively converts any Unicode to Python strings.
+
+  Args:
+    data: The data to be converted.
+
+  Returns:
+    A copy of the given data, but with instances of Unicode converted to Python
+    strings.
+  """
+  if isinstance(data, dict):
+    return {UnicodeToStr(key): UnicodeToStr(value)
+            for key, value in data.iteritems()}
+  elif isinstance(data, list):
+    return [UnicodeToStr(element) for element in data]
+  elif isinstance(data, unicode):
+    return data.encode('utf-8')
+  return data
+
+
+def ExtractSettingsFromJson(filepath):
+  """Extracts the settings data from the given JSON file.
+
+  Args:
+    filepath: The path to the JSON file to read.
+
+  Returns:
+    The data read from the JSON file with strings converted to Python strings.
+  """
+  # json.load() loads strings as unicode, which causes issues when trying
+  # to edit string values in preference files, so convert to Python strings
+  with open(filepath) as prefs_file:
+    return UnicodeToStr(json.load(prefs_file))
+
+
+def ApplySharedPreferenceSetting(shared_pref, setting):
+  """Applies the given app settings to the given device.
+
+  Modifies an installed app's settings by modifying its shared preference
+  settings file. Provided settings data must be a settings dictionary,
+  which are in the following format:
+  {
+    "package": "com.example.package",
+    "filename": "AppSettingsFile.xml",
+    "supports_encrypted_path": true,
+    "set": {
+      "SomeBoolToSet": true,
+      "SomeStringToSet": "StringValue",
+    },
+    "remove": [
+      "list",
+      "of",
+      "keys",
+      "to",
+      "remove",
+    ]
+  }
+
+  Example JSON files that can be read with ExtractSettingsFromJson and passed to
+  this function are in //chrome/android/shared_preference_files/test/.
+
+  Args:
+    shared_pref: The devil SharedPrefs object for the device the settings will
+        be applied to.
+    setting: A settings dictionary to apply.
+  """
+  shared_pref.Load()
+  for key in setting.get('remove', []):
+    try:
+      shared_pref.Remove(key)
+    except KeyError:
+      logging.warning("Attempted to remove non-existent key %s", key)
+  for key, value in setting.get('set', {}).iteritems():
+    if isinstance(value, bool):
+      shared_pref.SetBoolean(key, value)
+    elif isinstance(value, basestring):
+      shared_pref.SetString(key, value)
+    elif isinstance(value, (long, int)):
+      shared_pref.SetLong(key, value)
+    elif isinstance(value, list):
+      shared_pref.SetStringSet(key, value)
+    else:
+      raise ValueError("Given invalid value type %s for key %s" % (
+          str(type(value)), key))
+  shared_pref.Commit()
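+
+
+# Example usage (a minimal sketch; assumes devil's
+# devil.android.sdk.shared_prefs.SharedPrefs API and a hypothetical settings
+# file):
+#
+#   from devil.android.sdk import shared_prefs
+#
+#   setting = ExtractSettingsFromJson('/path/to/settings.json')
+#   pref = shared_prefs.SharedPrefs(
+#       device, setting['package'], setting['filename'])
+#   ApplySharedPreferenceSetting(pref, setting)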
diff --git a/src/build/android/pylib/utils/simpleperf.py b/src/build/android/pylib/utils/simpleperf.py
new file mode 100644
index 0000000..b3ba00e
--- /dev/null
+++ b/src/build/android/pylib/utils/simpleperf.py
@@ -0,0 +1,260 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from devil import devil_env
+from devil.android import device_signal
+from devil.android.sdk import version_codes
+from pylib import constants
+
+
+def _ProcessType(proc):
+  _, _, suffix = proc.name.partition(':')
+  if not suffix:
+    return 'browser'
+  if suffix.startswith('sandboxed_process'):
+    return 'renderer'
+  if suffix.startswith('privileged_process'):
+    return 'gpu'
+  return None
+
+
+def _GetSpecifiedPID(device, package_name, process_specifier):
+  if process_specifier is None:
+    return None
+
+  # Check for numeric PID
+  try:
+    pid = int(process_specifier)
+    return pid
+  except ValueError:
+    pass
+
+  # Check for exact process name; can be any of these formats:
+  #   <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+  #   :<process name>, i.e. ':sandboxed_process0'
+  #   <process name>, i.e. 'sandboxed_process0'
+  full_process_name = process_specifier
+  if process_specifier.startswith(':'):
+    full_process_name = package_name + process_specifier
+  elif ':' not in process_specifier:
+    full_process_name = '%s:%s' % (package_name, process_specifier)
+  matching_processes = device.ListProcesses(full_process_name)
+  if len(matching_processes) == 1:
+    return matching_processes[0].pid
+  if len(matching_processes) > 1:
+    raise RuntimeError('Found %d processes with name "%s".' % (
+        len(matching_processes), process_specifier))
+
+  # Check for process type (i.e. 'renderer')
+  package_processes = device.ListProcesses(package_name)
+  matching_processes = [p for p in package_processes if (
+      _ProcessType(p) == process_specifier)]
+  if process_specifier == 'renderer' and len(matching_processes) > 1:
+    raise RuntimeError('Found %d renderer processes; please re-run with only '
+                       'one open tab.' % len(matching_processes))
+  if len(matching_processes) != 1:
+    raise RuntimeError('Found %d processes of type "%s".' % (
+        len(matching_processes), process_specifier))
+  return matching_processes[0].pid
+
+
+def _ThreadsForProcess(device, pid):
+  # The thread list output format for 'ps' is the same regardless of version.
+  # Here's the column headers, and a sample line for a thread belonging to
+  # pid 12345 (note that the last few columns are not aligned with headers):
+  #
+  # USER        PID   TID  PPID     VSZ    RSS WCHAN            ADDR S CMD
+  # u0_i101   12345 24680   567 1357902  97531 futex_wait_queue_me e85acd9c S \
+  #     CrRendererMain
+  if device.build_version_sdk >= version_codes.OREO:
+    pid_regex = (
+        r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
+    ps_cmd = "ps -T -e | grep '%s'" % pid_regex
+    ps_output_lines = device.RunShellCommand(
+        ps_cmd, shell=True, check_return=True)
+  else:
+    ps_cmd = ['ps', '-p', str(pid), '-t']
+    ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
+  result = []
+  for l in ps_output_lines:
+    fields = l.split()
+    # fields[2] is tid, fields[-1] is thread name. Output may include an entry
+    # for the process itself with tid=pid; omit that one.
+    if fields[2] == str(pid):
+      continue
+    result.append((int(fields[2]), fields[-1]))
+  return result
+
+
+def _ThreadType(thread_name):
+  if not thread_name:
+    return 'unknown'
+  if (thread_name.startswith('Chrome_ChildIO') or
+      thread_name.startswith('Chrome_IO')):
+    return 'io'
+  if thread_name.startswith('Compositor'):
+    return 'compositor'
+  if (thread_name.startswith('ChildProcessMai') or
+      thread_name.startswith('CrGpuMain') or
+      thread_name.startswith('CrRendererMain')):
+    return 'main'
+  if thread_name.startswith('RenderThread'):
+    return 'render'
+  return None
+
+
+def _GetSpecifiedTID(device, pid, thread_specifier):
+  if thread_specifier is None:
+    return None
+
+  # Check for numeric TID
+  try:
+    tid = int(thread_specifier)
+    return tid
+  except ValueError:
+    pass
+
+  # Check for thread type
+  if pid is not None:
+    matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
+        _ThreadType(t[1]) == thread_specifier)]
+    if len(matching_threads) != 1:
+      raise RuntimeError('Found %d threads of type "%s".' % (
+          len(matching_threads), thread_specifier))
+    return matching_threads[0][0]
+
+  return None
+
+
+def PrepareDevice(device):
+  if device.build_version_sdk < version_codes.NOUGAT:
+    raise RuntimeError('Simpleperf profiling is only supported on Android N '
+                       'and later.')
+
+  # Necessary for profiling
+  # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
+  device.SetProp('security.perf_harden', '0')
+
+
+def InstallSimpleperf(device, package_name):
+  package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
+  host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
+  if not host_simpleperf_path:
+    raise Exception('Could not get path to simpleperf executable on host.')
+  device_simpleperf_path = '/'.join(
+      ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
+  device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
+  return device_simpleperf_path
+
+
+@contextlib.contextmanager
+def RunSimpleperf(device, device_simpleperf_path, package_name,
+                  process_specifier, thread_specifier, profiler_args,
+                  host_out_path):
+  pid = _GetSpecifiedPID(device, package_name, process_specifier)
+  tid = _GetSpecifiedTID(device, pid, thread_specifier)
+  if pid is None and tid is None:
+    raise RuntimeError('Could not find specified process/thread running on '
+                       'device. Make sure the apk is already running before '
+                       'attempting to profile.')
+  profiler_args = list(profiler_args)
+  if profiler_args and profiler_args[0] == 'record':
+    profiler_args.pop(0)
+  if '--call-graph' not in profiler_args and '-g' not in profiler_args:
+    profiler_args.append('-g')
+  if '-f' not in profiler_args:
+    profiler_args.extend(('-f', '1000'))
+  device_out_path = '/data/local/tmp/perf.data'
+  if '-o' in profiler_args:
+    device_out_path = profiler_args[profiler_args.index('-o') + 1]
+  else:
+    profiler_args.extend(('-o', device_out_path))
+
+  if tid:
+    profiler_args.extend(('-t', str(tid)))
+  else:
+    profiler_args.extend(('-p', str(pid)))
+
+  adb_shell_simpleperf_process = device.adb.StartShell(
+      [device_simpleperf_path, 'record'] + profiler_args)
+
+  completed = False
+  try:
+    yield
+    completed = True
+
+  finally:
+    device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
+                   quiet=True)
+    if completed:
+      adb_shell_simpleperf_process.wait()
+      device.PullFile(device_out_path, host_out_path)
+
+
+def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
+                             pprof_out_path):
+  # The simpleperf scripts require the unstripped libs to be installed in the
+  # same directory structure as the libs on the device. Much of the logic here
+  # is just figuring out and creating the necessary directory structure, and
+  # symlinking the unstripped shared libs.
+
+  # Get the set of libs that we can symbolize
+  unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
+  unstripped_libs = set(
+      f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
+
+  # report.py will show the directory structure above the shared libs;
+  # that is the directory structure we need to recreate on the host.
+  script_dir = devil_env.config.LocalPath('simpleperf_scripts')
+  report_path = os.path.join(script_dir, 'report.py')
+  report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
+  device_lib_path = None
+  for line in subprocess.check_output(
+      report_cmd, stderr=subprocess.STDOUT).splitlines():
+    fields = line.split()
+    if len(fields) < 5:
+      continue
+    shlib_path = fields[4]
+    shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
+    if shlib_basename in unstripped_libs:
+      device_lib_path = shlib_dirname
+      break
+  if not device_lib_path:
+    raise RuntimeError('No chrome-related symbols in profiling data in %s. '
+                       'Either the process was idle for the entire profiling '
+                       'period, or something went very wrong (and you should '
+                       'file a bug at crbug.com/new with component '
+                       'Speed>Tracing, and assign it to szager@chromium.org).'
+                       % simpleperf_out_path)
+
+  # Recreate the directory structure locally, and symlink unstripped libs.
+  processing_dir = tempfile.mkdtemp()
+  try:
+    processing_lib_dir = os.path.join(
+        processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
+    os.makedirs(processing_lib_dir)
+    for lib in unstripped_libs:
+      unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
+      processing_lib_path = os.path.join(processing_lib_dir, lib)
+      os.symlink(unstripped_lib_path, processing_lib_path)
+
+    # Run the script to annotate symbols and convert from simpleperf format to
+    # pprof format.
+    pprof_converter_script = os.path.join(
+        script_dir, 'pprof_proto_generator.py')
+    pprof_converter_cmd = [
+        sys.executable, pprof_converter_script, '-i', simpleperf_out_path, '-o',
+        os.path.abspath(pprof_out_path), '--ndk_path',
+        constants.ANDROID_NDK_ROOT
+    ]
+    subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
+                            cwd=processing_dir)
+  finally:
+    shutil.rmtree(processing_dir, ignore_errors=True)
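+
+
+# Editor's note: the sketch below is illustrative and not part of the
+# imported file. It shows how the three helpers above compose; `device` is
+# assumed to be a devil DeviceUtils instance, and the specifier arguments
+# follow _GetSpecifiedPID/_GetSpecifiedTID above.
+def _ExampleProfilePackage(device, package_name, process_specifier,
+                           thread_specifier, build_directory,
+                           host_perf_path, pprof_out_path):
+  simpleperf_path = InstallSimpleperf(device, package_name)
+  with RunSimpleperf(device, simpleperf_path, package_name,
+                     process_specifier, thread_specifier,
+                     ['-f', '1000'], host_perf_path):
+    # Exercise the app here; profiling stops when the block exits.
+    raw_input('Profiling... press enter to stop.')
+  ConvertSimpleperfToPprof(host_perf_path, build_directory, pprof_out_path)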
diff --git a/src/build/android/pylib/utils/test_filter.py b/src/build/android/pylib/utils/test_filter.py
new file mode 100644
index 0000000..6db6243
--- /dev/null
+++ b/src/build/android/pylib/utils/test_filter.py
@@ -0,0 +1,141 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+
+_CMDLINE_NAME_SEGMENT_RE = re.compile(
+    r' with(?:out)? \{[^\}]*\}')
+
+
+class ConflictingPositiveFiltersException(Exception):
+  """Raised when both filter file and filter argument have positive filters."""
+
+
+def ParseFilterFile(input_lines):
+  """Converts test filter file contents to positive and negative pattern lists.
+
+  See //testing/buildbot/filters/README.md for description of the
+  syntax that |input_lines| are expected to follow.
+
+  See
+  https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md#running-a-subset-of-the-tests
+  for description of the syntax that --gtest_filter argument should follow.
+
+  Args:
+    input_lines: An iterable (e.g. a list or a file) containing input lines.
+  Returns:
+    tuple containing the lists of positive patterns and negative patterns
+  """
+  # Strip comments and whitespace from each line and filter non-empty lines.
+  stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
+  filter_lines = [l for l in stripped_lines if l]
+
+  # Split the tests into positive and negative patterns (gtest treats
+  # every pattern after the first '-' sign as an exclusion).
+  positive_patterns = [l for l in filter_lines if l[0] != '-']
+  negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
+  return positive_patterns, negative_patterns
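+
+# Editor's note: illustrative example (not part of the imported file).
+#
+#   ParseFilterFile(['FooTest.*  # a comment', '', '-FooTest.testSlow'])
+#   => (['FooTest.*'], ['FooTest.testSlow'])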
+
+
+def AddFilterOptions(parser):
+  """Adds filter command-line options to the provided parser.
+
+  Args:
+    parser: an argparse.ArgumentParser instance.
+  """
+  parser.add_argument(
+      # Deprecated argument.
+      '--gtest-filter-file',
+      # New argument.
+      '--test-launcher-filter-file',
+      dest='test_filter_file',
+      help='Path to file that contains googletest-style filter strings. '
+      'See also //testing/buildbot/filters/README.md.')
+
+  filter_group = parser.add_mutually_exclusive_group()
+  filter_group.add_argument(
+      '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+      dest='test_filter',
+      help='googletest-style filter string.',
+      default=os.environ.get('GTEST_FILTER'))
+  filter_group.add_argument(
+      '--isolated-script-test-filter',
+      help='isolated script filter string. '
+           'Like gtest filter strings, but with :: separators instead of :')
+
+
+def AppendPatternsToFilter(test_filter, positive_patterns=None,
+                           negative_patterns=None):
+  """Returns a test-filter string with additional patterns.
+
+  Args:
+    test_filter: test filter string
+    positive_patterns: list of positive patterns to add to string
+    negative_patterns: list of negative patterns to add to string
+  """
+  positives = []
+  negatives = []
+  positive = ''
+  negative = ''
+
+  split_filter = test_filter.split('-', 1)
+  if len(split_filter) == 1:
+    positive = split_filter[0]
+  else:
+    positive, negative = split_filter
+
+  positives += [f for f in positive.split(':') if f]
+  negatives += [f for f in negative.split(':') if f]
+
+  positives += positive_patterns if positive_patterns else []
+  negatives += negative_patterns if negative_patterns else []
+
+  final_filter = ':'.join([p.replace('#', '.') for p in positives])
+  if negatives:
+    final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
+  return final_filter
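+
+# Editor's note: illustrative example (not part of the imported file).
+# Positives merge before the '-', negatives after it, and '#' separators
+# are converted to '.':
+#
+#   AppendPatternsToFilter('A.*-B.*', ['C#x'], ['D.*'])
+#   => 'A.*:C.x-B.*:D.*'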
+
+
+def HasPositivePatterns(test_filter):
+  """Returns True if test_filter contains a positive pattern, else False
+
+  Args:
+    test_filter: test-filter style string
+  """
+  return bool(len(test_filter) > 0 and test_filter[0] != '-')
+
+
+def InitializeFilterFromArgs(args):
+  """Returns a filter string from the command-line option values.
+
+  Args:
+    args: an argparse.Namespace instance resulting from using a parser
+      to which the filter options above were added.
+
+  Raises:
+    ConflictingPositiveFiltersException if both filter file and command line
+    specify positive filters.
+  """
+  test_filter = ''
+  if args.isolated_script_test_filter:
+    args.test_filter = args.isolated_script_test_filter.replace('::', ':')
+  if args.test_filter:
+    test_filter = _CMDLINE_NAME_SEGMENT_RE.sub(
+        '', args.test_filter.replace('#', '.'))
+
+  if args.test_filter_file:
+    for test_filter_file in args.test_filter_file.split(';'):
+      with open(test_filter_file, 'r') as f:
+        positive_file_patterns, negative_file_patterns = ParseFilterFile(f)
+        if positive_file_patterns and HasPositivePatterns(test_filter):
+          raise ConflictingPositiveFiltersException(
+              'Cannot specify positive pattern in both filter file and ' +
+              'filter command line argument')
+        test_filter = AppendPatternsToFilter(
+            test_filter,
+            positive_patterns=positive_file_patterns,
+            negative_patterns=negative_file_patterns)
+
+  return test_filter
diff --git a/src/build/android/pylib/utils/test_filter_test.py b/src/build/android/pylib/utils/test_filter_test.py
new file mode 100755
index 0000000..1ae5a7e
--- /dev/null
+++ b/src/build/android/pylib/utils/test_filter_test.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+import tempfile
+import unittest
+
+from pylib.utils import test_filter
+
+
+class ParseFilterFileTest(unittest.TestCase):
+
+  def testParseFilterFile_commentsAndBlankLines(self):
+    input_lines = [
+      'positive1',
+      '# comment',
+      'positive2  # Another comment',
+      '',
+      'positive3'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2', 'positive3'], []
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_onlyPositive(self):
+    input_lines = [
+      'positive1',
+      'positive2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2'], []
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_onlyNegative(self):
+    input_lines = [
+      '-negative1',
+      '-negative2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = [], ['negative1', 'negative2']
+    self.assertEquals(expected, actual)
+
+  def testParseFilterFile_positiveAndNegative(self):
+    input_lines = [
+      'positive1',
+      'positive2',
+      '-negative1',
+      '-negative2'
+    ]
+    actual = test_filter.ParseFilterFile(input_lines)
+    expected = ['positive1', 'positive2'], ['negative1', 'negative2']
+    self.assertEquals(expected, actual)
+
+
+class InitializeFilterFromArgsTest(unittest.TestCase):
+
+  def testInitializeBasicFilter(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--test-filter',
+        'FooTest.testFoo:BarTest.testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testInitializeJavaStyleFilter(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--test-filter',
+        'FooTest#testFoo:BarTest#testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testInitializeBasicIsolatedScript(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    args = parser.parse_args([
+        '--isolated-script-test-filter',
+        'FooTest.testFoo::BarTest.testBar'])
+    expected = 'FooTest.testFoo:BarTest.testBar'
+    actual = test_filter.InitializeFilterFromArgs(args)
+    self.assertEquals(actual, expected)
+
+  def testFilterArgWithPositiveFilterInFilterFile(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter=-negative1',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = 'positive1:positive2-negative1:negative2:negative3'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+  def testFilterFileWithPositiveFilterInFilterArg(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('-negative2\n-negative3\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter',
+          'positive1:positive2-negative1',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = 'positive1:positive2-negative1:negative2:negative3'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+  def testPositiveFilterInBothFileAndArg(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('positive1\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter',
+          'positive2',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      with self.assertRaises(test_filter.ConflictingPositiveFiltersException):
+        test_filter.InitializeFilterFromArgs(args)
+
+  def testFilterArgWithFilterFileAllNegative(self):
+    parser = argparse.ArgumentParser()
+    test_filter.AddFilterOptions(parser)
+    with tempfile.NamedTemporaryFile() as tmp_file:
+      tmp_file.write('-negative3\n-negative4\n')
+      tmp_file.seek(0)
+      args = parser.parse_args([
+          '--test-filter=-negative1:negative2',
+          '--test-launcher-filter-file',
+          tmp_file.name])
+      expected = '-negative1:negative2:negative3:negative4'
+      actual = test_filter.InitializeFilterFromArgs(args)
+      self.assertEquals(actual, expected)
+
+
+class AppendPatternsToFilterTest(unittest.TestCase):
+
+  def testAllEmpty(self):
+    expected = ''
+    actual = test_filter.AppendPatternsToFilter('', [], [])
+    self.assertEquals(actual, expected)
+
+  def testAppendOnlyPositiveToEmptyFilter(self):
+    expected = 'positive'
+    actual = test_filter.AppendPatternsToFilter('', ['positive'])
+    self.assertEquals(actual, expected)
+
+  def testAppendOnlyNegativeToEmptyFilter(self):
+    expected = '-negative'
+    actual = test_filter.AppendPatternsToFilter('',
+                                                negative_patterns=['negative'])
+    self.assertEquals(actual, expected)
+
+  def testAppendToEmptyFilter(self):
+    expected = 'positive-negative'
+    actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
+    self.assertEquals(actual, expected)
+
+  def testAppendToPositiveOnlyFilter(self):
+    expected = 'positive1:positive2-negative'
+    actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
+                                                ['negative'])
+    self.assertEquals(actual, expected)
+
+  def testAppendToNegativeOnlyFilter(self):
+    expected = 'positive-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
+                                                ['negative2'])
+    self.assertEquals(actual, expected)
+
+  def testAppendPositiveToFilter(self):
+    expected = 'positive1:positive2-negative1'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                ['positive2'])
+    self.assertEquals(actual, expected)
+
+  def testAppendNegativeToFilter(self):
+    expected = 'positive1-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                negative_patterns=['negative2'])
+    self.assertEquals(actual, expected)
+
+  def testAppendBothToFilter(self):
+    expected = 'positive1:positive2-negative1:negative2'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                positive_patterns=['positive2'],
+                                                negative_patterns=['negative2'])
+    self.assertEquals(actual, expected)
+
+  def testAppendMultipleToFilter(self):
+    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                ['positive2', 'positive3'],
+                                                ['negative2', 'negative3'])
+    self.assertEquals(actual, expected)
+
+  def testRepeatedAppendToFilter(self):
+    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+    filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
+                                                       ['positive2'],
+                                                       ['negative2'])
+    actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
+                                                ['negative3'])
+    self.assertEquals(actual, expected)
+
+  def testAppendHashSeparatedPatternsToFilter(self):
+    expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
+    actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+                                                ['positive#test2'],
+                                                ['negative#test2'])
+    self.assertEquals(actual, expected)
+
+
+class HasPositivePatternsTest(unittest.TestCase):
+
+  def testEmpty(self):
+    expected = False
+    actual = test_filter.HasPositivePatterns('')
+    self.assertEquals(actual, expected)
+
+  def testHasOnlyPositive(self):
+    expected = True
+    actual = test_filter.HasPositivePatterns('positive')
+    self.assertEquals(actual, expected)
+
+  def testHasOnlyNegative(self):
+    expected = False
+    actual = test_filter.HasPositivePatterns('-negative')
+    self.assertEquals(actual, expected)
+
+  def testHasBoth(self):
+    expected = True
+    actual = test_filter.HasPositivePatterns('positive-negative')
+    self.assertEquals(actual, expected)
+
+
+if __name__ == '__main__':
+  sys.exit(unittest.main())
diff --git a/src/build/android/pylib/utils/time_profile.py b/src/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000..094799c
--- /dev/null
+++ b/src/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description='operation'):
+    self._starttime = None
+    self._endtime = None
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+    self._endtime = None
+
+  def GetDelta(self):
+    """Returns the rounded delta.
+
+    Also stops the timer if Stop() has not already been called.
+    """
+    if self._endtime is None:
+      self.Stop(log=False)
+    delta = self._endtime - self._starttime
+    delta = round(delta, 2) if delta < 10 else round(delta, 1)
+    return delta
+
+  def LogResult(self):
+    """Logs the result."""
+    logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
+
+  def Stop(self, log=True):
+    """Stop profiling.
+
+    Args:
+      log: Log the delta (defaults to true).
+    """
+    self._endtime = time.time()
+    if log:
+      self.LogResult()
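+
+
+# Editor's note: illustrative usage (not part of the imported file). The
+# timer starts on construction; Stop() logs the rounded elapsed time:
+#
+#   timer = TimeProfile('apk install')
+#   ...  # do the work being measured
+#   timer.Stop()  # logs e.g. '3.52 seconds to perform apk install'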
diff --git a/src/build/android/pylib/utils/xvfb.py b/src/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000..cb9d50e
--- /dev/null
+++ b/src/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+  """Return True if on Linux; else False."""
+  return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not _IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
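+
+
+# Editor's note: illustrative usage (not part of the imported file).
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()  # no-op off Linux; on Linux sets DISPLAY=:9
+#   try:
+#     ...  # run code that needs an X display
+#   finally:
+#     xvfb.Stop()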
diff --git a/src/build/android/pylib/valgrind_tools.py b/src/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..4689dc3
--- /dev/null
+++ b/src/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,115 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=R0201
+
+from __future__ import print_function
+
+import logging
+import sys
+
+from devil.android import device_errors
+from devil.android.valgrind_tools import base_tool
+
+
+def SetChromeTimeoutScale(device, scale):
+  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+  path = '/data/local/tmp/chrome_timeout_scale'
+  if not scale or scale == 1.0:
+    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+    device.RemovePath(path, force=True, as_root=True)
+  else:
+    device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class AddressSanitizerTool(base_tool.BaseTool):
+  """AddressSanitizer tool."""
+
+  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable memcmp overlap check. There are blobs (GL drivers) on some
+  # Android devices that use memcmp on overlapping regions; nothing we can
+  # do about that.
+  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+  def __init__(self, device):
+    super(AddressSanitizerTool, self).__init__()
+    self._device = device
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies ASan tools to the device."""
+    del device
+
+  def GetTestWrapper(self):
+    return AddressSanitizerTool.WRAPPER_NAME
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper for utilities, such as forwarder.
+
+    AddressSanitizer wrapper must be added to all instrumented binaries,
+    including forwarder and the like. This can be removed if such binaries
+    were built without instrumentation.
+    """
+    return self.GetTestWrapper()
+
+  def SetupEnvironment(self):
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Try to set the timeout scale anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetTimeoutScale(self):
+    # Very slow startup.
+    return 20.0
+
+
+TOOL_REGISTRY = {
+    'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return base_tool.BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(device)
+  else:
+    print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join(
+        sorted(TOOL_REGISTRY.keys()))))
+    sys.exit(1)
+
+
+def PushFilesForTool(tool_name, device):
+  """Pushes the files required for |tool_name| to |device|.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  """
+  if not tool_name:
+    return
+
+  clazz = TOOL_REGISTRY.get(tool_name)
+  if clazz:
+    clazz.CopyFiles(device)
+  else:
+    print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join(
+        sorted(TOOL_REGISTRY.keys()))))
+    sys.exit(1)
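+
+
+# Editor's note: illustrative usage (not part of the imported file); `device`
+# is assumed to be a devil DeviceUtils instance.
+#
+#   tool = CreateTool('asan', device)  # -> AddressSanitizerTool
+#   PushFilesForTool('asan', device)   # no-op for asan (CopyFiles is empty)
+#   tool.SetupEnvironment()            # roots device, sets 20x timeout scale
+#   ...                                # run tests via tool.GetTestWrapper()
+#   tool.CleanUpEnvironment()          # removes the timeout-scale file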
diff --git a/src/build/android/pylintrc b/src/build/android/pylintrc
new file mode 100644
index 0000000..2a721bf
--- /dev/null
+++ b/src/build/android/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/src/build/android/resource_sizes.gni b/src/build/android/resource_sizes.gni
new file mode 100644
index 0000000..2c91749
--- /dev/null
+++ b/src/build/android/resource_sizes.gni
@@ -0,0 +1,100 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/internal_rules.gni")
+
+# Generates a script in the bin directory that runs
+# //build/android/resource_sizes.py against the provided apk.
+#
+# Exactly one of apk_name or file_path should be provided.
+#
+# Variables:
+#   apk_name: The name of the apk, without the extension.
+#   file_path: The path to the apk or .minimal.apks.
+#   trichrome_chrome_path: The path to chrome apk or .minimal.apks.
+#   trichrome_webview_path: The path to webview apk or .minimal.apks.
+#   trichrome_library_path: The path to library apk or .minimal.apks.
+template("android_resource_sizes_test") {
+  generate_android_wrapper(target_name) {
+    forward_variables_from(invoker, [ "data_deps" ])
+    executable = "//build/android/resource_sizes.py"
+    wrapper_script = "$root_out_dir/bin/run_${target_name}"
+
+    assert(defined(invoker.apk_name) != defined(invoker.file_path),
+           "Exactly one of apk_name or file_path should be provided.")
+
+    deps = [ "//build/android:resource_sizes_py" ]
+    executable_args = [
+      "--output-format",
+      "histograms",
+      "--chromium-output-directory",
+      "@WrappedPath(.)",
+    ]
+
+    data = [
+      "//.vpython",
+      "//.vpython3",
+    ]
+    if (defined(invoker.trichrome_chrome_path)) {
+      data += [
+        invoker.trichrome_chrome_path,
+        invoker.trichrome_webview_path,
+        invoker.trichrome_library_path,
+      ]
+      _rebased_chrome =
+          rebase_path(invoker.trichrome_chrome_path, root_build_dir)
+      _rebased_webview =
+          rebase_path(invoker.trichrome_webview_path, root_build_dir)
+      _rebased_library =
+          rebase_path(invoker.trichrome_library_path, root_build_dir)
+
+      # apk_name used only as test suite name. Not a path in this case.
+      executable_args += [
+        "--trichrome-chrome",
+        "@WrappedPath(${_rebased_chrome})",
+        "--trichrome-webview",
+        "@WrappedPath(${_rebased_webview})",
+        "--trichrome-library",
+        "@WrappedPath(${_rebased_library})",
+        "${invoker.apk_name}",
+      ]
+    } else {
+      if (defined(invoker.apk_name)) {
+        _file_path = "$root_out_dir/apks/${invoker.apk_name}.apk"
+        data += [ "$root_out_dir/arsc/apks/${invoker.apk_name}.ap_" ]
+      } else if (defined(invoker.file_path)) {
+        _file_path = invoker.file_path
+      }
+      data += [ _file_path ]
+      _rebased_file_path = rebase_path(_file_path, root_build_dir)
+      executable_args += [ "@WrappedPath(${_rebased_file_path})" ]
+    }
+  }
+}
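+
+# Editor's note: an illustrative invocation (not part of the imported file);
+# the target and apk names below are hypothetical.
+#
+#   android_resource_sizes_test("resource_sizes_chrome_public_apk") {
+#     apk_name = "ChromePublic"
+#     data_deps = [ ":chrome_public_apk" ]
+#   }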
+
+# Generates a "size config JSON file" to specify data to be passed from recipes
+# to Python scripts for binary size measurement on bots. All filenames are
+# relative to $root_build_dir. The resulting JSON file is written to
+# "$root_build_dir/config/${invoker.name}_size_config.json".
+#
+# Variables:
+#   name: The base name for the generated size config JSON file.
+#   mapping_files: List of mapping files.
+#   to_resource_sizes_py: Scope containing data to pass to resource_sizes.py,
+#     processed by generate_commit_size_analysis.py.
+#   supersize_input_file: Main input for SuperSize.
+template("android_size_bot_config") {
+  _full_target_name = get_label_info(target_name, "label_no_toolchain")
+  _out_json = {
+    _HEADER = "Written by build target '${_full_target_name}'"
+    forward_variables_from(invoker,
+                           [
+                             "mapping_files",
+                             "to_resource_sizes_py",
+                             "supersize_input_file",
+                           ])
+  }
+  _output_json_path = "$root_build_dir/config/${invoker.name}_size_config.json"
+  write_file(_output_json_path, _out_json, "json")
+}
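+
+# Editor's note: an illustrative invocation (not part of the imported file);
+# the names and paths below are hypothetical. This would write
+# "$root_build_dir/config/monochrome_size_config.json".
+#
+#   android_size_bot_config("monochrome_size_config") {
+#     name = "monochrome"
+#     mapping_files = [ "apks/Monochrome.apk.mapping" ]
+#     supersize_input_file = "apks/Monochrome.minimal.apks"
+#   }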
diff --git a/src/build/android/resource_sizes.py b/src/build/android/resource_sizes.py
new file mode 100755
index 0000000..c592970
--- /dev/null
+++ b/src/build/android/resource_sizes.py
@@ -0,0 +1,910 @@
+#!/usr/bin/env vpython
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Reports binary size metrics for an APK.
+
+More information at //docs/speed/binary_size/metrics.md.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+from contextlib import contextmanager
+import json
+import logging
+import os
+import posixpath
+import re
+import struct
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import devil_chromium
+from devil.android.sdk import build_tools
+from devil.utils import cmd_helper
+from devil.utils import lazy
+import method_count
+from pylib import constants
+from pylib.constants import host_paths
+
+_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
+_BUILD_UTILS_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+
+with host_paths.SysPath(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build')):
+  import gn_helpers  # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+  import perf_tests_results_helper  # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+  from tracing.value import convert_chart_json  # pylint: disable=import-error
+
+with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+  from util import build_utils  # pylint: disable=import-error
+  from util import zipalign  # pylint: disable=import-error
+
+
+zipalign.ApplyZipFileZipAlignFix()
+
+# Captures an entire config from aapt output.
+_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:'
+# Matches string resource entries from aapt output.
+_AAPT_ENTRY_RE = re.compile(
+    r'resource (?P<id>\w{10}) [\w\.]+:string/.*?"(?P<val>.+?)"', re.DOTALL)
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes',
+    'benchmark_description': 'APK resource size information.',
+    'trace_rerun_options': [],
+    'charts': {}
+}
+# Macro definitions look like (something, 123) when
+# enable_resource_allowlist_generation=true.
+_RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+).* (?P<id>\d+)\)?$')
+_RE_NON_LANGUAGE_PAK = re.compile(r'^assets/.*(resources|percent)\.pak$')
+_READELF_SIZES_METRICS = {
+    'text': ['.text'],
+    'data': ['.data', '.rodata', '.data.rel.ro', '.data.rel.ro.local'],
+    'relocations': ['.rel.dyn', '.rel.plt', '.rela.dyn', '.rela.plt'],
+    'unwind': [
+        '.ARM.extab', '.ARM.exidx', '.eh_frame', '.eh_frame_hdr',
+        '.ARM.exidxsentinel_section_after_text'
+    ],
+    'symbols': [
+        '.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
+        '.got.plt', '.hash', '.gnu.hash'
+    ],
+    'other': [
+        '.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
+        '.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
+        '.ARM.attributes', '.note.gnu.build-id', '.gnu.version',
+        '.gnu.version_d', '.gnu.version_r', '.interp', '.gcc_except_table'
+    ]
+}
+
+
+class _AccumulatingReporter(object):
+  def __init__(self):
+    self._combined_metrics = collections.defaultdict(int)
+
+  def __call__(self, graph_title, trace_title, value, units):
+    self._combined_metrics[(graph_title, trace_title, units)] += value
+
+  def DumpReports(self, report_func):
+    for (graph_title, trace_title,
+         units), value in sorted(self._combined_metrics.iteritems()):
+      report_func(graph_title, trace_title, value, units)
+
+
+class _ChartJsonReporter(_AccumulatingReporter):
+  def __init__(self, chartjson):
+    super(_ChartJsonReporter, self).__init__()
+    self._chartjson = chartjson
+    self.trace_title_prefix = ''
+
+  def __call__(self, graph_title, trace_title, value, units):
+    super(_ChartJsonReporter, self).__call__(graph_title, trace_title, value,
+                                             units)
+
+    perf_tests_results_helper.ReportPerfResult(
+        self._chartjson, graph_title, self.trace_title_prefix + trace_title,
+        value, units)
+
+  def SynthesizeTotals(self, unique_method_count):
+    for tup, value in sorted(self._combined_metrics.iteritems()):
+      graph_title, trace_title, units = tup
+      if trace_title == 'unique methods':
+        value = unique_method_count
+      perf_tests_results_helper.ReportPerfResult(self._chartjson, graph_title,
+                                                 'Combined_' + trace_title,
+                                                 value, units)
+
+
+def _PercentageDifference(a, b):
+  if a == 0:
+    return 0
+  return float(b - a) / a
+
+
+def _ReadZipInfoExtraFieldLength(zip_file, zip_info):
+  """Reads the value of |extraLength| from |zip_info|'s local file header.
+
+  |zip_info| has an |extra| field, but it's read from the central directory.
+  Android's zipalign tool sets the extra field only in local file headers.
+  """
+  # Refer to https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
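+  # Editor's note: the local file header is 30 bytes long; the little-endian
+  # uint16 "extra field length" sits at offset 28 from header_offset.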
+  zip_file.fp.seek(zip_info.header_offset + 28)
+  return struct.unpack('<H', zip_file.fp.read(2))[0]
+
+
+def _MeasureApkSignatureBlock(zip_file):
+  """Measures the size of the v2 / v3 signing block.
+
+  Refer to: https://source.android.com/security/apksigning/v2
+  """
+  # Seek to "end of central directory" struct.
+  eocd_offset_from_end = -22 - len(zip_file.comment)
+  zip_file.fp.seek(eocd_offset_from_end, os.SEEK_END)
+  assert zip_file.fp.read(4) == b'PK\005\006', (
+      'failed to find end-of-central-directory')
+
+  # Read out the "start of central directory" offset.
+  zip_file.fp.seek(eocd_offset_from_end + 16, os.SEEK_END)
+  start_of_central_directory = struct.unpack('<I', zip_file.fp.read(4))[0]
+
+  # Compute the offset after the last zip entry.
+  last_info = max(zip_file.infolist(), key=lambda i: i.header_offset)
+  last_header_size = (30 + len(last_info.filename) +
+                      _ReadZipInfoExtraFieldLength(zip_file, last_info))
+  end_of_last_file = (last_info.header_offset + last_header_size +
+                      last_info.compress_size)
+  return start_of_central_directory - end_of_last_file
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+  return cmd_helper.GetCmdOutput(
+      [tool_prefix + 'readelf'] + options + [so_path])
+
+
+def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
+  with Unzip(apk_path, filename=lib_path) as extracted_lib_path:
+    grouped_section_sizes = collections.defaultdict(int)
+    no_bits_section_sizes, section_sizes = _CreateSectionNameSizeMap(
+        extracted_lib_path, tool_prefix)
+    for group_name, section_names in _READELF_SIZES_METRICS.iteritems():
+      for section_name in section_names:
+        if section_name in section_sizes:
+          grouped_section_sizes[group_name] += section_sizes.pop(section_name)
+
+    # Consider all NOBITS sections as .bss.
+    grouped_section_sizes['bss'] = sum(
+        v for v in no_bits_section_sizes.itervalues())
+
+    # Group any unknown section headers into the "other" group.
+    for section_header, section_size in section_sizes.iteritems():
+      sys.stderr.write('Unknown elf section header: %s\n' % section_header)
+      grouped_section_sizes['other'] += section_size
+
+    return grouped_section_sizes
+
+
+def _CreateSectionNameSizeMap(so_path, tool_prefix):
+  stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix)
+  section_sizes = {}
+  no_bits_section_sizes = {}
+  # Matches  [ 2] .hash HASH 00000000006681f0 0001f0 003154 04   A  3   0  8
+  for match in re.finditer(r'\[[\s\d]+\] (\..*)$', stdout, re.MULTILINE):
+    items = match.group(1).split()
+    target = no_bits_section_sizes if items[1] == 'NOBITS' else section_sizes
+    target[items[0]] = int(items[4], 16)
+
+  return no_bits_section_sizes, section_sizes
+
+
+def _ParseManifestAttributes(apk_path):
+  # Check if the manifest specifies whether or not to extract native libs.
+  output = cmd_helper.GetCmdOutput([
+      _AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml'])
+
+  def parse_attr(name):
+    # android:extractNativeLibs(0x010104ea)=(type 0x12)0x0
+    # android:extractNativeLibs(0x010104ea)=(type 0x12)0xffffffff
+    # dist:onDemand=(type 0x12)0xffffffff
+    m = re.search(name + r'(?:\(.*?\))?=\(type .*?\)(\w+)', output)
+    return m and int(m.group(1), 16)
+
+  skip_extract_lib = bool(parse_attr('android:extractNativeLibs'))
+  sdk_version = parse_attr('android:minSdkVersion')
+  is_feature_split = parse_attr('android:isFeatureSplit')
+  # Can use <dist:on-demand>, or <module dist:onDemand="true">.
+  on_demand = parse_attr('dist:onDemand') or 'dist:on-demand' in output
+  on_demand = bool(on_demand and is_feature_split)
+
+  return sdk_version, skip_extract_lib, on_demand
+
+
+def _NormalizeLanguagePaks(translations, factor):
+  english_pak = translations.FindByPattern(r'.*/en[-_][Uu][Ss]\.l?pak')
+  num_translations = translations.GetNumEntries()
+  ret = 0
+  if english_pak:
+    ret -= translations.ComputeZippedSize()
+    ret += int(english_pak.compress_size * num_translations * factor)
+  return ret
+
+
+def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations,
+                            out_dir):
+  """Estimates the expected overhead of untranslated strings in resources.arsc.
+
+  See http://crbug.com/677966 for why this is necessary.
+  """
+  # If there are multiple .arsc files, use the resource packaged APK instead.
+  if num_arsc_files > 1:
+    if not out_dir:
+      return -float('inf')
+    ap_name = os.path.basename(apk_path).replace('.apk', '.ap_')
+    ap_path = os.path.join(out_dir, 'arsc/apks', ap_name)
+    if not os.path.exists(ap_path):
+      raise Exception('Missing expected file: %s, try rebuilding.' % ap_path)
+    apk_path = ap_path
+
+  aapt_output = _RunAaptDumpResources(apk_path)
+  # en-rUS is in the default config and may be cluttered with non-translatable
+  # strings, so en-rGB is a better baseline for finding missing translations.
+  en_strings = _CreateResourceIdValueMap(aapt_output, 'en-rGB')
+  fr_strings = _CreateResourceIdValueMap(aapt_output, 'fr')
+
+  # en-US and en-GB will never be translated.
+  config_count = num_translations - 2
+
+  size = 0
+  for res_id, string_val in en_strings.iteritems():
+    if string_val == fr_strings[res_id]:
+      string_size = len(string_val)
+      # 7 bytes is the per-entry overhead (not specific to any string). See
+      # https://android.googlesource.com/platform/frameworks/base.git/+/android-4.2.2_r1/tools/aapt/StringPool.cpp#414.
+      # The 1.5 factor was determined experimentally and is meant to account for
+      # other languages generally having longer strings than English.
+      size += config_count * (7 + string_size * 1.5)
+
+  return int(size)
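+
+# Editor's note: illustrative arithmetic (not part of the imported file).
+# One 20-char untranslated string across 50 translations adds an estimated
+# (50 - 2) * (7 + 20 * 1.5) = 1776 bytes.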
+
+
+def _CreateResourceIdValueMap(aapt_output, lang):
+  """Return a map of resource ids to string values for the given |lang|."""
+  config_re = _AAPT_CONFIG_PATTERN % lang
+  return {entry.group('id'): entry.group('val')
+          for config_section in re.finditer(config_re, aapt_output, re.DOTALL)
+          for entry in re.finditer(_AAPT_ENTRY_RE, config_section.group(0))}
+
+
+def _RunAaptDumpResources(apk_path):
+  cmd = [_AAPT_PATH.read(), 'dump', '--values', 'resources', apk_path]
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running aapt command: "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+
+class _FileGroup(object):
+  """Represents a category that apk files can fall into."""
+
+  def __init__(self, name):
+    self.name = name
+    self._zip_infos = []
+    self._extracted_multipliers = []
+
+  def AddZipInfo(self, zip_info, extracted_multiplier=0):
+    self._zip_infos.append(zip_info)
+    self._extracted_multipliers.append(extracted_multiplier)
+
+  def AllEntries(self):
+    return iter(self._zip_infos)
+
+  def GetNumEntries(self):
+    return len(self._zip_infos)
+
+  def FindByPattern(self, pattern):
+    return next((i for i in self._zip_infos if re.match(pattern, i.filename)),
+                None)
+
+  def FindLargest(self):
+    if not self._zip_infos:
+      return None
+    return max(self._zip_infos, key=lambda i: i.file_size)
+
+  def ComputeZippedSize(self):
+    return sum(i.compress_size for i in self._zip_infos)
+
+  def ComputeUncompressedSize(self):
+    return sum(i.file_size for i in self._zip_infos)
+
+  def ComputeExtractedSize(self):
+    ret = 0
+    for zi, multiplier in zip(self._zip_infos, self._extracted_multipliers):
+      ret += zi.file_size * multiplier
+    return ret
+
+  def ComputeInstallSize(self):
+    return self.ComputeExtractedSize() + self.ComputeZippedSize()
+
+
+def _AnalyzeInternal(apk_path,
+                     sdk_version,
+                     report_func,
+                     dex_stats_collector,
+                     out_dir,
+                     tool_prefix,
+                     apks_path=None,
+                     split_name=None):
+  """Analyse APK to determine size contributions of different file classes.
+
+  Returns: Normalized APK size.
+  """
+  dex_stats_collector.CollectFromZip(split_name or '', apk_path)
+  file_groups = []
+
+  def make_group(name):
+    group = _FileGroup(name)
+    file_groups.append(group)
+    return group
+
+  def has_no_extension(filename):
+    return os.path.splitext(filename)[1] == ''
+
+  native_code = make_group('Native code')
+  java_code = make_group('Java code')
+  native_resources_no_translations = make_group('Native resources (no l10n)')
+  translations = make_group('Native resources (l10n)')
+  stored_translations = make_group('Native resources stored (l10n)')
+  icu_data = make_group('ICU (i18n library) data')
+  v8_snapshots = make_group('V8 Snapshots')
+  png_drawables = make_group('PNG drawables')
+  res_directory = make_group('Non-compiled Android resources')
+  arsc = make_group('Compiled Android resources')
+  metadata = make_group('Package metadata')
+  unknown = make_group('Unknown files')
+  notices = make_group('licenses.notice file')
+  unwind_cfi = make_group('unwind_cfi (dev and canary only)')
+
+  with zipfile.ZipFile(apk_path, 'r') as apk:
+    apk_contents = apk.infolist()
+    # Account for zipalign overhead that exists in local file header.
+    zipalign_overhead = sum(
+        _ReadZipInfoExtraFieldLength(apk, i) for i in apk_contents)
+    # Account for zipalign overhead that exists in central directory header.
+    # Happens when python aligns entries in apkbuilder.py, but does not
+    # exist when using Android's zipalign. E.g. for bundle .apks files.
+    zipalign_overhead += sum(len(i.extra) for i in apk_contents)
+    signing_block_size = _MeasureApkSignatureBlock(apk)
+
+  _, skip_extract_lib, _ = _ParseManifestAttributes(apk_path)
+
+  # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
+  # L, M: ART - .odex file is compiled version of the dex file (~4x).
+  # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
+  #    shared apps (~4x).
+  # Actual multipliers calculated using "apk_operations.py disk-usage".
+  # Will need to update multipliers once apk obfuscation is enabled.
+  # E.g. with obfuscation, the 4.04 changes to 4.46.
+  speed_profile_dex_multiplier = 1.17
+  orig_filename = apks_path or apk_path
+  is_webview = 'WebView' in orig_filename
+  is_monochrome = 'Monochrome' in orig_filename
+  is_library = 'Library' in orig_filename
+  is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview
+                                         or is_library)
+  # Dex decompression overhead varies by Android version.
+  if sdk_version < 21:
+    # JellyBean & KitKat
+    dex_multiplier = 1.16
+  elif sdk_version < 24:
+    # Lollipop & Marshmallow
+    dex_multiplier = 4.04
+  elif is_shared_apk:
+    # Oreo and above, compilation_filter=speed
+    dex_multiplier = 4.04
+  else:
+    # Oreo and above, compilation_filter=speed-profile
+    dex_multiplier = speed_profile_dex_multiplier
+
+  total_apk_size = os.path.getsize(apk_path)
+  for member in apk_contents:
+    filename = member.filename
+    if filename.endswith('/'):
+      continue
+    if filename.endswith('.so'):
+      basename = posixpath.basename(filename)
+      should_extract_lib = not skip_extract_lib and basename.startswith('lib')
+      native_code.AddZipInfo(
+          member, extracted_multiplier=int(should_extract_lib))
+    elif filename.endswith('.dex'):
+      java_code.AddZipInfo(member, extracted_multiplier=dex_multiplier)
+    elif re.search(_RE_NON_LANGUAGE_PAK, filename):
+      native_resources_no_translations.AddZipInfo(member)
+    elif filename.endswith('.pak') or filename.endswith('.lpak'):
+      compressed = member.compress_type != zipfile.ZIP_STORED
+      bucket = translations if compressed else stored_translations
+      extracted_multiplier = 0
+      if compressed:
+        extracted_multiplier = int('en_' in filename or 'en-' in filename)
+      bucket.AddZipInfo(member, extracted_multiplier=extracted_multiplier)
+    elif 'icu' in filename and filename.endswith('.dat'):
+      icu_data.AddZipInfo(member)
+    elif filename.endswith('.bin'):
+      v8_snapshots.AddZipInfo(member)
+    elif filename.startswith('res/'):
+      if (filename.endswith('.png') or filename.endswith('.webp')
+          or has_no_extension(filename)):
+        png_drawables.AddZipInfo(member)
+      else:
+        res_directory.AddZipInfo(member)
+    elif filename.endswith('.arsc'):
+      arsc.AddZipInfo(member)
+    elif filename.startswith('META-INF') or filename in (
+        'AndroidManifest.xml', 'assets/webapk_dex_version.txt'):
+      metadata.AddZipInfo(member)
+    elif filename.endswith('.notice'):
+      notices.AddZipInfo(member)
+    elif filename.startswith('assets/unwind_cfi'):
+      unwind_cfi.AddZipInfo(member)
+    else:
+      unknown.AddZipInfo(member)
+
+  if apks_path:
+    # We're mostly focused on size of Chrome for non-English locales, so assume
+    # Hindi (arbitrarily chosen) locale split is installed.
+    with zipfile.ZipFile(apks_path) as z:
+      subpath = 'splits/{}-hi.apk'.format(split_name)
+      if subpath in z.namelist():
+        hindi_apk_info = z.getinfo(subpath)
+        total_apk_size += hindi_apk_info.file_size
+      else:
+        assert split_name != 'base', 'splits/base-hi.apk should always exist'
+
+  total_install_size = total_apk_size
+  total_install_size_android_go = total_apk_size
+  zip_overhead = total_apk_size
+
+  for group in file_groups:
+    actual_size = group.ComputeZippedSize()
+    install_size = group.ComputeInstallSize()
+    uncompressed_size = group.ComputeUncompressedSize()
+    extracted_size = group.ComputeExtractedSize()
+    total_install_size += extracted_size
+    zip_overhead -= actual_size
+
+    report_func('Breakdown', group.name + ' size', actual_size, 'bytes')
+    report_func('InstallBreakdown', group.name + ' size', int(install_size),
+                'bytes')
+    # Only a few metrics are compressed in the first place.
+    # To avoid over-reporting, track uncompressed size only for compressed
+    # entries.
+    if uncompressed_size != actual_size:
+      report_func('Uncompressed', group.name + ' size', uncompressed_size,
+                  'bytes')
+
+    if group is java_code and is_shared_apk:
+      # Updates are compiled using quicken, but system image uses speed-profile.
+      extracted_size = int(uncompressed_size * speed_profile_dex_multiplier)
+      total_install_size_android_go += extracted_size
+      report_func('InstallBreakdownGo', group.name + ' size',
+                  actual_size + extracted_size, 'bytes')
+    elif group is translations and apks_path:
+      # Assume Hindi rather than English (accounted for above in total_apk_size)
+      total_install_size_android_go += actual_size
+    else:
+      total_install_size_android_go += extracted_size
+
+  # Per-file zip overhead is caused by:
+  # * 30 byte entry header + len(file name)
+  # * 46 byte central directory entry + len(file name)
+  # * 0-3 bytes for zipalign.
+  report_func('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
+  report_func('InstallSize', 'APK size', total_apk_size, 'bytes')
+  report_func('InstallSize', 'Estimated installed size',
+              int(total_install_size), 'bytes')
+  if is_shared_apk:
+    report_func('InstallSize', 'Estimated installed size (Android Go)',
+                int(total_install_size_android_go), 'bytes')
+  transfer_size = _CalculateCompressedSize(apk_path)
+  report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')
+
+  # Size of main dex vs remaining.
+  main_dex_info = java_code.FindByPattern('classes.dex')
+  if main_dex_info:
+    main_dex_size = main_dex_info.file_size
+    report_func('Specifics', 'main dex size', main_dex_size, 'bytes')
+    secondary_size = java_code.ComputeUncompressedSize() - main_dex_size
+    report_func('Specifics', 'secondary dex size', secondary_size, 'bytes')
+
+  main_lib_info = native_code.FindLargest()
+  native_code_unaligned_size = 0
+  for lib_info in native_code.AllEntries():
+    section_sizes = _ExtractLibSectionSizesFromApk(apk_path, lib_info.filename,
+                                                   tool_prefix)
+    native_code_unaligned_size += sum(
+        v for k, v in section_sizes.iteritems() if k != 'bss')
+    # Size of main .so vs remaining.
+    if lib_info == main_lib_info:
+      main_lib_size = lib_info.file_size
+      report_func('Specifics', 'main lib size', main_lib_size, 'bytes')
+      secondary_size = native_code.ComputeUncompressedSize() - main_lib_size
+      report_func('Specifics', 'other lib size', secondary_size, 'bytes')
+
+      for metric_name, size in section_sizes.iteritems():
+        report_func('MainLibInfo', metric_name, size, 'bytes')
+
+  # Main metric that we want to monitor for jumps.
+  normalized_apk_size = total_apk_size
+  # unwind_cfi exists only in dev, canary, and non-channel builds.
+  normalized_apk_size -= unwind_cfi.ComputeZippedSize()
+  # Sections within .so files get 4kb aligned, so use section sizes rather than
+  # file size. Also gets rid of compression.
+  normalized_apk_size -= native_code.ComputeZippedSize()
+  normalized_apk_size += native_code_unaligned_size
+  # Normalized dex size: Size within the zip + size on disk for Android Go
+  # devices running Android O (which ~= uncompressed dex size).
+  # Use a constant compression factor to account for fluctuations.
+  normalized_apk_size -= java_code.ComputeZippedSize()
+  normalized_apk_size += java_code.ComputeUncompressedSize()
+  # Don't include zipalign overhead in normalized size, since it effectively
+  # causes size changes in files that precede aligned files to be rounded.
+  # For APKs where classes.dex directly precedes libchrome.so (the normal
+  # case), this causes small dex size changes to disappear into libchrome.so
+  # alignment.
+  normalized_apk_size -= zipalign_overhead
+  # Don't include the size of the apk's signing block because it can fluctuate
+  # by up to 4kb (from my non-scientific observations), presumably based on hash
+  # sizes.
+  normalized_apk_size -= signing_block_size
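+  # Editor's summary of the normalization above:
+  #   normalized = apk_size - unwind_cfi (zipped)
+  #                - native (zipped) + native (summed section sizes)
+  #                - dex (zipped) + dex (uncompressed)
+  #                - zipalign overhead - signing block size.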
+
+  # Unaligned size should be ~= uncompressed size or something is wrong.
+  # As of now, padding_fraction ~= .007
+  padding_fraction = -_PercentageDifference(
+      native_code.ComputeUncompressedSize(), native_code_unaligned_size)
+  # Ignore this check for small / no native code
+  if native_code.ComputeUncompressedSize() > 1000000:
+    assert 0 <= padding_fraction < .02, (
+        'Padding was: {} (file_size={}, sections_sum={})'.format(
+            padding_fraction, native_code.ComputeUncompressedSize(),
+            native_code_unaligned_size))
+
+  if apks_path:
+    # Locale normalization not needed when measuring only one locale.
+    # E.g. a change that adds 300 chars of untranslated strings would cause
+    # metric to be off by only 390 bytes (assuming a multiplier of 2.3 for
+    # Hindi).
+    pass
+  else:
+    # Avoid noise caused when strings change and translations haven't yet been
+    # updated.
+    num_translations = translations.GetNumEntries()
+    num_stored_translations = stored_translations.GetNumEntries()
+
+    if num_translations > 1:
+      # Multipliers found by looking at MonochromePublic.apk and seeing how much
+      # smaller en-US.pak is relative to the average locale.pak.
+      normalized_apk_size += _NormalizeLanguagePaks(translations, 1.17)
+    if num_stored_translations > 1:
+      normalized_apk_size += _NormalizeLanguagePaks(stored_translations, 1.43)
+    if num_translations + num_stored_translations > 1:
+      if num_translations == 0:
+        # WebView stores all locale paks uncompressed.
+        num_arsc_translations = num_stored_translations
+      else:
+        # Monochrome has more configurations than Chrome since it includes
+        # WebView (which supports more locales), but these should mostly be
+        # empty so ignore them here.
+        num_arsc_translations = num_translations
+      normalized_apk_size += _NormalizeResourcesArsc(apk_path,
+                                                     arsc.GetNumEntries(),
+                                                     num_arsc_translations,
+                                                     out_dir)
+
+  # It will be -Inf for .apk files with multiple .arsc files and no out_dir set.
+  if normalized_apk_size < 0:
+    sys.stderr.write('Skipping normalized_apk_size (no output directory set)\n')
+  else:
+    report_func('Specifics', 'normalized apk size', normalized_apk_size,
+                'bytes')
+  # The "file count" metric cannot be grouped with any other metrics when the
+  # end result is going to be uploaded to the perf dashboard in the HistogramSet
+  # format due to mixed units (bytes vs. zip entries) causing malformed
+  # summaries to be generated.
+  # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
+  # ever supported.
+  report_func('FileCount', 'file count', len(apk_contents), 'zip entries')
+
+  for info in unknown.AllEntries():
+    sys.stderr.write(
+        'Unknown entry: %s %d\n' % (info.filename, info.compress_size))
+  return normalized_apk_size
+
+
+def _CalculateCompressedSize(file_path):
+  CHUNK_SIZE = 256 * 1024
+  compressor = zlib.compressobj()
+  total_size = 0
+  with open(file_path, 'rb') as f:
+    for chunk in iter(lambda: f.read(CHUNK_SIZE), ''):
+      total_size += len(compressor.compress(chunk))
+  total_size += len(compressor.flush())
+  return total_size
+
+
+@contextmanager
+def Unzip(zip_file, filename=None):
+  """Utility for temporary use of a single file in a zip archive."""
+  with build_utils.TempDir() as unzipped_dir:
+    unzipped_files = build_utils.ExtractAll(
+        zip_file, unzipped_dir, True, pattern=filename)
+    if len(unzipped_files) == 0:
+      raise Exception(
+          '%s not found in %s' % (filename, zip_file))
+    yield unzipped_files[0]
+
+
+def _ConfigOutDirAndToolsPrefix(out_dir):
+  if out_dir:
+    constants.SetOutputDirectory(out_dir)
+  else:
+    try:
+      # Triggers auto-detection when CWD == output directory.
+      constants.CheckOutputDirectory()
+      out_dir = constants.GetOutDirectory()
+    except Exception:  # pylint: disable=broad-except
+      return out_dir, ''
+  build_vars = gn_helpers.ReadBuildVars(out_dir)
+  tool_prefix = os.path.join(out_dir, build_vars['android_tool_prefix'])
+  return out_dir, tool_prefix
+
+
+def _IterSplits(namelist):
+  for subpath in namelist:
+    # Looks for paths like splits/vr-master.apk, splits/vr-hi.apk.
+    name_parts = subpath.split('/')
+    if name_parts[0] == 'splits' and len(name_parts) == 2:
+      name_parts = name_parts[1].split('-')
+      if len(name_parts) == 2:
+        split_name, config_name = name_parts
+        if config_name == 'master.apk':
+          yield subpath, split_name
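+
+# Editor's note: illustrative example (not part of the imported file).
+#
+#   list(_IterSplits(['splits/base-master.apk', 'splits/vr-hi.apk']))
+#   => [('splits/base-master.apk', 'base')]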
+
+
+def _ExtractToTempFile(zip_obj, subpath, temp_file):
+  temp_file.seek(0)
+  temp_file.truncate()
+  temp_file.write(zip_obj.read(subpath))
+  temp_file.flush()
+
+
+def _AnalyzeApkOrApks(report_func, apk_path, args):
+  # Create DexStatsCollector here to track unique methods across base & chrome
+  # modules.
+  dex_stats_collector = method_count.DexStatsCollector()
+  out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir)
+
+  if apk_path.endswith('.apk'):
+    sdk_version, _, _ = _ParseManifestAttributes(apk_path)
+    _AnalyzeInternal(apk_path, sdk_version, report_func, dex_stats_collector,
+                     out_dir, tool_prefix)
+  elif apk_path.endswith('.apks'):
+    with tempfile.NamedTemporaryFile(suffix='.apk') as f:
+      with zipfile.ZipFile(apk_path) as z:
+        # Currently bundletool creates two apks when the .apks file is built
+        # without specifying an sdkVersion. Always measure the one with an
+        # uncompressed shared library.
+        try:
+          info = z.getinfo('splits/base-master_2.apk')
+        except KeyError:
+          info = z.getinfo('splits/base-master.apk')
+        _ExtractToTempFile(z, info.filename, f)
+        sdk_version, _, _ = _ParseManifestAttributes(f.name)
+
+        orig_report_func = report_func
+        report_func = _AccumulatingReporter()
+
+        def do_measure(split_name, on_demand):
+          logging.info('Measuring %s on_demand=%s', split_name, on_demand)
+          # Use no-op reporting functions to get normalized size for DFMs.
+          inner_report_func = report_func
+          inner_dex_stats_collector = dex_stats_collector
+          if on_demand:
+            inner_report_func = lambda *_: None
+            inner_dex_stats_collector = method_count.DexStatsCollector()
+
+          size = _AnalyzeInternal(f.name,
+                                  sdk_version,
+                                  inner_report_func,
+                                  inner_dex_stats_collector,
+                                  out_dir,
+                                  tool_prefix,
+                                  apks_path=apk_path,
+                                  split_name=split_name)
+          report_func('DFM_' + split_name, 'Size with hindi', size, 'bytes')
+
+        # Measure base outside of the loop since we've already extracted it.
+        do_measure('base', on_demand=False)
+
+        for subpath, split_name in _IterSplits(z.namelist()):
+          if split_name != 'base':
+            _ExtractToTempFile(z, subpath, f)
+            _, _, on_demand = _ParseManifestAttributes(f.name)
+            do_measure(split_name, on_demand=on_demand)
+
+        report_func.DumpReports(orig_report_func)
+        report_func = orig_report_func
+  else:
+    raise Exception('Unknown file type: ' + apk_path)
+
+  # Report dex stats outside of _AnalyzeInternal() so that the "unique methods"
+  # metric is not just the sum of the base and chrome modules.
+  for metric, count in dex_stats_collector.GetTotalCounts().items():
+    report_func('Dex', metric, count, 'entries')
+  report_func('Dex', 'unique methods',
+              dex_stats_collector.GetUniqueMethodCount(), 'entries')
+  report_func('DexCache', 'DexCache',
+              dex_stats_collector.GetDexCacheSize(pre_oreo=sdk_version < 26),
+              'bytes')
+
+  return dex_stats_collector
+
+
+def _ResourceSizes(args):
+  chartjson = _BASE_CHART.copy() if args.output_format else None
+  reporter = _ChartJsonReporter(chartjson)
+  # Create DexStatsCollector here to track unique methods across trichrome APKs.
+  dex_stats_collector = method_count.DexStatsCollector()
+
+  specs = [
+      ('Chrome_', args.trichrome_chrome),
+      ('WebView_', args.trichrome_webview),
+      ('Library_', args.trichrome_library),
+  ]
+  for prefix, path in specs:
+    if path:
+      reporter.trace_title_prefix = prefix
+      child_dex_stats_collector = _AnalyzeApkOrApks(reporter, path, args)
+      dex_stats_collector.MergeFrom(prefix, child_dex_stats_collector)
+
+  if any(path for _, path in specs):
+    reporter.SynthesizeTotals(dex_stats_collector.GetUniqueMethodCount())
+  else:
+    _AnalyzeApkOrApks(reporter, args.input, args)
+
+  if chartjson:
+    _DumpChartJson(args, chartjson)
+
+
+def _DumpChartJson(args, chartjson):
+  if args.output_file == '-':
+    json_file = sys.stdout
+  elif args.output_file:
+    json_file = open(args.output_file, 'w')
+  else:
+    results_path = os.path.join(args.output_dir, 'results-chart.json')
+    logging.critical('Dumping chartjson to %s', results_path)
+    json_file = open(results_path, 'w')
+
+  json.dump(chartjson, json_file, indent=2)
+
+  if json_file is not sys.stdout:
+    json_file.close()
+
+  # We would ideally generate a histogram set directly instead of generating
+  # chartjson then converting. However, perf_tests_results_helper is in
+  # //build, which doesn't seem to have any precedent for depending on
+  # anything in Catapult. This can probably be fixed, but since this doesn't
+  # need to be super fast or anything, converting is a good enough solution
+  # for the time being.
+  if args.output_format == 'histograms':
+    histogram_result = convert_chart_json.ConvertChartJson(results_path)
+    if histogram_result.returncode != 0:
+      raise Exception('chartjson conversion failed with error: ' +
+                      histogram_result.stdout)
+
+    histogram_path = os.path.join(args.output_dir, 'perf_results.json')
+    logging.critical('Dumping histograms to %s', histogram_path)
+    with open(histogram_path, 'w') as json_file:
+      json_file.write(histogram_result.stdout)
+
+
+def main():
+  argparser = argparse.ArgumentParser(description='Print APK size metrics.')
+  argparser.add_argument(
+      '--min-pak-resource-size',
+      type=int,
+      default=20 * 1024,
+      help='Minimum byte size of displayed pak resources.')
+  argparser.add_argument(
+      '--chromium-output-directory',
+      dest='out_dir',
+      type=os.path.realpath,
+      help='Location of the build artifacts.')
+  argparser.add_argument(
+      '--chartjson',
+      action='store_true',
+      help='DEPRECATED. Use --output-format=chartjson '
+      'instead.')
+  argparser.add_argument(
+      '--output-format',
+      choices=['chartjson', 'histograms'],
+      help='Output the results to a file in the given '
+      'format instead of printing the results.')
+  argparser.add_argument('--loadable_module', help='Obsolete (ignored).')
+
+  # Accepted to conform to the isolated script interface, but ignored.
+  argparser.add_argument(
+      '--isolated-script-test-filter', help=argparse.SUPPRESS)
+  argparser.add_argument(
+      '--isolated-script-test-perf-output',
+      type=os.path.realpath,
+      help=argparse.SUPPRESS)
+
+  output_group = argparser.add_mutually_exclusive_group()
+
+  output_group.add_argument(
+      '--output-dir', default='.', help='Directory to save chartjson to.')
+  output_group.add_argument(
+      '--output-file',
+      help='Path to output .json (replaces --output-dir). Works only for '
+      '--output-format=chartjson')
+  output_group.add_argument(
+      '--isolated-script-test-output',
+      type=os.path.realpath,
+      help='File to which results will be written in the '
+      'simplified JSON output format.')
+
+  argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
+  trichrome_group = argparser.add_argument_group(
+      'Trichrome inputs',
+      description='When specified, |input| is used only as the test suite name.')
+  trichrome_group.add_argument(
+      '--trichrome-chrome', help='Path to Trichrome Chrome .apks')
+  trichrome_group.add_argument(
+      '--trichrome-webview', help='Path to Trichrome WebView .apk(s)')
+  trichrome_group.add_argument(
+      '--trichrome-library', help='Path to Trichrome Library .apk')
+  args = argparser.parse_args()
+
+  devil_chromium.Initialize(output_directory=args.out_dir)
+
+  # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
+  if args.chartjson:
+    args.output_format = 'chartjson'
+
+  isolated_script_output = {'valid': False, 'failures': []}
+
+  test_name = 'resource_sizes (%s)' % os.path.basename(args.input)
+
+  if args.isolated_script_test_output:
+    args.output_dir = os.path.join(
+        os.path.dirname(args.isolated_script_test_output), test_name)
+    if not os.path.exists(args.output_dir):
+      os.makedirs(args.output_dir)
+
+  try:
+    _ResourceSizes(args)
+    isolated_script_output = {
+        'valid': True,
+        'failures': [],
+    }
+  finally:
+    if args.isolated_script_test_output:
+      results_path = os.path.join(args.output_dir, 'test_results.json')
+      with open(results_path, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+      with open(args.isolated_script_test_output, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/resource_sizes.pydeps b/src/build/android/resource_sizes.pydeps
new file mode 100644
index 0000000..d956f5b
--- /dev/null
+++ b/src/build/android/resource_sizes.pydeps
@@ -0,0 +1,58 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/resource_sizes.pydeps build/android/resource_sizes.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
+../gn_helpers.py
+../util/lib/common/perf_result_data_type.py
+../util/lib/common/perf_tests_results_helper.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/zipalign.py
+method_count.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/dex/__init__.py
+pylib/dex/dex_parser.py
+resource_sizes.py
diff --git a/src/build/android/screenshot.py b/src/build/android/screenshot.py
new file mode 100755
index 0000000..523d859
--- /dev/null
+++ b/src/build/android/screenshot.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import screenshot
+
+if __name__ == '__main__':
+  devil_chromium.Initialize()
+  sys.exit(screenshot.main())
diff --git a/src/build/android/stacktrace/BUILD.gn b/src/build/android/stacktrace/BUILD.gn
new file mode 100644
index 0000000..ce13a15
--- /dev/null
+++ b/src/build/android/stacktrace/BUILD.gn
@@ -0,0 +1,28 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_library("java_deobfuscate_java") {
+  sources = [ "java/org/chromium/build/FlushingReTrace.java" ]
+
+  # Avoid using java_prebuilt() to ensure all uses go through the checked-in
+  # wrapper script.
+  input_jars_paths = [
+    "//third_party/proguard/lib/proguard603.jar",
+    "//third_party/proguard/lib/retrace603.jar",
+  ]
+}
+
+# Use the checked-in copy of the wrapper script & .jar rather than the built
+# one to simplify usage of the tool.
+group("java_deobfuscate") {
+  data = [
+    "java_deobfuscate.py",
+    "java_deobfuscate.jar",
+    "//third_party/proguard/lib/proguard603.jar",
+    "//third_party/proguard/lib/retrace603.jar",
+  ]
+  deps = [ "//third_party/jdk:java_data" ]
+}
diff --git a/src/build/android/stacktrace/README.md b/src/build/android/stacktrace/README.md
new file mode 100644
index 0000000..58ea94b
--- /dev/null
+++ b/src/build/android/stacktrace/README.md
@@ -0,0 +1,28 @@
+# java_deobfuscate.py
+
+A wrapper around ProGuard's ReTrace tool, which:
+
+1) Updates the regular expression used to identify stack lines, and
+2) Streams its output.
+
+The second point here is what allows you to run:
+
+    adb logcat | build/android/stacktrace/java_deobfuscate.py out/Default/apks/ChromePublic.apk.mapping
+
+And have it actually show output without logcat terminating.
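+
+For one-off lookups of individual obfuscated names, the usage message in
+FlushingReTrace.java shows the same tool driven via a pipe:
+
+    echo $OBFUSCATED_CLASS | build/android/stacktrace/java_deobfuscate.py out/Default/apks/ChromePublic.apk.mapping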
+
+
+## Update Instructions:
+
+    ninja -C out/Release java_deobfuscate
+    cp out/Release/lib.java/build/android/stacktrace/java_deobfuscate.jar build/android/stacktrace
+
+# stackwalker.py
+
+Extracts Breakpad microdumps from a log file and uses `stackwalker` to symbolize
+them.
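+
+A sketch of an invocation based on the flags the script defines (all paths are
+illustrative):
+
+    build/android/stacktrace/stackwalker.py \
+        --stackwalker-binary-path out/Release/microdump_stackwalk \
+        --stack-trace-path /tmp/logcat.txt \
+        --symbols-path /tmp/breakpad.symbols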
+
+
+# crashpad_stackwalker.py
+
+Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
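+
+A sketch of an invocation based on the flags the script defines (serial number
+and cache path are illustrative):
+
+    build/android/stacktrace/crashpad_stackwalker.py \
+        --device 0123456789abcdef \
+        --build-path out/Release \
+        --chrome-cache-path /data/data/com.android.chrome/cache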
diff --git a/src/build/android/stacktrace/crashpad_stackwalker.py b/src/build/android/stacktrace/crashpad_stackwalker.py
new file mode 100755
index 0000000..9616a54
--- /dev/null
+++ b/src/build/android/stacktrace/crashpad_stackwalker.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
+# All the non-trivial operations are performed by generate_breakpad_symbols.py,
+# dump_syms, minidump_dump and minidump_stackwalk.
+
+import argparse
+import logging
+import os
+import posixpath
+import re
+import sys
+import shutil
+import subprocess
+import tempfile
+
+_BUILD_ANDROID_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(_BUILD_ANDROID_PATH)
+import devil_chromium
+from devil.android import device_utils
+from devil.utils import timeout_retry
+
+
+def _CreateSymbolsDir(build_path, dynamic_library_names):
+  generator = os.path.normpath(
+      os.path.join(_BUILD_ANDROID_PATH, '..', '..', 'components', 'crash',
+                   'content', 'tools', 'generate_breakpad_symbols.py'))
+  syms_dir = os.path.join(build_path, 'crashpad_syms')
+  shutil.rmtree(syms_dir, ignore_errors=True)
+  os.mkdir(syms_dir)
+  for lib in dynamic_library_names:
+    unstripped_library_path = os.path.join(build_path, 'lib.unstripped', lib)
+    if not os.path.exists(unstripped_library_path):
+      continue
+    logging.info('Generating symbols for: %s', unstripped_library_path)
+    cmd = [
+        generator,
+        '--symbols-dir',
+        syms_dir,
+        '--build-dir',
+        build_path,
+        '--binary',
+        unstripped_library_path,
+        '--platform',
+        'android',
+    ]
+    return_code = subprocess.call(cmd)
+    if return_code != 0:
+      logging.error('Could not extract symbols, command failed: %s',
+                    ' '.join(cmd))
+  return syms_dir
+
+
+def _ChooseLatestCrashpadDump(device, crashpad_dump_path):
+  if not device.PathExists(crashpad_dump_path):
+    logging.warning('Crashpad dump directory does not exist: %s',
+                    crashpad_dump_path)
+    return None
+  latest = None
+  latest_timestamp = 0
+  for crashpad_file in device.ListDirectory(crashpad_dump_path):
+    if crashpad_file.endswith('.dmp'):
+      stat = device.StatPath(posixpath.join(crashpad_dump_path, crashpad_file))
+      current_timestamp = stat['st_mtime']
+      if current_timestamp > latest_timestamp:
+        latest_timestamp = current_timestamp
+        latest = crashpad_file
+  return latest
+
+
+def _ExtractLibraryNamesFromDump(build_path, dump_path):
+  default_library_name = 'libmonochrome.so'
+  dumper_path = os.path.join(build_path, 'minidump_dump')
+  if not os.access(dumper_path, os.X_OK):
+    logging.warning(
+        'Cannot extract library name from dump because %s is not found, '
+        'defaulting to: %s', dumper_path, default_library_name)
+    return [default_library_name]
+  p = subprocess.Popen([dumper_path, dump_path],
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE)
+  stdout, stderr = p.communicate()
+  if p.returncode != 0:
+    # Dumper errors often do not affect stack walkability, so just log a warning.
+    logging.warning('Reading minidump failed with output:\n%s', stderr)
+
+  library_names = []
+  module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
+                                      r'"(?P<library_name>lib[^. ]+.so)"')
+  in_module = False
+  for line in stdout.splitlines():
+    line = line.lstrip().rstrip('\n')
+    if line == 'MDRawModule':
+      in_module = True
+      continue
+    if line == '':
+      in_module = False
+      continue
+    if in_module:
+      m = module_library_line_re.match(line)
+      if m:
+        library_names.append(m.group('library_name'))
+  if not library_names:
+    logging.warning(
+        'Could not find any library name in the dump, '
+        'defaulting to: %s', default_library_name)
+    return [default_library_name]
+  return library_names
+
+
+def main():
+  logging.basicConfig(level=logging.INFO)
+  parser = argparse.ArgumentParser(
+      description='Fetches Crashpad dumps from a given device, '
+      'walks and symbolizes the stacks.')
+  parser.add_argument('--device', required=True, help='Device serial number')
+  parser.add_argument('--adb-path', help='Path to the "adb" command')
+  parser.add_argument(
+      '--build-path',
+      required=True,
+      help='Build output directory, equivalent to CHROMIUM_OUTPUT_DIR')
+  parser.add_argument(
+      '--chrome-cache-path',
+      required=True,
+      help='Directory on the device where Chrome stores cached files; '
+      'Crashpad stores dumps in a subdirectory of it.')
+  args = parser.parse_args()
+
+  stackwalk_path = os.path.join(args.build_path, 'minidump_stackwalk')
+  if not os.path.exists(stackwalk_path):
+    logging.error('Missing minidump_stackwalk executable')
+    return 1
+
+  devil_chromium.Initialize(output_directory=args.build_path,
+                            adb_path=args.adb_path)
+  device = device_utils.DeviceUtils(args.device)
+
+  device_crashpad_path = posixpath.join(args.chrome_cache_path, 'Crashpad',
+                                        'pending')
+
+  def CrashpadDumpExists():
+    return _ChooseLatestCrashpadDump(device, device_crashpad_path)
+
+  crashpad_file = timeout_retry.WaitFor(
+      CrashpadDumpExists, wait_period=1, max_tries=9)
+  if not crashpad_file:
+    logging.error('Could not locate a crashpad dump')
+    return 1
+
+  dump_dir = tempfile.mkdtemp()
+  symbols_dir = None
+  try:
+    device.PullFile(
+        device_path=posixpath.join(device_crashpad_path, crashpad_file),
+        host_path=dump_dir)
+    dump_full_path = os.path.join(dump_dir, crashpad_file)
+    library_names = _ExtractLibraryNamesFromDump(args.build_path,
+                                                 dump_full_path)
+    symbols_dir = _CreateSymbolsDir(args.build_path, library_names)
+    stackwalk_cmd = [stackwalk_path, dump_full_path, symbols_dir]
+    subprocess.call(stackwalk_cmd)
+  finally:
+    shutil.rmtree(dump_dir, ignore_errors=True)
+    if symbols_dir:
+      shutil.rmtree(symbols_dir, ignore_errors=True)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
new file mode 100644
index 0000000..baa9313
--- /dev/null
+++ b/src/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
@@ -0,0 +1,116 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+
+import proguard.retrace.ReTrace;
+
+/**
+ * A wrapper around ReTrace that:
+ *  1. Hardcodes a more useful line regular expression
+ *  2. Disables output buffering
+ */
+public class FlushingReTrace {
+    // E.g.: D/ConnectivityService(18029): Message
+    // E.g.: W/GCM     ( 151): Message
+    // E.g.: 09-08 14:22:59.995 18029 18055 I ProcessStatsService: Message
+    // E.g.: 09-08 14:30:59.145 17731 18020 D MDnsDS  : Message
+    private static final String LOGCAT_PREFIX =
+            "(?:[VDIWEF]/.*?\\( *\\d+\\): |\\d\\d-\\d\\d [0-9:. ]+[VDIWEF] .*?: )?";
+
+    // Note: Order of these sub-patterns defines their precedence.
+    // Note: Deobfuscation of methods without the presence of line numbers basically never works.
+    // There is a test for these patterns at //build/android/stacktrace/java_deobfuscate_test.py
+    private static final String LINE_PARSE_REGEX =
+            // Eagerly match logcat prefix to avoid conflicting with the patterns below.
+            LOGCAT_PREFIX
+            + "(?:"
+            // Based on default ReTrace regex, but with whitespaces allowed in file:line parentheses
+            // and "at" changed to to allow :
+            // E.g.: 06-22 13:58:02.895  4674  4674 E THREAD_STATE:     bLA.a( PG : 173 )
+            // Normal stack trace lines look like:
+            // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682)
+            + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|"
+            // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA'
+            // on a null object reference
+            + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|"
+            // E.g.: java.lang.VerifyError: bLA
+            + "(?:java\\.lang\\.VerifyError: %c)|"
+            // E.g.: java.lang.NoSuchFieldError: No instance field e of type L...; in class LbxK;
+            + "(?:java\\.lang\\.NoSuchFieldError: No instance field %f of type .*? in class L%C;)|"
+            // E.g.: Object of type Clazz was not destroyed... (See LifetimeAssert.java)
+            + "(?:.*?Object of type %c .*)|"
+            // E.g.: VFY: unable to resolve new-instance 3810 (LSome/Framework/Class;) in Lfoo/Bar;
+            + "(?:.*L%C;.*)|"
+            // E.g.: END SomeTestClass#someMethod
+            + "(?:.*?%c#%m.*?)|"
+            // Special-case for a common junit logcat message:
+            // E.g.: java.lang.NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+            + "(?:.* isTestClass for %c)|"
+            // E.g.: Caused by: java.lang.RuntimeException: Intentional Java Crash
+            + "(?:Caused by: %c:.*)|"
+            // Quoted values and lines that end with a class / class+method:
+            // E.g.: The class: Foo
+            // E.g.: INSTRUMENTATION_STATUS: class=Foo
+            // E.g.: NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+            // E.g.: Could not find class 'SomeFrameworkClass', referenced from method Foo.bar
+            // E.g.: Could not find method SomeFrameworkMethod, referenced from method Foo.bar
+            // E.g.: The member "Foo.bar"
+            // E.g.: The class "Foobar"
+            // Be careful about matching %c without %m since language tags look like class names.
+            + "(?:.*?%c\\.%m)|"
+            + "(?:.*?\"%c\\.%m\".*)|"
+            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?\"%c\".*)|"
+            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?%c)|"
+            // E.g.: java.lang.RuntimeException: Intentional Java Crash
+            + "(?:%c:.*)|"
+            // See if entire line matches a class name (e.g. for manual deobfuscation)
+            + "(?:%c)"
+            + ")";
+
+    private static void usage() {
+        System.err.println("Usage: echo $OBFUSCATED_CLASS | java_deobfuscate Foo.apk.mapping");
+        System.err.println("Usage: java_deobfuscate Foo.apk.mapping < foo.log");
+        System.err.println("Note: Deobfuscation of symbols outside the context of stack "
+                + "traces will work only when lines match the regular expression defined "
+                + "in FlushingReTrace.java.");
+        System.err.println("Also: Deobfuscation of method names without associated line "
+                + "numbers does not seem to work.");
+        System.exit(1);
+    }
+
+    public static void main(String[] args) {
+        if (args.length != 1 || args[0].startsWith("-")) {
+            usage();
+        }
+
+        File mappingFile = new File(args[0]);
+        try {
+            LineNumberReader reader = new LineNumberReader(
+                    new BufferedReader(new InputStreamReader(System.in, "UTF-8")));
+
+            // Enabling autoFlush is the main difference from ReTrace.main().
+            boolean autoFlush = true;
+            PrintWriter writer =
+                    new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"), autoFlush);
+
+            boolean verbose = false;
+            new ReTrace(LINE_PARSE_REGEX, verbose, mappingFile).retrace(reader, writer);
+        } catch (IOException ex) {
+            // Print a verbose stack trace.
+            ex.printStackTrace();
+            System.exit(1);
+        }
+
+        System.exit(0);
+    }
+}
diff --git a/src/build/android/stacktrace/java_deobfuscate.jar b/src/build/android/stacktrace/java_deobfuscate.jar
new file mode 100644
index 0000000..36a1b70
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate.jar
Binary files differ
diff --git a/src/build/android/stacktrace/java_deobfuscate.py b/src/build/android/stacktrace/java_deobfuscate.py
new file mode 100755
index 0000000..8c231ec
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script for java_deobfuscate.
+
+This is also a buildable target, but having it pre-built here simplifies usage.
+"""
+
+import os
+import sys
+
+DIR_SOURCE_ROOT = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '../../../'))
+
+
+def main():
+  classpath = [
+      os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'stacktrace',
+                   'java_deobfuscate.jar'),
+      os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib',
+                   'proguard603.jar'),
+      os.path.join(DIR_SOURCE_ROOT, 'third_party', 'proguard', 'lib',
+                   'retrace603.jar'),
+  ]
+  java_path = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
+                           'bin', 'java')
+
+  cmd = [
+      java_path, '-classpath', ':'.join(classpath),
+      'org.chromium.build.FlushingReTrace'
+  ]
+  cmd.extend(sys.argv[1:])
+  os.execvp(cmd[0], cmd)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/stacktrace/java_deobfuscate_test.py b/src/build/android/stacktrace/java_deobfuscate_test.py
new file mode 100755
index 0000000..1bf81c9
--- /dev/null
+++ b/src/build/android/stacktrace/java_deobfuscate_test.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_deobfuscate."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+# Set by command-line argument.
+_JAVA_DEOBFUSCATE_PATH = None
+
+LINE_PREFIXES = [
+    '',
+    # logcat -v threadtime
+    '09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ',
+    # logcat
+    'W/GCM     (15158): ',
+    'W/GCM     (  158): ',
+]
+
+TEST_MAP = """\
+this.was.Deobfuscated -> FOO:
+    int[] mFontFamily -> a
+    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+never.Deobfuscated -> NOTFOO:
+    int[] mFontFamily -> a
+    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+"""
+
+TEST_DATA = [
+    '',
+    'FOO',
+    'FOO.bar',
+    'Here is a FOO',
+    'Here is a class FOO',
+    'Here is a class FOO baz',
+    'Here is a "FOO" baz',
+    'Here is a type "FOO" baz',
+    'Here is a "FOO.bar" baz',
+    'SomeError: SomeFrameworkClass in isTestClass for FOO',
+    'Here is a FOO.bar',
+    'Here is a FOO.bar baz',
+    'END FOO#bar',
+    'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
+    'FOO: Error message',
+    'Caused by: FOO: Error message',
+    '\tat FOO.bar(PG:1)',
+    '\t at\t FOO.bar\t (\t PG:\t 1\t )',
+    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+     ' java.lang.NullPointerException: Attempt to invoke interface method'
+     ' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
+    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+     ' \'int[] FOO.a\' on a null object reference'),
+    'java.lang.VerifyError: FOO',
+    ('java.lang.NoSuchFieldError: No instance field a of type '
+     'Ljava/lang/Class; in class LFOO;'),
+    'NOTFOO: Object of type FOO was not destroyed...',
+]
+
+EXPECTED_OUTPUT = [
+    '',
+    'this.was.Deobfuscated',
+    'this.was.Deobfuscated.someMethod',
+    'Here is a FOO',
+    'Here is a class this.was.Deobfuscated',
+    'Here is a class FOO baz',
+    'Here is a "FOO" baz',
+    'Here is a type "this.was.Deobfuscated" baz',
+    'Here is a "this.was.Deobfuscated.someMethod" baz',
+    'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
+    'Here is a this.was.Deobfuscated.someMethod',
+    'Here is a FOO.bar baz',
+    'END this.was.Deobfuscated#someMethod',
+    'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
+    'this.was.Deobfuscated: Error message',
+    'Caused by: this.was.Deobfuscated: Error message',
+    '\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
+    ('\t at\t this.was.Deobfuscated.someMethod\t '
+     '(\t Deobfuscated.java:\t 65\t )'),
+    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+     ' java.lang.NullPointerException: Attempt to invoke interface method'
+     ' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
+     ' null object reference'),
+    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+     ' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
+    'java.lang.VerifyError: this.was.Deobfuscated',
+    ('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
+     'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
+    'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
+]
+TEST_DATA = [s + '\n' for s in TEST_DATA]
+EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
+
+
+class JavaDeobfuscateTest(unittest.TestCase):
+
+  def __init__(self, *args, **kwargs):
+    super(JavaDeobfuscateTest, self).__init__(*args, **kwargs)
+    self._map_file = None
+
+  def setUp(self):
+    self._map_file = tempfile.NamedTemporaryFile()
+    self._map_file.write(TEST_MAP)
+    self._map_file.flush()
+
+  def tearDown(self):
+    if self._map_file:
+      self._map_file.close()
+
+  def _testImpl(self, input_lines=None, expected_output_lines=None,
+                prefix=''):
+    self.assertTrue(bool(input_lines) == bool(expected_output_lines))
+
+    if not input_lines:
+      input_lines = [prefix + x for x in TEST_DATA]
+    if not expected_output_lines:
+      expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
+
+    cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
+    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+    proc_output, _ = proc.communicate(''.join(input_lines))
+    actual_output_lines = proc_output.splitlines(True)
+    for actual, expected in zip(actual_output_lines, expected_output_lines):
+      self.assertTrue(
+          actual == expected or actual.replace('bar', 'someMethod') == expected,
+          msg=''.join([
+              'Deobfuscation failed.\n',
+              '  actual:   %s' % actual,
+              '  expected: %s' % expected]))
+
+  def testNoPrefix(self):
+    self._testImpl(prefix='')
+
+  def testThreadtimePrefix(self):
+    self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
+
+  def testStandardPrefix(self):
+    self._testImpl(prefix='W/GCM     (15158): ')
+
+  def testStandardPrefixWithPadding(self):
+    self._testImpl(prefix='W/GCM     (  158): ')
+
+  @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
+  def testIndefiniteHang(self):
+    # Test for crbug.com/876539.
+    self._testImpl(
+        input_lines=[
+            'VFY: unable to resolve virtual method 2: LFOO;'
+                + '.onDescendantInvalidated '
+                + '(Landroid/view/View;Landroid/view/View;)V',
+        ],
+        expected_output_lines=[
+            'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
+                + '.onDescendantInvalidated '
+                + '(Landroid/view/View;Landroid/view/View;)V',
+        ])
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
+                      required=True)
+  known_args, unittest_args = parser.parse_known_args()
+  _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
+  unittest_args = [sys.argv[0]] + unittest_args
+  unittest.main(argv=unittest_args)
diff --git a/src/build/android/stacktrace/stackwalker.py b/src/build/android/stacktrace/stackwalker.py
new file mode 100755
index 0000000..4f2782f
--- /dev/null
+++ b/src/build/android/stacktrace/stackwalker.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import sys
+import tempfile
+
+if __name__ == '__main__':
+  sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+
+_MICRODUMP_BEGIN = re.compile(
+    '.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
+_MICRODUMP_END = re.compile(
+    '.*google-breakpad: -----END BREAKPAD MICRODUMP-----')
+
+""" Example Microdump
+<timestamp>  6270  6131 F google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----
+<timestamp>  6270  6131 F google-breakpad: V Chrome_Android:54.0.2790.0
+...
+<timestamp>  6270  6131 F google-breakpad: -----END BREAKPAD MICRODUMP-----
+
+"""
+
+
+def GetMicroDumps(dump_path):
+  """Returns all microdumps found in given log file
+
+  Args:
+    dump_path: Path to the log file.
+
+  Returns:
+    List of all microdumps as lists of lines.
+  """
+  with open(dump_path, 'r') as d:
+    data = d.read()
+  all_dumps = []
+  current_dump = None
+  for line in data.splitlines():
+    if current_dump is not None:
+      if _MICRODUMP_END.match(line):
+        current_dump.append(line)
+        all_dumps.append(current_dump)
+        current_dump = None
+      else:
+        current_dump.append(line)
+    elif _MICRODUMP_BEGIN.match(line):
+      current_dump = []
+      current_dump.append(line)
+  return all_dumps
+
+
+def SymbolizeMicroDump(stackwalker_binary_path, dump, symbols_path):
+  """Runs stackwalker on microdump.
+
+  Runs the stackwalker binary at stackwalker_binary_path on a given microdump
+  using the symbols at symbols_path.
+
+  Args:
+    stackwalker_binary_path: Path to the stackwalker binary.
+    dump: The microdump to run the stackwalker on.
+    symbols_path: Path to the symbols file to use.
+
+  Returns:
+    Output from stackwalker tool.
+  """
+  with tempfile.NamedTemporaryFile() as tf:
+    for l in dump:
+      tf.write('%s\n' % l)
+    cmd = [stackwalker_binary_path, tf.name, symbols_path]
+    return cmd_helper.GetCmdOutput(cmd)
+
+
+def AddArguments(parser):
+  parser.add_argument('--stackwalker-binary-path', required=True,
+                      help='Path to stackwalker binary.')
+  parser.add_argument('--stack-trace-path', required=True,
+                      help='Path to stacktrace containing microdump.')
+  parser.add_argument('--symbols-path', required=True,
+                      help='Path to symbols file.')
+  parser.add_argument('--output-file',
+                      help='Path to dump stacktrace output to')
+
+
+def _PrintAndLog(line, fp):
+  if fp:
+    fp.write('%s\n' % line)
+  print(line)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  AddArguments(parser)
+  args = parser.parse_args()
+
+  micro_dumps = GetMicroDumps(args.stack_trace_path)
+  if not micro_dumps:
+    print('No microdump found. Exiting.')
+    return 0
+
+  symbolized_dumps = []
+  for micro_dump in micro_dumps:
+    symbolized_dumps.append(SymbolizeMicroDump(
+        args.stackwalker_binary_path, micro_dump, args.symbols_path))
+
+  try:
+    fp = open(args.output_file, 'w') if args.output_file else None
+    _PrintAndLog('%d microdumps found.' % len(micro_dumps), fp)
+    _PrintAndLog('---------- Start output from stackwalker ----------', fp)
+    for index, symbolized_dump in enumerate(symbolized_dumps):
+      _PrintAndLog(
+          '------------------ Start dump %d ------------------' % index, fp)
+      _PrintAndLog(symbolized_dump, fp)
+      _PrintAndLog(
+          '------------------- End dump %d -------------------' % index, fp)
+    _PrintAndLog('----------- End output from stackwalker -----------', fp)
+  except Exception:
+    if fp:
+      fp.close()
+    raise
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test/BUILD.gn b/src/build/android/test/BUILD.gn
new file mode 100644
index 0000000..d5f8609
--- /dev/null
+++ b/src/build/android/test/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/android_nocompile.gni")
+import("nocompile_gn/nocompile_sources.gni")
+
+if (enable_java_templates) {
+  android_nocompile_test_suite("android_lint_test") {
+    # Depend on lint Python script so that the action is re-run whenever the lint script is
+    # modified.
+    pydeps = [ "//build/android/gyp/lint.pydeps" ]
+    tests = [
+      {
+        target = "nocompile_gn:default_locale_lint_test"
+        nocompile_sources =
+            rebase_path(default_locale_lint_test_nocompile_sources,
+                        "",
+                        "nocompile_gn")
+        expected_compile_output_regex = "Warning:.*DefaultLocale"
+      },
+      {
+        target = "nocompile_gn:new_api_lint_test"
+        nocompile_sources =
+            rebase_path(new_api_lint_test_nocompile_sources, "", "nocompile_gn")
+        expected_compile_output_regex = "Error:.*NewApi"
+      },
+    ]
+  }
+}
diff --git a/src/build/android/test/nocompile_gn/BUILD.gn b/src/build/android/test/nocompile_gn/BUILD.gn
new file mode 100644
index 0000000..d3262fe
--- /dev/null
+++ b/src/build/android/test/nocompile_gn/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/android_nocompile.gni")
+import("//build/config/android/rules.gni")
+import("nocompile_sources.gni")
+
+template("lint_test") {
+  _library_target_name = "${target_name}_test_java"
+  _apk_target_name = "${target_name}_apk"
+
+  android_library(_library_target_name) {
+    sources = [ "//tools/android/errorprone_plugin/test/src/org/chromium/tools/errorprone/plugin/Empty.java" ]
+    not_needed(invoker, [ "sources" ])
+    if (enable_android_nocompile_tests) {
+      sources += invoker.sources
+    }
+  }
+
+  android_apk(_apk_target_name) {
+    # This cannot be marked testonly since lint has special ignores for testonly
+    # targets. We need to test linting a normal apk target.
+    apk_name = _apk_target_name
+    deps = [ ":$_library_target_name" ]
+    android_manifest = "//build/android/AndroidManifest.xml"
+  }
+
+  android_lint(target_name) {
+    _apk_target = ":${_apk_target_name}"
+    deps = [ "${_apk_target}__java" ]
+    build_config_dep = "$_apk_target$build_config_target_suffix"
+    build_config = get_label_info(_apk_target, "target_gen_dir") + "/" +
+                   get_label_info(_apk_target, "name") + ".build_config"
+    if (enable_android_nocompile_tests) {
+      skip_build_server = true
+    }
+  }
+}
+
+lint_test("default_locale_lint_test") {
+  sources = default_locale_lint_test_nocompile_sources
+}
+
+lint_test("new_api_lint_test") {
+  sources = new_api_lint_test_nocompile_sources
+}
diff --git a/src/build/android/test/nocompile_gn/nocompile_sources.gni b/src/build/android/test/nocompile_gn/nocompile_sources.gni
new file mode 100644
index 0000000..8fc049e
--- /dev/null
+++ b/src/build/android/test/nocompile_gn/nocompile_sources.gni
@@ -0,0 +1,8 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+default_locale_lint_test_nocompile_sources =
+    [ "../../java/test/DefaultLocaleLintTest.java" ]
+
+new_api_lint_test_nocompile_sources = [ "../../java/test/NewApiLintTest.java" ]
diff --git a/src/build/android/test_runner.py b/src/build/android/test_runner.py
new file mode 100755
index 0000000..84010c3
--- /dev/null
+++ b/src/build/android/test_runner.py
@@ -0,0 +1,1176 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+from __future__ import absolute_import
+import argparse
+import collections
+import contextlib
+import itertools
+import logging
+import os
+import re
+import shutil
+import signal
+import sys
+import tempfile
+import threading
+import traceback
+import unittest
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See http://crbug.com/724524 and https://bugs.python.org/issue7980.
+import _strptime  # pylint: disable=unused-import
+
+# pylint: disable=redefined-builtin
+from six.moves import range  # Needed for python 3 compatibility.
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+  sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import base_error
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import output_manager
+from pylib.base import output_manager_factory
+from pylib.base import result_sink
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.results.presentation import test_results_presentation
+from pylib.utils import local_utils
+from pylib.utils import logdog_helper
+from pylib.utils import logging_utils
+from pylib.utils import test_filter
+
+from py_utils import contextlib_ext
+
+_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
+
+
+def _RealPath(arg):
+  if arg.startswith('//'):
+    arg = os.path.abspath(os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                       arg[2:].replace('/', os.sep)))
+  return os.path.realpath(arg)
+
+
+def AddTestLauncherOptions(parser):
+  """Adds arguments mirroring //base/test/launcher.
+
+  Args:
+    parser: The parser to which arguments should be added.
+  Returns:
+    The given parser.
+  """
+  parser.add_argument(
+      '--test-launcher-retry-limit',
+      '--test_launcher_retry_limit',
+      '--num_retries', '--num-retries',
+      '--isolated-script-test-launcher-retry-limit',
+      dest='num_retries', type=int, default=2,
+      help='Number of retries for a test before '
+           'giving up (default: %(default)s).')
+  parser.add_argument(
+      '--test-launcher-summary-output',
+      '--json-results-file',
+      dest='json_results_file', type=os.path.realpath,
+      help='If set, will dump results in JSON form to the specified file. '
+           'Note that this will also trigger saving per-test logcats to '
+           'logdog.')
+  parser.add_argument(
+      '--test-launcher-shard-index',
+      type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+      help='Index of the external shard to run.')
+  parser.add_argument(
+      '--test-launcher-total-shards',
+      type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+      help='Total number of external shards.')
+
+  test_filter.AddFilterOptions(parser)
+
+  return parser
+
+
+def AddCommandLineOptions(parser):
+  """Adds arguments to support passing command-line flags to the device."""
+  parser.add_argument(
+      '--device-flags-file',
+      type=os.path.realpath,
+      help='The relative filepath to a file containing '
+           'command-line flags to set on the device')
+  parser.add_argument(
+      '--use-apk-under-test-flags-file',
+      action='store_true',
+      help='Whether to use the flags file for the apk under test. If set, '
+           "the filename will be looked up in the APK's PackageInfo.")
+  parser.set_defaults(allow_unknown=True)
+  parser.set_defaults(command_line_flags=None)
+
+
+def AddTracingOptions(parser):
+  # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
+  # for all test types.
+  parser.add_argument(
+      '--trace-output',
+      metavar='FILENAME', type=os.path.realpath,
+      help='Path to save test_runner trace json output to.')
+
+  parser.add_argument(
+      '--trace-all',
+      action='store_true',
+      help='Whether to trace all function calls.')
+
+
+def AddCommonOptions(parser):
+  """Adds all common options to |parser|."""
+
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+  debug_or_release_group = parser.add_mutually_exclusive_group()
+  debug_or_release_group.add_argument(
+      '--debug',
+      action='store_const', const='Debug', dest='build_type',
+      default=default_build_type,
+      help='If set, run test suites under out/Debug. '
+           'Default is env var BUILDTYPE or Debug.')
+  debug_or_release_group.add_argument(
+      '--release',
+      action='store_const', const='Release', dest='build_type',
+      help='If set, run test suites under out/Release. '
+           'Default is env var BUILDTYPE or Debug.')
+
+  parser.add_argument(
+      '--break-on-failure', '--break_on_failure',
+      dest='break_on_failure', action='store_true',
+      help='Whether to break on failure.')
+
+  # TODO(jbudorick): Remove this once everything has switched to platform
+  # mode.
+  parser.add_argument(
+      '--enable-platform-mode',
+      action='store_true',
+      help='Run the test scripts in platform mode, which '
+           'conceptually separates the test runner from the '
+           '"device" (local or remote, real or emulated) on '
+           'which the tests are running. [experimental]')
+
+  parser.add_argument(
+      '-e', '--environment',
+      default='local', choices=constants.VALID_ENVIRONMENTS,
+      help='Test environment to run in (default: %(default)s).')
+
+  parser.add_argument(
+      '--local-output',
+      action='store_true',
+      help='Whether to archive test output locally and generate '
+           'a local results detail page.')
+
+  class FastLocalDevAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+      namespace.enable_concurrent_adb = True
+      namespace.enable_device_cache = True
+      namespace.extract_test_list_from_filter = True
+      namespace.local_output = True
+      namespace.num_retries = 0
+      namespace.skip_clear_data = True
+
+  parser.add_argument(
+      '--fast-local-dev',
+      type=bool,
+      nargs=0,
+      action=FastLocalDevAction,
+      help='Alias for: --num-retries=0 --enable-device-cache '
+      '--enable-concurrent-adb --skip-clear-data '
+      '--extract-test-list-from-filter --local-output')
+
+  # TODO(jbudorick): Remove this once downstream bots have switched to
+  # api.test_results.
+  parser.add_argument(
+      '--flakiness-dashboard-server',
+      dest='flakiness_dashboard_server',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--gs-results-bucket',
+      help='Google Storage bucket to upload results to.')
+
+  parser.add_argument(
+      '--output-directory',
+      dest='output_directory', type=os.path.realpath,
+      help='Path to the directory in which build files are'
+           ' located (must include build type). This will take'
+           ' precedence over --debug and --release')
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count', default=0, action='count',
+      help='Verbose level (multiple times for more)')
+
+  parser.add_argument(
+      '--repeat', '--gtest_repeat', '--gtest-repeat',
+      '--isolated-script-test-repeat',
+      dest='repeat', type=int, default=0,
+      help='Number of times to repeat the specified set of tests.')
+
+  # This is currently only implemented for gtests and instrumentation tests.
+  parser.add_argument(
+      '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
+      '--isolated-script-test-also-run-disabled-tests',
+      dest='run_disabled', action='store_true',
+      help='Also run disabled tests if applicable.')
+
+  # These are currently only implemented for gtests.
+  parser.add_argument('--isolated-script-test-output',
+                      help='If present, store test results on this path.')
+  parser.add_argument('--isolated-script-test-perf-output',
+                      help='If present, store chartjson results on this path.')
+
+  AddTestLauncherOptions(parser)
+
+
+def ProcessCommonOptions(args):
+  """Processes and handles all common options."""
+  run_tests_helper.SetLogLevel(args.verbose_count, add_handler=False)
+  # pylint: disable=redefined-variable-type
+  if args.verbose_count > 0:
+    handler = logging_utils.ColorStreamHandler()
+  else:
+    handler = logging.StreamHandler(sys.stdout)
+  # pylint: enable=redefined-variable-type
+  handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(handler)
+
+  constants.SetBuildType(args.build_type)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+
+def AddDeviceOptions(parser):
+  """Adds device options to |parser|."""
+
+  parser = parser.add_argument_group('device arguments')
+
+  parser.add_argument(
+      '--adb-path',
+      type=os.path.realpath,
+      help='Specify the absolute path of the adb binary that '
+           'should be used.')
+  parser.add_argument('--denylist-file',
+                      type=os.path.realpath,
+                      help='Device denylist file.')
+  parser.add_argument(
+      '-d', '--device', nargs='+',
+      dest='test_devices',
+      help='Target device(s) for the test suite to run on.')
+  parser.add_argument(
+      '--enable-concurrent-adb',
+      action='store_true',
+      help='Run multiple adb commands at the same time, even '
+           'for the same device.')
+  parser.add_argument(
+      '--enable-device-cache',
+      action='store_true',
+      help='Cache device state to disk between runs')
+  parser.add_argument(
+      '--skip-clear-data',
+      action='store_true',
+      help='Do not wipe app data between tests. Use this to '
+           'speed up local development and never on bots '
+           '(increases flakiness).')
+  parser.add_argument(
+      '--recover-devices',
+      action='store_true',
+      help='Attempt to recover devices prior to the final retry. Warning: '
+           'this will cause all devices to reboot.')
+  parser.add_argument(
+      '--tool',
+      dest='tool',
+      help='Run the test under a tool '
+           '(use --tool help to list them)')
+
+  parser.add_argument(
+      '--upload-logcats-file',
+      action='store_true',
+      dest='upload_logcats_file',
+      help='Whether to upload logcat file to logdog.')
+
+  logcat_output_group = parser.add_mutually_exclusive_group()
+  logcat_output_group.add_argument(
+      '--logcat-output-dir', type=os.path.realpath,
+      help='If set, will dump logcats recorded during test run to directory. '
+           'File names will be the device ids with timestamps.')
+  logcat_output_group.add_argument(
+      '--logcat-output-file', type=os.path.realpath,
+      help='If set, will merge logcats recorded during test run and dump them '
+           'to the specified file.')
+
+
+def AddEmulatorOptions(parser):
+  """Adds emulator-specific options to |parser|."""
+  parser = parser.add_argument_group('emulator arguments')
+
+  parser.add_argument(
+      '--avd-config',
+      type=os.path.realpath,
+      help='Path to the avd config textpb. '
+      '(See //tools/android/avd/proto/ for message definition'
+      ' and existing textpb files.)')
+  parser.add_argument(
+      '--emulator-count',
+      type=int,
+      default=1,
+      help='Number of emulators to use.')
+  parser.add_argument(
+      '--emulator-window',
+      action='store_true',
+      default=False,
+      help='Enable graphical window display on the emulator.')
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  parser = parser.add_argument_group('gtest arguments')
+
+  parser.add_argument(
+      '--app-data-file',
+      action='append', dest='app_data_files',
+      help='A file path relative to the app data directory '
+           'that should be saved to the host.')
+  parser.add_argument(
+      '--app-data-file-dir',
+      help='Host directory to which app data files will be'
+           ' saved. Used with --app-data-file.')
+  parser.add_argument(
+      '--delete-stale-data',
+      dest='delete_stale_data', action='store_true',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--enable-xml-result-parsing',
+      action='store_true', help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--executable-dist-dir',
+      type=os.path.realpath,
+      help="Path to executable's dist directory for native"
+           " (non-apk) tests.")
+  parser.add_argument(
+      '--extract-test-list-from-filter',
+      action='store_true',
+      help='When a test filter is specified, and the list of '
+           'tests can be determined from it, skip querying the '
+           'device for the list of all tests. Speeds up local '
+           'development, but is not safe to use on bots '
+           '(http://crbug.com/549214).')
+  parser.add_argument(
+      '--gs-test-artifacts-bucket',
+      help=('If present, test artifacts will be uploaded to this Google '
+            'Storage bucket.'))
+  parser.add_argument(
+      '--render-test-output-dir',
+      help='If present, store rendering artifacts in this path.')
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '-t', '--shard-timeout',
+      dest='shard_timeout', type=int, default=120,
+      help='Timeout to wait for each test (default: %(default)s).')
+  parser.add_argument(
+      '--store-tombstones',
+      dest='store_tombstones', action='store_true',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '-s', '--suite',
+      dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
+      help='Executable name(s) of the test suite(s) to run.')
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help='Path to install json for the test apk.')
+  parser.add_argument('--test-launcher-batch-limit',
+                      dest='test_launcher_batch_limit',
+                      type=int,
+                      help='The max number of tests to run in a shard. '
+                      'Ignores non-positive ints and those greater than '
+                      'MAX_SHARDS')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+  parser.add_argument(
+      '--coverage-dir',
+      type=os.path.realpath,
+      help='Directory in which to place all generated coverage files.')
+
+
+def AddInstrumentationTestOptions(parser):
+  """Adds Instrumentation test options to |parser|."""
+
+  parser = parser.add_argument_group('instrumentation arguments')
+
+  parser.add_argument(
+      '--additional-apk',
+      action='append', dest='additional_apks', default=[],
+      type=_RealPath,
+      help='Additional apk that must be installed on '
+           'the device when the tests are run')
+  parser.add_argument(
+      '-A', '--annotation',
+      dest='annotation_str',
+      help='Comma-separated list of annotations. Run only tests with any of '
+           'the given annotations. An annotation can be either a key or a '
+           'key-values pair. A test that has no annotation is considered '
+           '"SmallTest".')
+  # TODO(jbudorick): Remove support for name-style APK specification once
+  # bots are no longer doing it.
+  parser.add_argument(
+      '--apk-under-test',
+      help='Path or name of the apk under test.')
+  parser.add_argument(
+      '--module',
+      action='append',
+      dest='modules',
+      help='Specify Android App Bundle modules to install in addition to the '
+      'base module.')
+  parser.add_argument(
+      '--fake-module',
+      action='append',
+      dest='fake_modules',
+      help='Specify Android App Bundle modules to fake install in addition to '
+      'the real modules.')
+  parser.add_argument(
+      '--additional-locale',
+      action='append',
+      dest='additional_locales',
+      help='Specify locales in addition to the device locale to install splits '
+      'for when --apk-under-test is an Android App Bundle.')
+  parser.add_argument(
+      '--coverage-dir',
+      type=os.path.realpath,
+      help='Directory in which to place all generated '
+      'Jacoco coverage files.')
+  parser.add_argument(
+      '--delete-stale-data',
+      action='store_true', dest='delete_stale_data',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--disable-dalvik-asserts',
+      dest='set_asserts', action='store_false', default=True,
+      help='Removes the dalvik.vm.enableassertions property')
+  parser.add_argument(
+      '--enable-java-deobfuscation',
+      action='store_true',
+      help='Deobfuscate java stack traces in test output and logcat.')
+  parser.add_argument(
+      '-E', '--exclude-annotation',
+      dest='exclude_annotation_str',
+      help='Comma-separated list of annotations. Exclude tests with these '
+           'annotations.')
+
+  def package_replacement(arg):
+    split_arg = arg.split(',')
+    if len(split_arg) != 2:
+      raise argparse.ArgumentTypeError(
+          'Expected two comma-separated strings for --replace-system-package, '
+          'received %d' % len(split_arg))
+    PackageReplacement = collections.namedtuple('PackageReplacement',
+                                                ['package', 'replacement_apk'])
+    return PackageReplacement(package=split_arg[0],
+                              replacement_apk=_RealPath(split_arg[1]))
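+  # For example (hypothetical): 'com.example.app,path/to/some.apk' parses to
+  # PackageReplacement(package='com.example.app',
+  #                    replacement_apk=_RealPath('path/to/some.apk')).
+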
+  parser.add_argument(
+      '--replace-system-package',
+      type=package_replacement, default=None,
+      help='Specifies a system package to replace with a given APK for the '
+           'duration of the test. Given as a comma-separated pair of strings, '
+           'the first element being the package and the second the path to the '
+           'replacement APK. Only supports replacing one package. Example: '
+           '--replace-system-package com.example.app,path/to/some.apk')
+  parser.add_argument(
+      '--remove-system-package',
+      default=[],
+      action='append',
+      dest='system_packages_to_remove',
+      help='Specifies a system package to remove before testing if it exists '
+      'on the system. WARNING: THIS WILL PERMANENTLY REMOVE THE SYSTEM APP. '
+      'Unlike --replace-system-package, the app will not be restored after '
+      'tests are finished.')
+
+  parser.add_argument(
+      '--use-webview-provider',
+      type=_RealPath, default=None,
+      help='Use this apk as the webview provider during the test. '
+           'The original provider will be restored if possible; '
+           "on Nougat the provider can't be determined, so "
+           'the system will choose the default provider.')
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '--screenshot-directory',
+      dest='screenshot_dir', type=os.path.realpath,
+      help='Capture screenshots of test failures')
+  parser.add_argument(
+      '--shared-prefs-file',
+      dest='shared_prefs_file', type=_RealPath,
+      help='The relative path to a file containing a JSON list of shared '
+           'preference files to edit and how to do so. Example list: '
+           '[{'
+           '  "package": "com.package.example",'
+           '  "filename": "ExampleSettings.xml",'
+           '  "set": {'
+           '    "boolean_key_in_xml": true,'
+           '    "string_key_in_xml": "string_value"'
+           '  },'
+           '  "remove": ['
+           '    "key_in_xml_to_remove"'
+           '  ]'
+           '}]')
+  parser.add_argument(
+      '--store-tombstones',
+      action='store_true', dest='store_tombstones',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '--strict-mode',
+      dest='strict_mode', default='testing',
+      help='StrictMode command-line flag set on the device, '
+           'death/testing to kill the process, off to stop '
+           'checking, flash to flash only. (default: %(default)s)')
+  parser.add_argument(
+      '--test-apk',
+      required=True,
+      help='Path or name of the apk containing the tests.')
+  parser.add_argument(
+      '--test-jar',
+      help='Path of jar containing test java files.')
+  parser.add_argument(
+      '--timeout-scale',
+      type=float,
+      help='Factor by which timeouts should be scaled.')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+
+  # WPR record mode.
+  parser.add_argument('--wpr-enable-record',
+                      action='store_true',
+                      default=False,
+                      help='If true, WPR server runs in record mode; '
+                      'otherwise, runs in replay mode.')
+
+  # These arguments are suppressed from the help text because they should
+  # only ever be specified by an intermediate script.
+  parser.add_argument(
+      '--apk-under-test-incremental-install-json',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help=argparse.SUPPRESS)
+
+
+def AddSkiaGoldTestOptions(parser):
+  """Adds Skia Gold test options to |parser|."""
+  parser = parser.add_argument_group("Skia Gold arguments")
+  parser.add_argument(
+      '--code-review-system',
+      help='A non-default code review system to pass to Gold, if '
+      'applicable.')
+  parser.add_argument(
+      '--git-revision', help='The git commit currently being tested.')
+  parser.add_argument(
+      '--gerrit-issue',
+      help='The Gerrit issue this test is being run on, if applicable.')
+  parser.add_argument(
+      '--gerrit-patchset',
+      help='The Gerrit patchset this test is being run on, if applicable.')
+  parser.add_argument(
+      '--buildbucket-id',
+      help='The Buildbucket build ID that this test was triggered from, if '
+      'applicable.')
+  local_group = parser.add_mutually_exclusive_group()
+  local_group.add_argument(
+      '--local-pixel-tests',
+      action='store_true',
+      default=None,
+      help='Specifies to run the Skia Gold pixel tests in local mode. When run '
+      'in local mode, uploading to Gold is disabled and traditional '
+      'generated/golden/diff images are output instead of triage links. '
+      'Running in local mode also implies --no-luci-auth. If both this '
+      'and --no-local-pixel-tests are left unset, the test harness will '
+      'attempt to detect whether it is running on a workstation or not '
+      'and set the options accordingly.')
+  local_group.add_argument(
+      '--no-local-pixel-tests',
+      action='store_false',
+      dest='local_pixel_tests',
+      help='Specifies to run the Skia Gold pixel tests in non-local (bot) '
+      'mode. When run in this mode, data is actually uploaded to Gold and '
+      'triage links are generated. If both this and --local-pixel-tests '
+      'are left unset, the test harness will attempt to detect whether '
+      'it is running on a workstation or not and set the options '
+      'accordingly.')
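+  # Together, these two flags make local_pixel_tests tri-state: True (local
+  # run), False (bot run), or None (auto-detect at runtime).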
+  parser.add_argument(
+      '--no-luci-auth',
+      action='store_true',
+      default=False,
+      help="Don't use the serve account provided by LUCI for authentication "
+      'with Skia Gold, instead relying on gsutil to be pre-authenticated. '
+      'Meant for testing locally instead of on the bots.')
+  parser.add_argument(
+      '--bypass-skia-gold-functionality',
+      action='store_true',
+      default=False,
+      help='Bypass all interaction with Skia Gold, effectively disabling the '
+      'image comparison portion of any tests that use Gold. Only meant to be '
+      'used in case a Gold outage occurs and cannot be fixed quickly.')
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  parser = parser.add_argument_group('junit arguments')
+
+  parser.add_argument(
+      '--coverage-on-the-fly',
+      action='store_true',
+      help='Generate coverage data by Jacoco on-the-fly instrumentation.')
+  parser.add_argument(
+      '--coverage-dir', type=os.path.realpath,
+      help='Directory to store coverage info.')
+  parser.add_argument(
+      '--package-filter',
+      help='Filters tests by package.')
+  parser.add_argument(
+      '--runner-filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  parser.add_argument(
+      '--shards',
+      default=-1,
+      type=int,
+      help='Number of shards to run junit tests in parallel on. Only 1 shard '
+      'is supported when test-filter is specified. Values less than 1 will '
+      'auto-select the number of shards.')
+  parser.add_argument(
+      '-s', '--test-suite', required=True,
+      help='JUnit test suite to run.')
+  debug_group = parser.add_mutually_exclusive_group()
+  debug_group.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_const', const='8701',
+      dest='debug_socket', help='Alias for --debug-socket=8701')
+  debug_group.add_argument(
+      '--debug-socket',
+      help='Wait for java debugger to attach at specified socket address '
+           'before running any application code. Also disables test timeouts '
+           'and sets retries=0.')
+
+  # These arguments are for Android Robolectric tests.
+  parser.add_argument(
+      '--robolectric-runtime-deps-dir',
+      help='Path to runtime deps for Robolectric.')
+  parser.add_argument(
+      '--resource-apk',
+      required=True,
+      help='Path to .ap_ containing binary resources for Robolectric.')
+
+
+def AddLinkerTestOptions(parser):
+  """Adds linker test options to |parser|."""
+
+  parser = parser.add_argument_group('linker arguments')
+
+  parser.add_argument(
+      '--test-apk',
+      type=os.path.realpath,
+      help='Path to the linker test APK.')
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  parser = parser.add_argument_group('monkey arguments')
+
+  parser.add_argument('--browser',
+                      required=True,
+                      choices=list(constants.PACKAGE_INFO.keys()),
+                      metavar='BROWSER',
+                      help='Browser under test.')
+  parser.add_argument(
+      '--category',
+      nargs='*', dest='categories', default=[],
+      help='A list of allowed categories. Monkey will only visit activities '
+           'that are listed with one of the specified categories.')
+  parser.add_argument(
+      '--event-count',
+      default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  parser.add_argument(
+      '--seed',
+      type=int,
+      help='Seed value for pseudo-random generator. Same seed value generates '
+           'the same sequence of events. Seed is randomized by default.')
+  parser.add_argument(
+      '--throttle',
+      default=100, type=int,
+      help='Delay between events (ms) (default: %(default)s). ')
+
+
+def AddPythonTestOptions(parser):
+  """Adds python test options to |parser|."""
+
+  parser = parser.add_argument_group('python arguments')
+
+  parser.add_argument('-s',
+                      '--suite',
+                      dest='suite_name',
+                      metavar='SUITE_NAME',
+                      choices=list(constants.PYTHON_UNIT_TEST_SUITES.keys()),
+                      help='Name of the test suite to run.')
+
+
+def _CreateClassToFileNameDict(test_apk):
+  """Creates a dict mapping classes to file names from size-info apk."""
+  constants.CheckOutputDirectory()
+  test_apk_size_info = os.path.join(constants.GetOutDirectory(), 'size-info',
+                                    os.path.basename(test_apk) + '.jar.info')
+
+  class_to_file_dict = {}
+  # Some tests such as webview_cts_tests use a separately downloaded apk to run
+  # tests. This means the apk may not have been built by the system and hence
+  # no size info file exists.
+  if not os.path.exists(test_apk_size_info):
+    logging.debug('Apk size file not found. %s', test_apk_size_info)
+    return class_to_file_dict
+
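+  # Each line of the .jar.info file maps a class to its source path, e.g.
+  # (hypothetical): 'org.chromium.chrome.Foo,../../chrome/java/Foo.java'.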
+  with open(test_apk_size_info, 'r') as f:
+    for line in f:
+      file_class, file_name = line.rstrip().split(',', 1)
+      # Only want files that are not prebuilt.
+      if file_name.startswith('../../'):
+        class_to_file_dict[file_class] = str(
+            file_name.replace('../../', '//', 1))
+
+  return class_to_file_dict
+
+
+def _RunPythonTests(args):
+  """Subcommand of RunTestsCommand which runs python unit tests."""
+  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+  suite_path = suite_vars['path']
+  suite_test_modules = suite_vars['test_modules']
+
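+  # Temporarily prepend the suite path so its test modules are importable;
+  # the finally block below restores sys.path.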
+  sys.path = [suite_path] + sys.path
+  try:
+    suite = unittest.TestSuite()
+    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+                   for m in suite_test_modules)
+    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+    return 0 if runner.run(suite).wasSuccessful() else 1
+  finally:
+    sys.path = sys.path[1:]
+
+
+_DEFAULT_PLATFORM_MODE_TESTS = [
+    'gtest', 'instrumentation', 'junit', 'linker', 'monkey'
+]
+
+
+def RunTestsCommand(args, result_sink_client=None):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+    result_sink_client: A ResultSinkClient object.
+
+  Returns:
+    Integer indicating the exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+        individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+  logging.info('command: %s', ' '.join(sys.argv))
+  if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
+    return RunTestsInPlatformMode(args, result_sink_client)
+
+  if command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'junit',
+  'linker',
+  'monkey',
+]
+
+
+def RunTestsInPlatformMode(args, result_sink_client=None):
+  """Runs tests in platform mode and returns an exit code."""
+
+  def infra_error(message):
+    logging.fatal(message)
+    sys.exit(constants.INFRA_EXIT_CODE)
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    infra_error('%s is not yet supported in platform mode' % args.command)
+
+  ### Set up sigterm handler.
+
+  contexts_to_notify_on_sigterm = []
+  def unexpected_sigterm(_signum, _frame):
+    msg = [
+      'Received SIGTERM. Shutting down.',
+    ]
+    for live_thread in threading.enumerate():
+      # pylint: disable=protected-access
+      thread_stack = ''.join(traceback.format_stack(
+          sys._current_frames()[live_thread.ident]))
+      msg.extend([
+        'Thread "%s" (ident: %s) is currently running:' % (
+            live_thread.name, live_thread.ident),
+        thread_stack])
+
+    for context in contexts_to_notify_on_sigterm:
+      context.ReceivedSigterm()
+
+    infra_error('\n'.join(msg))
+
+  signal.signal(signal.SIGTERM, unexpected_sigterm)
+
+  ### Set up results handling.
+  # TODO(jbudorick): Rewrite results handling.
+
+  # all_raw_results is a list of lists of
+  # base_test_result.TestRunResults objects. Each instance of
+  # TestRunResults contains all test results produced by a single try,
+  # while each list of TestRunResults contains all tries in a single
+  # iteration.
+  all_raw_results = []
+
+  # all_iteration_results is a list of base_test_result.TestRunResults
+  # objects. Each instance of TestRunResults contains the last test
+  # result for each test run in that iteration.
+  all_iteration_results = []
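+  # Shape sketch (hypothetical run of two iterations, two tries each):
+  #   all_raw_results       == [[try1, try2], [try1, try2]]
+  #   all_iteration_results == [iteration1,   iteration2]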
+
+  global_results_tags = set()
+
+  json_file = tempfile.NamedTemporaryFile(delete=False)
+  json_file.close()
+
+  @contextlib.contextmanager
+  def json_finalizer():
+    try:
+      yield
+    finally:
+      if args.json_results_file and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.json_results_file)
+      elif args.isolated_script_test_output and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.isolated_script_test_output)
+      else:
+        os.remove(json_file.name)
+
+  @contextlib.contextmanager
+  def json_writer():
+    try:
+      yield
+    except Exception:
+      global_results_tags.add('UNRELIABLE_RESULTS')
+      raise
+    finally:
+      if args.isolated_script_test_output:
+        interrupted = 'UNRELIABLE_RESULTS' in global_results_tags
+        json_results.GenerateJsonTestResultFormatFile(all_raw_results,
+                                                      interrupted,
+                                                      json_file.name,
+                                                      indent=2)
+      else:
+        json_results.GenerateJsonResultsFile(
+            all_raw_results,
+            json_file.name,
+            global_tags=list(global_results_tags),
+            indent=2)
+
+      test_class_to_file_name_dict = {}
+      # Test Location is only supported for instrumentation tests as it
+      # requires the size-info file.
+      if test_instance.TestType() == 'instrumentation':
+        test_class_to_file_name_dict = _CreateClassToFileNameDict(args.test_apk)
+
+      if result_sink_client:
+        for run in all_raw_results:
+          for results in run:
+            for r in results.GetAll():
+              # Matches chrome.page_info.PageInfoViewTest#testChromePage
+              match = re.search(r'^(.+\..+)#', r.GetName())
+              test_file_name = test_class_to_file_name_dict.get(
+                  match.group(1)) if match else None
+              # Some tests put in non utf-8 char as part of the test
+              # which breaks uploads, so need to decode and re-encode.
+              result_sink_client.Post(
+                  r.GetName(), r.GetType(), r.GetDuration(),
+                  r.GetLog().decode('utf-8', 'replace').encode('utf-8'),
+                  test_file_name)
+
+  @contextlib.contextmanager
+  def upload_logcats_file():
+    try:
+      yield
+    finally:
+      if not args.logcat_output_file:
+        logging.critical('Cannot upload logcat file: no file specified.')
+      elif not os.path.exists(args.logcat_output_file):
+        logging.critical("Cannot upload logcat file: file doesn't exist.")
+      else:
+        with open(args.logcat_output_file) as src:
+          dst = logdog_helper.open_text('unified_logcats')
+          if dst:
+            shutil.copyfileobj(src, dst)
+            dst.close()
+            logging.critical(
+                'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+
+  logcats_uploader = contextlib_ext.Optional(
+      upload_logcats_file(),
+      'upload_logcats_file' in args and args.upload_logcats_file)
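+  # contextlib_ext.Optional only enters upload_logcats_file() when the
+  # flag is present and set; otherwise it is a no-op context manager.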
+
+  ### Set up test objects.
+
+  out_manager = output_manager_factory.CreateOutputManager(args)
+  env = environment_factory.CreateEnvironment(
+      args, out_manager, infra_error)
+  test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
+  test_run = test_run_factory.CreateTestRun(env, test_instance, infra_error)
+
+  contexts_to_notify_on_sigterm.append(env)
+  contexts_to_notify_on_sigterm.append(test_run)
+
+  ### Run.
+  with out_manager, json_finalizer():
+    with json_writer(), logcats_uploader, env, test_instance, test_run:
+
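+      # args.repeat is the number of extra repetitions: 0 runs the suite
+      # once, while a negative value repeats indefinitely via
+      # itertools.count().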
+      repetitions = (range(args.repeat + 1)
+                     if args.repeat >= 0 else itertools.count())
+      result_counts = collections.defaultdict(
+          lambda: collections.defaultdict(int))
+      iteration_count = 0
+      for _ in repetitions:
+        # raw_results will be populated with base_test_result.TestRunResults by
+        # test_run.RunTests(). It is immediately added to all_raw_results so
+        # that in the event of an exception, all_raw_results will already have
+        # the up-to-date results and those can be written to disk.
+        raw_results = []
+        all_raw_results.append(raw_results)
+
+        test_run.RunTests(raw_results)
+        if not raw_results:
+          all_raw_results.pop()
+          continue
+
+        iteration_results = base_test_result.TestRunResults()
+        for r in reversed(raw_results):
+          iteration_results.AddTestRunResults(r)
+        all_iteration_results.append(iteration_results)
+        iteration_count += 1
+
+        for r in iteration_results.GetAll():
+          result_counts[r.GetName()][r.GetType()] += 1
+
+        report_results.LogFull(
+            results=iteration_results,
+            test_type=test_instance.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server',
+                                     None))
+        if args.break_on_failure and not iteration_results.DidRunPass():
+          break
+
+      if iteration_count > 1:
+        # Display summary results. Per-test counts are only logged for tests
+        # with at least one non-passing run.
+        all_pass = 0
+        tot_tests = 0
+        for test_name in result_counts:
+          tot_tests += 1
+          if any(result_counts[test_name][x] for x in (
+              base_test_result.ResultType.FAIL,
+              base_test_result.ResultType.CRASH,
+              base_test_result.ResultType.TIMEOUT,
+              base_test_result.ResultType.UNKNOWN)):
+            logging.critical(
+                '%s: %s',
+                test_name,
+                ', '.join('%s %s' % (str(result_counts[test_name][i]), i)
+                          for i in base_test_result.ResultType.GetTypes()))
+          else:
+            all_pass += 1
+
+        logging.critical('%s of %s tests passed in all %s runs',
+                         str(all_pass),
+                         str(tot_tests),
+                         str(iteration_count))
+
+    if ((args.local_output or not local_utils.IsOnSwarming())
+        and not args.isolated_script_test_output):
+      with out_manager.ArchivedTempfile(
+          'test_results_presentation.html',
+          'test_results_presentation',
+          output_manager.Datatype.HTML) as results_detail_file:
+        result_html_string, _, _ = test_results_presentation.result_details(
+            json_path=json_file.name,
+            test_name=args.command,
+            cs_base_url='http://cs.chromium.org',
+            local_output=True)
+        results_detail_file.write(result_html_string.encode('utf-8'))
+        results_detail_file.flush()
+      logging.critical('TEST RESULTS: %s', results_detail_file.Link())
+
+      ui_screenshots = test_results_presentation.ui_screenshot_set(
+          json_file.name)
+      if ui_screenshots:
+        with out_manager.ArchivedTempfile(
+            'ui_screenshots.json',
+            'ui_capture',
+            output_manager.Datatype.JSON) as ui_screenshot_file:
+          ui_screenshot_file.write(ui_screenshots)
+        logging.critical('UI Screenshots: %s', ui_screenshot_file.Link())
+
+  return (0 if all(r.DidRunPass() for r in all_iteration_results)
+          else constants.ERROR_EXIT_CODE)
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(
+      title='test types', dest='command')
+
+  subp = command_parsers.add_parser(
+      'gtest',
+      help='googletest-based C++ tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddGTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'instrumentation',
+      help='InstrumentationTestCase-based Java tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddInstrumentationTestOptions(subp)
+  AddSkiaGoldTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'junit',
+      help='JUnit4-based Java tests')
+  AddCommonOptions(subp)
+  AddJUnitTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'linker',
+      help='linker tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddLinkerTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'monkey',
+      help="tests based on Android's monkey command")
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddMonkeyTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'python',
+      help='python tests based on unittest.TestCase')
+  AddCommonOptions(subp)
+  AddPythonTestOptions(subp)
+
+  args, unknown_args = parser.parse_known_args()
+  if unknown_args:
+    if hasattr(args, 'allow_unknown') and args.allow_unknown:
+      args.command_line_flags = unknown_args
+    else:
+      parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
+
+  # --replace-system-package/--remove-system-package has the potential to cause
+  # issues if --enable-concurrent-adb is set, so disallow that combination.
+  concurrent_adb_enabled = (hasattr(args, 'enable_concurrent_adb')
+                            and args.enable_concurrent_adb)
+  replacing_system_packages = (hasattr(args, 'replace_system_package')
+                               and args.replace_system_package)
+  removing_system_packages = (hasattr(args, 'system_packages_to_remove')
+                              and args.system_packages_to_remove)
+  if (concurrent_adb_enabled
+      and (replacing_system_packages or removing_system_packages)):
+    parser.error('--enable-concurrent-adb cannot be used with either '
+                 '--replace-system-package or --remove-system-package')
+
+  # --use-webview-provider has the potential to cause issues if
+  # --enable-concurrent-adb is set, so disallow that combination
+  if (hasattr(args, 'use_webview_provider') and
+      hasattr(args, 'enable_concurrent_adb') and args.use_webview_provider and
+      args.enable_concurrent_adb):
+    parser.error('--use-webview-provider and --enable-concurrent-adb cannot '
+                 'be used together')
+
+  if (getattr(args, 'coverage_on_the_fly', False)
+      and not getattr(args, 'coverage_dir', '')):
+    parser.error('--coverage-on-the-fly requires --coverage-dir')
+
+  if (hasattr(args, 'debug_socket') or
+      (hasattr(args, 'wait_for_java_debugger') and
+       args.wait_for_java_debugger)):
+    args.num_retries = 0
+
+  # Result-sink may not exist in the environment if rdb stream is not enabled.
+  result_sink_client = result_sink.TryInitClient()
+
+  try:
+    return RunTestsCommand(args, result_sink_client)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except: # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test_runner.pydeps b/src/build/android/test_runner.pydeps
new file mode 100644
index 0000000..660f8f8
--- /dev/null
+++ b/src/build/android/test_runner.pydeps
@@ -0,0 +1,226 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_proto_classes.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_time.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/atexit_with_log.py
+../../third_party/catapult/common/py_utils/py_utils/binary_manager.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/retry_util.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/common/py_utils/py_utils/ts_proxy_server.py
+../../third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/battery_utils.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/crash_handler.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_list.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/forwarder.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/ports.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/device_recovery.py
+../../third_party/catapult/devil/devil/android/tools/device_status.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/android/tools/system_app.py
+../../third_party/catapult/devil/devil/android/tools/webview_app.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/file_utils.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/colorama/src/colorama/__init__.py
+../../third_party/colorama/src/colorama/ansi.py
+../../third_party/colorama/src/colorama/ansitowin32.py
+../../third_party/colorama/src/colorama/initialise.py
+../../third_party/colorama/src/colorama/win32.py
+../../third_party/colorama/src/colorama/winterm.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../../tools/swarming_client/libs/__init__.py
+../../tools/swarming_client/libs/logdog/__init__.py
+../../tools/swarming_client/libs/logdog/bootstrap.py
+../../tools/swarming_client/libs/logdog/stream.py
+../../tools/swarming_client/libs/logdog/streamname.py
+../../tools/swarming_client/libs/logdog/varint.py
+../gn_helpers.py
+../print_python_deps.py
+../skia_gold_common/__init__.py
+../skia_gold_common/skia_gold_properties.py
+../skia_gold_common/skia_gold_session.py
+../skia_gold_common/skia_gold_session_manager.py
+../util/lib/common/chrome_test_server_spawner.py
+../util/lib/common/unittest_util.py
+convert_dex_profile.py
+devil_chromium.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+gyp/util/zipalign.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/base/__init__.py
+pylib/base/base_test_result.py
+pylib/base/environment.py
+pylib/base/environment_factory.py
+pylib/base/output_manager.py
+pylib/base/output_manager_factory.py
+pylib/base/result_sink.py
+pylib/base/test_collection.py
+pylib/base/test_exception.py
+pylib/base/test_instance.py
+pylib/base/test_instance_factory.py
+pylib/base/test_run.py
+pylib/base/test_run_factory.py
+pylib/base/test_server.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/gtest/__init__.py
+pylib/gtest/gtest_test_instance.py
+pylib/instrumentation/__init__.py
+pylib/instrumentation/instrumentation_parser.py
+pylib/instrumentation/instrumentation_test_instance.py
+pylib/instrumentation/test_result.py
+pylib/junit/__init__.py
+pylib/junit/junit_test_instance.py
+pylib/local/__init__.py
+pylib/local/device/__init__.py
+pylib/local/device/local_device_environment.py
+pylib/local/device/local_device_gtest_run.py
+pylib/local/device/local_device_instrumentation_test_run.py
+pylib/local/device/local_device_monkey_test_run.py
+pylib/local/device/local_device_test_run.py
+pylib/local/emulator/__init__.py
+pylib/local/emulator/avd.py
+pylib/local/emulator/ini.py
+pylib/local/emulator/local_emulator_environment.py
+pylib/local/emulator/proto/__init__.py
+pylib/local/emulator/proto/avd_pb2.py
+pylib/local/local_test_server_spawner.py
+pylib/local/machine/__init__.py
+pylib/local/machine/local_machine_environment.py
+pylib/local/machine/local_machine_junit_test_run.py
+pylib/monkey/__init__.py
+pylib/monkey/monkey_test_instance.py
+pylib/output/__init__.py
+pylib/output/local_output_manager.py
+pylib/output/noop_output_manager.py
+pylib/output/remote_output_manager.py
+pylib/results/__init__.py
+pylib/results/flakiness_dashboard/__init__.py
+pylib/results/flakiness_dashboard/json_results_generator.py
+pylib/results/flakiness_dashboard/results_uploader.py
+pylib/results/json_results.py
+pylib/results/presentation/__init__.py
+pylib/results/presentation/standard_gtest_merge.py
+pylib/results/presentation/test_results_presentation.py
+pylib/results/report_results.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/symbols/stack_symbolizer.py
+pylib/utils/__init__.py
+pylib/utils/chrome_proxy_utils.py
+pylib/utils/decorators.py
+pylib/utils/device_dependencies.py
+pylib/utils/dexdump.py
+pylib/utils/gold_utils.py
+pylib/utils/google_storage_helper.py
+pylib/utils/instrumentation_tracing.py
+pylib/utils/local_utils.py
+pylib/utils/logdog_helper.py
+pylib/utils/logging_utils.py
+pylib/utils/proguard.py
+pylib/utils/repo_utils.py
+pylib/utils/shared_preference_utils.py
+pylib/utils/test_filter.py
+pylib/utils/time_profile.py
+pylib/valgrind_tools.py
+test_runner.py
+tombstones.py
diff --git a/src/build/android/test_wrapper/logdog_wrapper.py b/src/build/android/test_wrapper/logdog_wrapper.py
new file mode 100755
index 0000000..782d5d8
--- /dev/null
+++ b/src/build/android/test_wrapper/logdog_wrapper.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for adding logdog streaming support to swarming tasks."""
+
+import argparse
+import contextlib
+import logging
+import os
+import signal
+import subprocess
+import sys
+
+_SRC_PATH = os.path.abspath(os.path.join(
+    os.path.dirname(__file__), '..', '..', '..'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
+                             'py_utils'))
+
+from devil.utils import signal_handler
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+
+PROJECT = 'chromium'
+OUTPUT = 'logdog'
+COORDINATOR_HOST = 'luci-logdog.appspot.com'
+SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
+                        '/service-account-luci-logdog-publisher.json')
+LOGDOG_TERMINATION_TIMEOUT = 30
+
+
+def CommandParser():
+  """Returns an argparse parser for the wrapper's command-line arguments."""
+  parser = argparse.ArgumentParser()
+  wrapped = parser.add_mutually_exclusive_group()
+  wrapped.add_argument(
+      '--target',
+      help='The test target to be run. If neither target nor script are set,'
+      ' any extra args passed to this script are assumed to be the'
+      ' full test command to run.')
+  wrapped.add_argument(
+      '--script',
+      help='The script target to be run. If neither target nor script are set,'
+      ' any extra args passed to this script are assumed to be the'
+      ' full test command to run.')
+  parser.add_argument('--logdog-bin-cmd', required=True,
+                      help='The logdog bin cmd.')
+  return parser
+
+
+def CreateStopTestsMethod(proc):
+  def StopTests(signum, _frame):
+    logging.error('Forwarding signal %s to test process', str(signum))
+    proc.send_signal(signum)
+  return StopTests
+
+
+@contextlib.contextmanager
+def NoLeakingProcesses(popen):
+  try:
+    yield popen
+  finally:
+    if popen is not None:
+      try:
+        if popen.poll() is None:
+          popen.kill()
+      except OSError:
+        logging.warning('Failed to kill %s. Process may be leaked.',
+                        str(popen.pid))
+
+
+def main():
+  parser = CommandParser()
+  args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])
+
+  logging.basicConfig(level=logging.INFO)
+  if args.target:
+    test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v']
+    test_cmd += extra_cmd_args
+  elif args.script:
+    test_cmd = [args.script]
+    test_cmd += extra_cmd_args
+  else:
+    test_cmd = extra_cmd_args
+
+  test_env = dict(os.environ)
+  logdog_cmd = []
+
+  with tempfile_ext.NamedTemporaryDirectory(
+      prefix='tmp_android_logdog_wrapper') as temp_directory:
+    if not os.path.exists(args.logdog_bin_cmd):
+      logging.error(
+          'Logdog binary %s unavailable. Unable to create logdog client',
+          args.logdog_bin_cmd)
+    else:
+      streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
+                                                  'butler.sock')
+      prefix = os.path.join('android', 'swarming', 'logcats',
+                            os.environ.get('SWARMING_TASK_ID'))
+
+      logdog_cmd = [
+          args.logdog_bin_cmd,
+          '-project', PROJECT,
+          '-output', OUTPUT,
+          '-prefix', prefix,
+          '--service-account-json', SERVICE_ACCOUNT_JSON,
+          '-coordinator-host', COORDINATOR_HOST,
+          'serve',
+          '-streamserver-uri', streamserver_uri]
+      test_env.update({
+          'LOGDOG_STREAM_PROJECT': PROJECT,
+          'LOGDOG_STREAM_PREFIX': prefix,
+          'LOGDOG_STREAM_SERVER_PATH': streamserver_uri,
+          'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST,
+      })
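+      # The LOGDOG_* variables let the test process (and any logdog stream
+      # clients it spawns) locate the butler stream server started below.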
+
+    logdog_proc = None
+    if logdog_cmd:
+      logdog_proc = subprocess.Popen(logdog_cmd)
+
+    with NoLeakingProcesses(logdog_proc):
+      with NoLeakingProcesses(
+          subprocess.Popen(test_cmd, env=test_env)) as test_proc:
+        with signal_handler.SignalHandler(signal.SIGTERM,
+                                          CreateStopTestsMethod(test_proc)):
+          result = test_proc.wait()
+          if logdog_proc:
+            def logdog_stopped():
+              return logdog_proc.poll() is not None
+
+            logdog_proc.terminate()
+            timeout_retry.WaitFor(logdog_stopped, wait_period=1,
+                                  max_tries=LOGDOG_TERMINATION_TIMEOUT)
+
+            # If logdog_proc hasn't finished by this point, allow
+            # NoLeakingProcesses to kill it.
+
+  return result
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/test_wrapper/logdog_wrapper.pydeps b/src/build/android/test_wrapper/logdog_wrapper.pydeps
new file mode 100644
index 0000000..0e8d039
--- /dev/null
+++ b/src/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -0,0 +1,12 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/test_wrapper --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py
+../../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+logdog_wrapper.py
diff --git a/src/build/android/tests/symbolize/Makefile b/src/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000..4fc53da
--- /dev/null
+++ b/src/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
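+# Pattern rule: builds lib<name>.so from <name>.cc ($< is the source file,
+# $@ the target).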
+lib%.so: %.cc
+	$(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/src/build/android/tests/symbolize/a.cc b/src/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000..f0c7ca4
--- /dev/null
+++ b/src/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+  A();
+  void Foo(int i);
+  void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/src/build/android/tests/symbolize/b.cc b/src/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000..db87520
--- /dev/null
+++ b/src/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+  B();
+  void Baz(float f);
+  void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/src/build/android/tests/symbolize/liba.so b/src/build/android/tests/symbolize/liba.so
new file mode 100644
index 0000000..79cb739
--- /dev/null
+++ b/src/build/android/tests/symbolize/liba.so
Binary files differ
diff --git a/src/build/android/tests/symbolize/libb.so b/src/build/android/tests/symbolize/libb.so
new file mode 100644
index 0000000..7cf01d4
--- /dev/null
+++ b/src/build/android/tests/symbolize/libb.so
Binary files differ
diff --git a/src/build/android/tombstones.py b/src/build/android/tombstones.py
new file mode 100755
index 0000000..082e7c1
--- /dev/null
+++ b/src/build/android/tombstones.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Finds the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes tombstone file was created with current symbols.
+
+import argparse
+import datetime
+import logging
+import os
+import sys
+
+from multiprocessing.pool import ThreadPool
+
+import devil_chromium
+
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.symbols import stack_symbolizer
+
+
+_TZ_UTC = {'TZ': 'UTC'}
+
+
+def _ListTombstones(device):
+  """List the tombstone files on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Yields:
+    Tuples of (tombstone filename, date time of file on device).
+  """
+  try:
+    if not device.PathExists('/data/tombstones', as_root=True):
+      return
+    entries = device.StatDirectory('/data/tombstones', as_root=True)
+    for entry in entries:
+      if 'tombstone' in entry['filename']:
+        yield (entry['filename'],
+               datetime.datetime.fromtimestamp(entry['st_mtime']))
+  except device_errors.CommandFailedError:
+    logging.exception('Could not retrieve tombstones.')
+  except device_errors.DeviceUnreachableError:
+    logging.exception('Device unreachable retrieving tombstones.')
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timed out retrieving tombstones.')
+
+
+def _GetDeviceDateTime(device):
+  """Determine the date time on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand(
+      ['date'], check_return=True, env=_TZ_UTC)
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieve the tombstone data from the device
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to retrieve
+
+  Returns:
+    A list of lines
+  """
+  return device.ReadFile(
+      '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: The tombstone to delete.
+  """
+  return device.RunShellCommand(
+      ['rm', '/data/tombstones/' + tombstone_file],
+      as_root=True, check_return=True)
+
+
+def _ResolveTombstone(args):
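+  # |args| is a [tombstone, symbolizer] pair rather than two parameters so
+  # that this function can be passed directly to ThreadPool.map below.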
+  tombstone = args[0]
+  tombstone_symbolizer = args[1]
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            str(tombstone['device_now'] - tombstone['time']) +
+            ' Device: ' + tombstone['serial']]
+  logging.info('\n'.join(lines))
+  logging.info('Resolving...')
+  lines += tombstone_symbolizer.ExtractAndResolveNativeStackTraces(
+      tombstone['data'],
+      tombstone['device_abi'],
+      tombstone['stack'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones, tombstone_symbolizer):
+  """Resolve a list of tombstones.
+
+  Args:
+    jobs: the number of jobs to use with multithread.
+    tombstones: a list of tombstones.
+    tombstone_symbolizer: the symbolizer used to resolve native stacks.
+  """
+  if not tombstones:
+    logging.warning('No tombstones to resolve.')
+    return []
+  if len(tombstones) == 1:
+    data = [_ResolveTombstone([tombstones[0], tombstone_symbolizer])]
+  else:
+    pool = ThreadPool(jobs)
+    data = pool.map(
+        _ResolveTombstone,
+        [[tombstone, tombstone_symbolizer] for tombstone in tombstones])
+    pool.close()
+    pool.join()
+  resolved_tombstones = []
+  for tombstone in data:
+    resolved_tombstones.extend(tombstone)
+  return resolved_tombstones
+
+
+def _GetTombstonesForDevice(device, resolve_all_tombstones,
+                            include_stack_symbols,
+                            wipe_tombstones):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones.')
+    return ret
+
+  # Sort the tombstones in date order, descending.
+  all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
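+  # (The 'cmp' keyword and builtin are Python 2-only; this script targets
+  # Python 2 via its vpython shebang.)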
+
+  # Only resolve the most recent unless --all-tombstones given.
+  tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  try:
+    for tombstone_file, tombstone_time in tombstones:
+      ret += [{'serial': str(device),
+               'device_abi': device.product_cpu_abi,
+               'device_now': device_now,
+               'time': tombstone_time,
+               'file': tombstone_file,
+               'stack': include_stack_symbols,
+               'data': _GetTombstoneData(device, tombstone_file)}]
+  except device_errors.CommandFailedError:
+    for entry in device.StatDirectory(
+        '/data/tombstones', as_root=True, timeout=60):
+      logging.info('%s: %s', str(device), entry)
+    raise
+
+  # Erase all the tombstones if desired.
+  if wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def ClearAllTombstones(device):
+  """Clear all tombstones in the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+  """
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones to clear.')
+
+  for tombstone_file, _ in all_tombstones:
+    _EraseTombstone(device, tombstone_file)
+
+
+def ResolveTombstones(device, resolve_all_tombstones, include_stack_symbols,
+                      wipe_tombstones, jobs=4, apk_under_test=None,
+                      tombstone_symbolizer=None):
+  """Resolve tombstones in the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    resolve_all_tombstone: Whether to resolve every tombstone.
+    include_stack_symbols: Whether to include symbols for stack data.
+    wipe_tombstones: Whether to wipe tombstones.
+    jobs: Number of jobs to use when processing multiple crash stacks.
+
+  Returns:
+    A list of resolved tombstones.
+  """
+  return _ResolveTombstones(jobs,
+                            _GetTombstonesForDevice(device,
+                                                    resolve_all_tombstones,
+                                                    include_stack_symbols,
+                                                    wipe_tombstones),
+                            (tombstone_symbolizer
+                             or stack_symbolizer.Symbolizer(apk_under_test)))
+
+
+def main():
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--device',
+                      help='The serial number of the device. If not '
+                           'specified, all devices will be used.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  parser.add_argument('-a', '--all-tombstones', action='store_true',
+                      help='Resolve symbols for all tombstones, rather than '
+                           'just the most recent.')
+  parser.add_argument('-s', '--stack', action='store_true',
+                      help='Also include symbols for stack data.')
+  parser.add_argument('-w', '--wipe-tombstones', action='store_true',
+                      help='Erase all tombstones from the device after '
+                           'processing.')
+  parser.add_argument('-j', '--jobs', type=int,
+                      default=4,
+                      help='Number of jobs to use when processing multiple '
+                           'crash stacks.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--adb-path', type=os.path.abspath,
+                      help='Path to the adb binary.')
+  args = parser.parse_args()
+
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+  devil_chromium.Initialize(output_directory=constants.GetOutDirectory(),
+                            adb_path=args.adb_path)
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+
+  if args.device:
+    devices = [device_utils.DeviceUtils(args.device)]
+  else:
+    devices = device_utils.DeviceUtils.HealthyDevices(denylist)
+
+  # This must be done serially because strptime can hit a race condition if
+  # used for the first time in a multithreaded environment.
+  # http://bugs.python.org/issue7980
+  for device in devices:
+    resolved_tombstones = ResolveTombstones(
+        device, args.all_tombstones,
+        args.stack, args.wipe_tombstones, args.jobs)
+    for line in resolved_tombstones:
+      logging.info(line)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/update_deps/update_third_party_deps.py b/src/build/android/update_deps/update_third_party_deps.py
new file mode 100755
index 0000000..3a869c4
--- /dev/null
+++ b/src/build/android/update_deps/update_third_party_deps.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Uploads or downloads third party libraries to or from Google Cloud Storage.
+
+This script will only work for Android checkouts.
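+
+Example usage (bucket and paths are illustrative):
+
+  update_third_party_deps.py upload -b chromium-android-tools/foo \
+      -l third_party/foo -f third_party/foo/foo.jar
+
+  update_third_party_deps.py download -b chromium-android-tools/foo \
+      -l third_party/foo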
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+
+sys.path.append(os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(
+    os.path.abspath(
+        os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools')))
+import download_from_google_storage
+import upload_to_google_storage
+
+
+def _AddBasicArguments(parser):
+  parser.add_argument(
+      '--sdk-root', default=constants.ANDROID_SDK_ROOT,
+      help='base path to the Android SDK root')
+  parser.add_argument(
+      '-v', '--verbose', action='store_true', help='print debug information')
+  parser.add_argument(
+      '-b', '--bucket-path', required=True,
+      help='The path of the lib file in Google Cloud Storage.')
+  parser.add_argument(
+      '-l', '--local-path', required=True,
+      help='The base path of the third_party directory')
+
+
+def _CheckPaths(bucket_path, local_path):
+  if bucket_path.startswith('gs://'):
+    bucket_url = bucket_path
+  else:
+    bucket_url = 'gs://%s' % bucket_path
+  local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path)
+  if not os.path.isdir(local_path):
+    raise IOError(
+        'The library local path is not a valid directory: %s' % local_path)
+  return bucket_url, local_path
+
+
+def _CheckFileList(local_path, file_list):
+  local_path = os.path.abspath(local_path)
+  abs_path_list = [os.path.abspath(f) for f in file_list]
+  for f in abs_path_list:
+    if os.path.commonprefix([f, local_path]) != local_path:
+      raise IOError(
+          '%s in the arguments is not a descendant of the specified '
+          'directory %s' % (f, local_path))
+  return abs_path_list
+
+
+def _PurgeSymlinks(local_path):
+  for dirpath, _, filenames in os.walk(local_path):
+    for f in filenames:
+      path = os.path.join(dirpath, f)
+      if os.path.islink(path):
+        os.remove(path)
+
+
+def Upload(arguments):
+  """Upload files in a third_party directory to google storage"""
+  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+                                       arguments.local_path)
+  file_list = _CheckFileList(local_path, arguments.file_list)
+  return upload_to_google_storage.upload_to_google_storage(
+      input_filenames=file_list,
+      base_url=bucket_url,
+      gsutil=arguments.gsutil,
+      force=False,
+      use_md5=False,
+      num_threads=1,
+      skip_hashing=False,
+      gzip=None)
+
+
+def Download(arguments):
+  """Download files based on sha1 files in a third_party dir from gcs"""
+  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+                                       arguments.local_path)
+  _PurgeSymlinks(local_path)
+  return download_from_google_storage.download_from_google_storage(
+      local_path,
+      bucket_url,
+      gsutil=arguments.gsutil,
+      num_threads=1,
+      directory=True,
+      recursive=True,
+      force=False,
+      output=None,
+      ignore_errors=False,
+      sha1_file=None,
+      verbose=arguments.verbose,
+      auto_platform=False,
+      extract=False)
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  subparsers = parser.add_subparsers(title='commands')
+  download_parser = subparsers.add_parser(
+      'download', help='download the library from the cloud storage')
+  _AddBasicArguments(download_parser)
+  download_parser.set_defaults(func=Download)
+
+  upload_parser = subparsers.add_parser(
+      'upload', help='find all jar files in a third_party directory and ' +
+                     'upload them to cloud storage')
+  _AddBasicArguments(upload_parser)
+  upload_parser.set_defaults(func=Upload)
+  upload_parser.add_argument(
+      '-f', '--file-list', nargs='+', required=True,
+      help='A list of base paths for files in third_party to upload.')
+
+  arguments = parser.parse_args(argv)
+  if not os.path.isdir(arguments.sdk_root):
+    logging.debug('Did not find the Android SDK root directory at "%s".',
+                  arguments.sdk_root)
+    logging.info('Skipping, not on an Android checkout.')
+    return 0
+
+  arguments.gsutil = download_from_google_storage.Gsutil(
+      download_from_google_storage.GSUTIL_DEFAULT_PATH)
+  return arguments.func(arguments)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/android/update_verification.py b/src/build/android/update_verification.py
new file mode 100755
index 0000000..3d478f4
--- /dev/null
+++ b/src/build/android/update_verification.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import apk_helper
+from devil.android import device_denylist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+  device.Install(old_apk)
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  device.adb.Backup(app_data, packages=[package_name])
+  logging.critical('Application data saved to %s', app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+  device.Install(old_apk)
+  device.adb.Restore(app_data)
+  # The restore command is not synchronous.
+  raw_input('Select "Restore my data" on the device. Then press enter to '
+            'continue.')
+  if not device.IsApplicationInstalled(package_name):
+    raise Exception('Expected package %s to already be installed. '
+                    'Package name might have changed!' % package_name)
+
+  logging.info('Verifying that %s can be overinstalled.', new_apk)
+  device.adb.Install(new_apk, reinstall=True)
+  logging.critical('Successfully updated to the new apk. Please verify that '
+                   'the application data is preserved.')
+
+def main():
+  parser = argparse.ArgumentParser(
+      description="Script to do semi-automated upgrade testing.")
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Print verbose log information.')
+  parser.add_argument('--denylist-file', help='Device denylist JSON file.')
+  command_parsers = parser.add_subparsers(dest='command')
+
+  subparser = command_parsers.add_parser('create_app_data')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup should be '
+                           'saved to.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  subparser = command_parsers.add_parser('test_update')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--new-apk', required=True,
+                         help='Path to apk to update to.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup is saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  args = parser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devil_chromium.Initialize()
+
+  denylist = (device_denylist.Denylist(args.denylist_file)
+              if args.denylist_file else None)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(denylist)
+  if not devices:
+    raise device_errors.NoDevicesError()
+  device = devices[0]
+  logging.info('Using device %s for testing.', str(device))
+
+  package_name = (args.package_name if args.package_name
+                  else apk_helper.GetPackageName(args.old_apk))
+  if args.command == 'create_app_data':
+    CreateAppData(device, args.old_apk, args.app_data, package_name)
+  elif args.command == 'test_update':
+    TestUpdate(
+        device, args.old_apk, args.new_apk, args.app_data, package_name)
+  else:
+    raise Exception('Unknown test command: %s' % args.command)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/video_recorder.py b/src/build/android/video_recorder.py
new file mode 100755
index 0000000..6c54e7a
--- /dev/null
+++ b/src/build/android/video_recorder.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env vpython
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import video_recorder
+
+if __name__ == '__main__':
+  devil_chromium.Initialize()
+  sys.exit(video_recorder.main())
diff --git a/src/build/apple/README.md b/src/build/apple/README.md
new file mode 100644
index 0000000..f60185d
--- /dev/null
+++ b/src/build/apple/README.md
@@ -0,0 +1,12 @@
+# About
+
+`//build/apple` contains:
+  * GN templates and configurations shared by Apple platforms
+  * Python build scripts shared by Apple platforms
+
+This directory should only contain templates, configurations and scripts
+that are used exclusively on Apple platforms (currently iOS and macOS),
+and that are not specific to either one of them.
+
+If a template, configuration or script is limited to only iOS or macOS,
+it should instead be located in `//build/ios` or `//build/mac`.
diff --git a/src/build/apple/apple_info_plist.gni b/src/build/apple/apple_info_plist.gni
new file mode 100644
index 0000000..fe51773
--- /dev/null
+++ b/src/build/apple/apple_info_plist.gni
@@ -0,0 +1,60 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/apple/compile_plist.gni")
+
+# The base template used to generate Info.plist files for iOS and Mac apps and
+# frameworks.
+#
+# Arguments
+#
+#     plist_templates:
+#         string array, paths to plist files which will be used for the bundle.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     format:
+#         string, the format to `plutil -convert` the plist to when
+#         generating the output.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+#
+#     output_name:
+#         (optional) string, name of the generated plist file, defaults to
+#         "$target_gen_dir/$target_name.plist".
+template("apple_info_plist") {
+  assert(defined(invoker.executable_name),
+         "The executable_name must be specified for $target_name")
+  executable_name = invoker.executable_name
+
+  compile_plist(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "plist_templates",
+                             "testonly",
+                             "deps",
+                             "visibility",
+                             "format",
+                           ])
+
+    if (defined(invoker.output_name)) {
+      output_name = invoker.output_name
+    } else {
+      output_name = "$target_gen_dir/$target_name.plist"
+    }
+
+    substitutions = [
+      "EXECUTABLE_NAME=$executable_name",
+      "GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
+      "PRODUCT_NAME=$executable_name",
+    ]
+    if (defined(invoker.extra_substitutions)) {
+      substitutions += invoker.extra_substitutions
+    }
+  }
+}
diff --git a/src/build/apple/compile_entitlements.gni b/src/build/apple/compile_entitlements.gni
new file mode 100644
index 0000000..006d5ac
--- /dev/null
+++ b/src/build/apple/compile_entitlements.gni
@@ -0,0 +1,51 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/apple/compile_plist.gni")
+
+# Template to merge multiple .entitlements files performing variable
+# substitutions.
+#
+# Arguments
+#
+#     entitlements_templates:
+#         string array, paths to entitlements files which will be used for the
+#         bundle.
+#
+#     substitutions:
+#         string array, 'key=value' pairs used to replace ${key} by value
+#         when generating the output plist file.
+#
+#     output_name:
+#         string, name of the generated entitlements file.
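+#
+# Example (target name and values are illustrative):
+#
+#     compile_entitlements("app_entitlements") {
+#       entitlements_templates = [ "//app/app.entitlements" ]
+#       substitutions = [ "BUNDLE_IDENTIFIER=com.example.app" ]
+#       output_name = "$target_gen_dir/app.entitlements"
+#     }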
+template("compile_entitlements") {
+  assert(defined(invoker.entitlements_templates),
+         "A list of template plist files must be specified for $target_name")
+
+  compile_plist(target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "entitlements_templates",
+                             "format",
+                             "plist_templates",
+                           ])
+
+    plist_templates = invoker.entitlements_templates
+
+    # Entitlements files are always encoded in xml1.
+    format = "xml1"
+
+    # Entitlements files use unsubstituted variables, so define substitutions
+    # to leave those variables untouched.
+    if (!defined(substitutions)) {
+      substitutions = []
+    }
+
+    substitutions += [
+      "AppIdentifierPrefix=\$(AppIdentifierPrefix)",
+      "CFBundleIdentifier=\$(CFBundleIdentifier)",
+    ]
+  }
+}
diff --git a/src/build/apple/compile_plist.gni b/src/build/apple/compile_plist.gni
new file mode 100644
index 0000000..90485b6
--- /dev/null
+++ b/src/build/apple/compile_plist.gni
@@ -0,0 +1,76 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Template to merge multiple plist files and perform variable substitutions.
+#
+# Arguments
+#
+#     plist_templates:
+#         string array, paths to plist files which will be used for the bundle.
+#
+#     format:
+#         string, the format to `plutil -convert` the plist to when
+#         generating the output.
+#
+#     substitutions:
+#         string array, 'key=value' pairs used to replace ${key} by value
+#         when generating the output plist file.
+#
+#     output_name:
+#         string, name of the generated plist file.
+template("compile_plist") {
+  assert(defined(invoker.plist_templates),
+         "A list of template plist files must be specified for $target_name")
+  assert(defined(invoker.format),
+         "The plist format must be specified for $target_name")
+  assert(defined(invoker.substitutions),
+         "A list of key=value pairs must be specified for $target_name")
+  assert(defined(invoker.output_name),
+         "The name of the output file must be specified for $target_name")
+
+  _output_name = invoker.output_name
+  _merged_name = get_path_info(_output_name, "dir") + "/" +
+                 get_path_info(_output_name, "name") + "_merged." +
+                 get_path_info(_output_name, "extension")
+
+  _merge_target = target_name + "_merge"
+
+  action(_merge_target) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+
+    script = "//build/apple/plist_util.py"
+    sources = invoker.plist_templates
+    outputs = [ _merged_name ]
+    args = [
+             "merge",
+             "-f=" + invoker.format,
+             "-o=" + rebase_path(_merged_name, root_build_dir),
+           ] + rebase_path(invoker.plist_templates, root_build_dir)
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    script = "//build/apple/plist_util.py"
+    sources = [ _merged_name ]
+    outputs = [ _output_name ]
+    args = [
+      "substitute",
+      "-f=" + invoker.format,
+      "-o=" + rebase_path(_output_name, root_build_dir),
+      "-t=" + rebase_path(_merged_name, root_build_dir),
+    ]
+    foreach(_substitution, invoker.substitutions) {
+      args += [ "-s=$_substitution" ]
+    }
+    deps = [ ":$_merge_target" ]
+  }
+}
diff --git a/src/build/apple/convert_plist.gni b/src/build/apple/convert_plist.gni
new file mode 100644
index 0000000..a1134d9
--- /dev/null
+++ b/src/build/apple/convert_plist.gni
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Convert plist file to given format.
+#
+# Arguments
+#
+#   source:
+#     string, path to the plist file to convert
+#
+#   output:
+#     string, path to the converted plist, must be under $root_build_dir
+#
+#   format:
+#     string, the format to convert the plist to. Either "binary1" or "xml1".
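+#
+# Example (target name and paths are illustrative):
+#
+#   convert_plist("convert_info_plist") {
+#     source = "//app/Info.plist"
+#     output = "$root_build_dir/Info-binary.plist"
+#     format = "binary1"
+#   }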
+template("convert_plist") {
+  assert(defined(invoker.source), "source must be defined for $target_name")
+  assert(defined(invoker.output), "output must be defined for $target_name")
+  assert(defined(invoker.format), "format must be defined for $target_name")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "visibility",
+                             "testonly",
+                             "deps",
+                           ])
+
+    script = "//build/apple/plist_util.py"
+    sources = [ invoker.source ]
+    outputs = [ invoker.output ]
+    args = [
+      "merge",
+      "--format=${invoker.format}",
+      "-o",
+      rebase_path(invoker.output, root_build_dir),
+      rebase_path(invoker.source, root_build_dir),
+    ]
+  }
+}
diff --git a/src/build/apple/plist_util.py b/src/build/apple/plist_util.py
new file mode 100644
index 0000000..54cf461
--- /dev/null
+++ b/src/build/apple/plist_util.py
@@ -0,0 +1,265 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import codecs
+import plistlib
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import shlex
+
+if sys.version_info.major < 3:
+  basestring_compat = basestring
+else:
+  basestring_compat = str
+
+# Xcode substitutes variables like ${PRODUCT_NAME} or $(PRODUCT_NAME) when
+# compiling Info.plist. It also supports modifiers like :identifier
+# or :rfc1034identifier. SUBSTITUTION_REGEXP_LIST is a list of regular
+# expressions matching a variable substitution pattern with an optional
+# modifier, while INVALID_CHARACTER_REGEXP matches all characters that are
+# not valid in an "identifier" value (used when applying the modifier).
+INVALID_CHARACTER_REGEXP = re.compile(r'[_/\s]')
+SUBSTITUTION_REGEXP_LIST = (
+    re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}'),
+    re.compile(r'\$\((?P<id>[^}]*?)(?P<modifier>:[^}]*)?\)'),
+)
+
+
+class SubstitutionError(Exception):
+  def __init__(self, key):
+    super(SubstitutionError, self).__init__()
+    self.key = key
+
+  def __str__(self):
+    return "SubstitutionError: {}".format(self.key)
+
+
+def InterpolateString(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a string
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new string with all variable references ${VARIABLES} replaced by their
+    value in |substitutions|. Raises SubstitutionError if a variable has no
+    substitution.
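+
+  For example, with the ":identifier" modifier:
+
+  >>> InterpolateString('${PRODUCT_NAME:identifier}', {'PRODUCT_NAME': 'a b'})
+  'a_b'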
+  """
+
+  def repl(match):
+    variable = match.group('id')
+    if variable not in substitutions:
+      raise SubstitutionError(variable)
+    # Some values need to be identifiers, and thus the variable references
+    # may contain :modifier attributes indicating how they should be
+    # converted to identifiers ("identifier" replaces all invalid characters
+    # with '_' and "rfc1034identifier" replaces them with '-' to also form
+    # valid URIs).
+    modifier = match.group('modifier')
+    if modifier == ':identifier':
+      return INVALID_CHARACTER_REGEXP.sub('_', substitutions[variable])
+    elif modifier == ':rfc1034identifier':
+      return INVALID_CHARACTER_REGEXP.sub('-', substitutions[variable])
+    else:
+      return substitutions[variable]
+
+  for substitution_regexp in SUBSTITUTION_REGEXP_LIST:
+    value = substitution_regexp.sub(repl, value)
+  return value
+
+
+def Interpolate(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a value, can be a dictionary, list, string or other
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new value with all variable references ${VARIABLES} replaced by their
+    value in |substitutions|. Raises SubstitutionError if a variable has no
+    substitution.
+  """
+  if isinstance(value, dict):
+    return {k: Interpolate(v, substitutions) for k, v in value.items()}
+  if isinstance(value, list):
+    return [Interpolate(v, substitutions) for v in value]
+  if isinstance(value, basestring_compat):
+    return InterpolateString(value, substitutions)
+  return value
+
+
+def LoadPList(path):
+  """Loads Plist at |path| and returns it as a dictionary."""
+  if sys.version_info.major == 2:
+    fd, name = tempfile.mkstemp()
+    try:
+      subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
+      with os.fdopen(fd, 'rb') as f:
+        return plistlib.readPlist(f)
+    finally:
+      os.unlink(name)
+  else:
+    with open(path, 'rb') as f:
+      return plistlib.load(f)
+
+
+def SavePList(path, format, data):
+  """Saves |data| as a Plist to |path| in the specified |format|."""
+  # The code below does not replace the destination file but updates it in
+  # place, so if more than one hardlink points to the destination, all of
+  # them would be modified. This is not what is expected, so delete the
+  # destination file if it does exist.
+  if os.path.exists(path):
+    os.unlink(path)
+  if sys.version_info.major == 2:
+    fd, name = tempfile.mkstemp()
+    try:
+      with os.fdopen(fd, 'wb') as f:
+        plistlib.writePlist(data, f)
+      subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
+    finally:
+      os.unlink(name)
+  else:
+    with open(path, 'wb') as f:
+      plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML}
+      plistlib.dump(data, f, fmt=plist_format[format])
+
+
+def MergePList(plist1, plist2):
+  """Merges |plist1| with |plist2| recursively.
+
+  Creates a new dictionary representing a Property List (.plist) file by
+  merging the two dictionaries |plist1| and |plist2| recursively (only for
+  dictionary values). List values will be concatenated.
+
+  Args:
+    plist1: a dictionary representing a Property List (.plist) file
+    plist2: a dictionary representing a Property List (.plist) file
+
+  Returns:
+    A new dictionary representing a Property List (.plist) file created by
+    merging |plist1| with |plist2|. If any value is a dictionary, they are
+    merged recursively, otherwise the |plist2| value is used. If values are
+    lists, they are concatenated.
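+
+  For example:
+
+  >>> MergePList({'a': [1], 'b': 'x'}, {'a': [2], 'b': 'y'})
+  {'a': [1, 2], 'b': 'y'}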
+  """
+  result = plist1.copy()
+  for key, value in plist2.items():
+    if isinstance(value, dict):
+      old_value = result.get(key)
+      if isinstance(old_value, dict):
+        value = MergePList(old_value, value)
+    if isinstance(value, list):
+      value = plist1.get(key, []) + plist2.get(key, [])
+    result[key] = value
+  return result
+
+
+class Action(object):
+  """Class implementing one action supported by the script."""
+
+  @classmethod
+  def Register(cls, subparsers):
+    parser = subparsers.add_parser(cls.name, help=cls.help)
+    parser.set_defaults(func=cls._Execute)
+    cls._Register(parser)
+
+
+class MergeAction(Action):
+  """Class to merge multiple plist files."""
+
+  name = 'merge'
+  help = 'merge multiple plist files'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument('-o',
+                        '--output',
+                        required=True,
+                        help='path to the output plist file')
+    parser.add_argument('-f',
+                        '--format',
+                        required=True,
+                        choices=('xml1', 'binary1'),
+                        help='format of the plist file to generate')
+    parser.add_argument(
+        '-x',
+        '--xcode-version',
+        help='version of Xcode, ignored (can be used to force rebuild)')
+    parser.add_argument('path', nargs="+", help='path to plist files to merge')
+
+  @staticmethod
+  def _Execute(args):
+    data = {}
+    for filename in args.path:
+      data = MergePList(data, LoadPList(filename))
+    SavePList(args.output, args.format, data)
+
+
+class SubstituteAction(Action):
+  """Class implementing the variable substitution in a plist file."""
+
+  name = 'substitute'
+  help = 'perform pattern substitution in a plist file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument('-o',
+                        '--output',
+                        required=True,
+                        help='path to the output plist file')
+    parser.add_argument('-t',
+                        '--template',
+                        required=True,
+                        help='path to the template file')
+    parser.add_argument('-s',
+                        '--substitution',
+                        action='append',
+                        default=[],
+                        help='substitution rule in the format key=value')
+    parser.add_argument('-f',
+                        '--format',
+                        required=True,
+                        choices=('xml1', 'binary1'),
+                        help='format of the plist file to generate')
+    parser.add_argument(
+        '-x',
+        '--xcode-version',
+        help='version of Xcode, ignored (can be used to force rebuild)')
+
+  @staticmethod
+  def _Execute(args):
+    substitutions = {}
+    for substitution in args.substitution:
+      key, value = substitution.split('=', 1)
+      substitutions[key] = value
+    data = Interpolate(LoadPList(args.template), substitutions)
+    SavePList(args.output, args.format, data)
+
+
+def Main():
+  # Cache this codec so that plistlib can find it. See
+  # https://crbug.com/1005190#c2 for more details.
+  codecs.lookup('utf-8')
+
+  parser = argparse.ArgumentParser(description='manipulate plist files')
+  subparsers = parser.add_subparsers()
+
+  for action in [MergeAction, SubstituteAction]:
+    action.Register(subparsers)
+
+  args = parser.parse_args()
+  args.func(args)
+
+
+if __name__ == '__main__':
+  # TODO(https://crbug.com/941669): Temporary workaround until all scripts use
+  # python3 by default.
+  if sys.version_info[0] < 3:
+    os.execvp('python3', ['python3'] + sys.argv)
+  sys.exit(Main())
diff --git a/src/build/apple/tweak_info_plist.gni b/src/build/apple/tweak_info_plist.gni
new file mode 100644
index 0000000..33f22ca
--- /dev/null
+++ b/src/build/apple/tweak_info_plist.gni
@@ -0,0 +1,86 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/lastchange.gni")
+
+# Template to run the tweak_info_plist.py script on a plist.
+#
+# Arguments:
+#
+#     info_plist:
+#         (optional), string, the plist to tweak.
+#
+#     info_plists:
+#         (optional), list of string, the plist files to merge and tweak.
+#
+#     args:
+#         (optional), list of string, the arguments to pass to the
+#         tweak_info_plist.py script.
+#
+# Callers should use get_target_outputs() to get the output name. One of
+# info_plist or info_plists must be specified.
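+#
+# Example (target name and values are illustrative):
+#
+#     tweak_info_plist("app_plist") {
+#       info_plist = "//app/Info.plist"
+#       args = [ "--breakpad=0", "--keystone=0", "--scm=1" ]
+#     }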
+template("tweak_info_plist") {
+  _output_name = "$target_gen_dir/${target_name}_tweaked.plist"
+
+  if (defined(invoker.info_plists)) {
+    assert(!defined(invoker.info_plist),
+           "Cannot have both info_plist and info_plists for $target_name")
+
+    _source_name = "$target_gen_dir/${target_name}_merged.plist"
+    _deps = [ ":" + target_name + "_merge_plist" ]
+
+    action(target_name + "_merge_plist") {
+      forward_variables_from(invoker,
+                             [
+                               "testonly",
+                               "deps",
+                             ])
+      script = "//build/apple/plist_util.py"
+      sources = invoker.info_plists
+      outputs = [ _source_name ]
+      args = [
+               "merge",
+               "-f=xml1",
+               "-o=" + rebase_path(_source_name, root_build_dir),
+             ] + rebase_path(invoker.info_plists, root_build_dir)
+    }
+  } else {
+    assert(defined(invoker.info_plist),
+           "The info_plist must be specified in $target_name")
+
+    _source_name = invoker.info_plist
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps += invoker.deps
+    }
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "args",
+                             "testonly",
+                           ])
+    script = "//build/apple/tweak_info_plist.py"
+    inputs = [
+      script,
+      "//build/util/version.py",
+      lastchange_file,
+      "//chrome/VERSION",
+    ]
+    sources = [ _source_name ]
+    outputs = [ _output_name ]
+    if (!defined(args)) {
+      args = []
+    }
+    args += [
+      "--plist",
+      rebase_path(_source_name, root_build_dir),
+      "--output",
+      rebase_path(_output_name, root_build_dir),
+      "--platform=$current_os",
+    ]
+    deps = _deps
+  }
+}
diff --git a/src/build/apple/tweak_info_plist.py b/src/build/apple/tweak_info_plist.py
new file mode 100755
index 0000000..76f64dc
--- /dev/null
+++ b/src/build/apple/tweak_info_plist.py
@@ -0,0 +1,447 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases;
+#    this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+#    we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+#    the Info.plist has changed.  So even if we updated it, it's only looking
+#    at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place.  This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+from __future__ import print_function
+
+import optparse
+import os
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
+
+def _ConvertPlist(source_plist, output_plist, fmt):
+  """Convert |source_plist| to |fmt| and save as |output_plist|."""
+  assert sys.version_info.major == 2, "Use plistlib directly in Python 3"
+  return subprocess.call(
+      ['plutil', '-convert', fmt, '-o', output_plist, source_plist])
+
+
+def _GetOutput(args):
+  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+  of the process. stderr is attached to the parent."""
+  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  stdout, _ = proc.communicate()
+  return stdout.decode('UTF-8'), proc.returncode
+
+
+def _RemoveKeys(plist, *keys):
+  """Removes a varargs of keys from the plist."""
+  for key in keys:
+    try:
+      del plist[key]
+    except KeyError:
+      pass
+
+
+def _ApplyVersionOverrides(version, keys, overrides, separator='.'):
+  """Applies version overrides.
+
+  Given a |version| string as "a.b.c.d" (assuming a default separator) with
+  version components named by |keys| then overrides any value that is present
+  in |overrides|.
+
+  >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'})
+  'a.d'
+  """
+  if not overrides:
+    return version
+  version_values = version.split(separator)
+  for i, (key, value) in enumerate(zip(keys, version_values)):
+    if key in overrides:
+      version_values[i] = overrides[key]
+  return separator.join(version_values)
+
+
+def _GetVersion(version_format, values, overrides=None):
+  """Generates a version number according to |version_format| using the values
+  from |values| or |overrides| if given."""
+  result = version_format
+  for key in values:
+    if overrides and key in overrides:
+      value = overrides[key]
+    else:
+      value = values[key]
+    result = result.replace('@%s@' % key, value)
+  return result
+
+
+def _AddVersionKeys(plist, version_format_for_key, version=None,
+                    overrides=None):
+  """Adds the product version number into the plist. Returns True on success and
+  False on error. The error will be printed to stderr."""
+  if not version:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+    (stdout, retval) = _GetOutput([
+        VERSION_TOOL, '-f', VERSION_FILE, '-t',
+        '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+    ])
+
+    # If the command finished with a non-zero return code, then report the
+    # error up.
+    if retval != 0:
+      return False
+
+    version = stdout.strip()
+
+  # Parse the given version number, which should be in MAJOR.MINOR.BUILD.PATCH
+  # format (where each value is a number). Note that str.isdigit() returns
+  # True if the string is composed only of digits (and thus matches \d+).
+  groups = version.split('.')
+  if len(groups) != 4 or not all(element.isdigit() for element in groups):
+    print('Invalid version string specified: "%s"' % version, file=sys.stderr)
+    return False
+  values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups))
+
+  for key in version_format_for_key:
+    plist[key] = _GetVersion(version_format_for_key[key], values, overrides)
+
+  # Return with no error.
+  return True
+
+
+def _DoSCMKeys(plist, add_keys):
+  """Adds the SCM information, visible in about:version, to property list. If
+  |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+  scm_revision = None
+  if add_keys:
+    # Pull in the Chrome revision number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+    (stdout, retval) = _GetOutput(
+        [VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t', '@LASTCHANGE@'])
+    if retval:
+      return False
+    scm_revision = stdout.rstrip()
+
+  # See if the operation failed.
+  _RemoveKeys(plist, 'SCMRevision')
+  if scm_revision is not None:
+    plist['SCMRevision'] = scm_revision
+  elif add_keys:
+    print('Could not determine SCM revision.  This may be OK.', file=sys.stderr)
+
+  return True
+
+
+def _AddBreakpadKeys(plist, branding, platform, staging):
+  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |branding| argument."""
+  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
+  plist['BreakpadProduct'] = '%s_%s' % (branding, platform)
+  plist['BreakpadProductDisplay'] = branding
+  if staging:
+    plist['BreakpadURL'] = 'https://clients2.google.com/cr/staging_report'
+  else:
+    plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+
+  # These are both deliberately strings and not boolean.
+  plist['BreakpadSendAndExit'] = 'YES'
+  plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+  """Removes any set Breakpad keys."""
+  _RemoveKeys(plist, 'BreakpadURL', 'BreakpadReportInterval', 'BreakpadProduct',
+              'BreakpadProductDisplay', 'BreakpadVersion',
+              'BreakpadSendAndExit', 'BreakpadSkipConfirm')
+
+
+def _TagSuffixes():
+  # Keep this list sorted in the order that tag suffix components are to
+  # appear in a tag value. That is to say, it should be sorted per ASCII.
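+  # With components = ('full',), for example, this returns ['', '-full']:
+  # one tag suffix for every subset of the components.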
+  components = ('full', )
+  assert tuple(sorted(components)) == components
+
+  components_len = len(components)
+  combinations = 1 << components_len
+  tag_suffixes = []
+  for combination in range(0, combinations):
+    tag_suffix = ''
+    for component_index in range(0, components_len):
+      if combination & (1 << component_index):
+        tag_suffix += '-' + components[component_index]
+    tag_suffixes.append(tag_suffix)
+  return tag_suffixes
+
+
+def _AddKeystoneKeys(plist, bundle_identifier, base_tag):
+  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |bundle_identifier| argument (com.example.product)."""
+  plist['KSVersion'] = plist['CFBundleShortVersionString']
+  plist['KSProductID'] = bundle_identifier
+  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+  _RemoveKeys(plist, 'KSChannelID')
+  if base_tag != '':
+    plist['KSChannelID'] = base_tag
+  for tag_suffix in _TagSuffixes():
+    if tag_suffix:
+      plist['KSChannelID' + tag_suffix] = base_tag + tag_suffix
+
+
+def _RemoveKeystoneKeys(plist):
+  """Removes any set Keystone keys."""
+  _RemoveKeys(plist, 'KSVersion', 'KSProductID', 'KSUpdateURL')
+
+  tag_keys = ['KSChannelID']
+  for tag_suffix in _TagSuffixes():
+    tag_keys.append('KSChannelID' + tag_suffix)
+  _RemoveKeys(plist, *tag_keys)
+
+
+def _AddGTMKeys(plist, platform):
+  """Adds the GTM metadata keys. This must be called AFTER _AddVersionKeys()."""
+  plist['GTMUserAgentID'] = plist['CFBundleName']
+  if platform == 'ios':
+    plist['GTMUserAgentVersion'] = plist['CFBundleVersion']
+  else:
+    plist['GTMUserAgentVersion'] = plist['CFBundleShortVersionString']
+
+
+def _RemoveGTMKeys(plist):
+  """Removes any set GTM metadata keys."""
+  _RemoveKeys(plist, 'GTMUserAgentID', 'GTMUserAgentVersion')
+
+
+def Main(argv):
+  parser = optparse.OptionParser('%prog [options]')
+  parser.add_option('--plist',
+                    dest='plist_path',
+                    action='store',
+                    type='string',
+                    default=None,
+                    help='The path of the plist to tweak.')
+  parser.add_option('--output', dest='plist_output', action='store',
+      type='string', default=None, help='If specified, the path to output ' + \
+      'the tweaked plist, rather than overwriting the input.')
+  parser.add_option('--breakpad',
+                    dest='use_breakpad',
+                    action='store',
+                    type='int',
+                    default=False,
+                    help='Enable Breakpad [1 or 0]')
+  parser.add_option(
+      '--breakpad_staging',
+      dest='use_breakpad_staging',
+      action='store_true',
+      default=False,
+      help='Use staging breakpad to upload reports. Ignored if --breakpad=0.')
+  parser.add_option('--keystone',
+                    dest='use_keystone',
+                    action='store',
+                    type='int',
+                    default=False,
+                    help='Enable Keystone [1 or 0]')
+  parser.add_option('--keystone-base-tag',
+                    default='',
+                    help='Base Keystone tag to set')
+  parser.add_option('--scm',
+                    dest='add_scm_info',
+                    action='store',
+                    type='int',
+                    default=True,
+                    help='Add SCM metadata [1 or 0]')
+  parser.add_option('--branding',
+                    dest='branding',
+                    action='store',
+                    type='string',
+                    default=None,
+                    help='The branding of the binary')
+  parser.add_option('--bundle_id',
+                    dest='bundle_identifier',
+                    action='store',
+                    type='string',
+                    default=None,
+                    help='The bundle id of the binary')
+  parser.add_option('--platform',
+                    choices=('ios', 'mac'),
+                    default='mac',
+                    help='The target platform of the bundle')
+  parser.add_option('--add-gtm-metadata',
+                    dest='add_gtm_info',
+                    action='store',
+                    type='int',
+                    default=False,
+                    help='Add GTM metadata [1 or 0]')
+  # TODO(crbug.com/1140474): Remove once iOS 14.2 reaches mass adoption.
+  parser.add_option('--lock-to-version',
+                    help='Set CFBundleVersion to given value + @MAJOR@@PATCH@')
+  parser.add_option(
+      '--version-overrides',
+      action='append',
+      help='Key-value pair to override specific component of version '
+      'like key=value (can be passed multiple time to configure '
+      'more than one override)')
+  parser.add_option('--format',
+                    choices=('binary1', 'xml1'),
+                    default='xml1',
+                    help='Format to use when writing property list '
+                    '(default: %(default)s)')
+  parser.add_option('--version',
+                    dest='version',
+                    action='store',
+                    type='string',
+                    default=None,
+                    help='The version string [major.minor.build.patch]')
+  (options, args) = parser.parse_args(argv)
+
+  if len(args) > 0:
+    print(parser.get_usage(), file=sys.stderr)
+    return 1
+
+  if not options.plist_path:
+    print('No --plist specified.', file=sys.stderr)
+    return 1
+
+  # Read the plist into its parsed format. Convert the file to 'xml1' as
+  # plistlib only supports that format in Python 2.7.
+  with tempfile.NamedTemporaryFile() as temp_info_plist:
+    if sys.version_info.major == 2:
+      retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1')
+      if retcode != 0:
+        return retcode
+      plist = plistlib.readPlist(temp_info_plist.name)
+    else:
+      with open(options.plist_path, 'rb') as f:
+        plist = plistlib.load(f)
+
+  # Convert overrides.
+  overrides = {}
+  if options.version_overrides:
+    for pair in options.version_overrides:
+      if not '=' in pair:
+        print('Invalid value for --version-overrides:', pair, file=sys.stderr)
+        return 1
+      key, value = pair.split('=', 1)
+      overrides[key] = value
+      if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'):
+        print('Unsupported key for --version-overrides:', key, file=sys.stderr)
+        return 1
+
+  if options.platform == 'mac':
+    version_format_for_key = {
+        # Add public version info so "Get Info" works.
+        'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@',
+
+        # Honor the 429496.72.95 limit.  The maximum comes from splitting
+        # 2^32 - 1 into 6, 2, 2 digits.  The limitation was present in Tiger,
+        # but it could have been fixed in a later OS release (that hasn't
+        # been tested; it's easy enough to find out with "lsregister -dump").
+        # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+        # BUILD will always be an increasing value, so BUILD.PATCH gives us
+        # something unique that meets what LS wants.
+        'CFBundleVersion': '@BUILD@.@PATCH@',
+    }
+  else:
+    # TODO(crbug.com/1140474): Remove once iOS 14.2 reaches mass adoption.
+    if options.lock_to_version:
+      # Pull in the PATCH number and format it to 3 digits.
+      VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+      VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+      (stdout,
+       retval) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t', '@PATCH@'])
+      if retval != 0:
+        return 2
+      patch = '{:03d}'.format(int(stdout))
+      version_format_for_key = {
+          'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
+          'CFBundleVersion': options.lock_to_version + '.@MAJOR@' + patch
+      }
+    else:
+      version_format_for_key = {
+          'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
+          'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+      }
+
+  if options.use_breakpad:
+    version_format_for_key['BreakpadVersion'] = \
+        '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+
+  # Insert the product version.
+  if not _AddVersionKeys(plist,
+                         version_format_for_key,
+                         version=options.version,
+                         overrides=overrides):
+    return 2
+
+  # Add Breakpad if configured to do so.
+  if options.use_breakpad:
+    if options.branding is None:
+      print('Use of Breakpad requires branding.', file=sys.stderr)
+      return 1
+    # Map "target_os" passed from gn via the --platform parameter
+    # to the platform as known by breakpad.
+    platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform]
+    _AddBreakpadKeys(plist, options.branding, platform,
+                     options.use_breakpad_staging)
+  else:
+    _RemoveBreakpadKeys(plist)
+
+  # Add Keystone if configured to do so.
+  if options.use_keystone:
+    if options.bundle_identifier is None:
+      print('Use of Keystone requires the bundle id.', file=sys.stderr)
+      return 1
+    _AddKeystoneKeys(plist, options.bundle_identifier,
+                     options.keystone_base_tag)
+  else:
+    _RemoveKeystoneKeys(plist)
+
+  # Adds or removes any SCM keys.
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
+
+  # Add GTM metadata keys.
+  if options.add_gtm_info:
+    _AddGTMKeys(plist, options.platform)
+  else:
+    _RemoveGTMKeys(plist)
+
+  output_path = options.plist_path
+  if options.plist_output is not None:
+    output_path = options.plist_output
+
+  # Now that all keys have been mutated, rewrite the file.
+  # Convert Info.plist to the format requested by the --format flag. Any
+  # format would work on Mac but iOS requires specific format.
+  if sys.version_info.major == 2:
+    with tempfile.NamedTemporaryFile() as temp_info_plist:
+      plistlib.writePlist(plist, temp_info_plist.name)
+      return _ConvertPlist(temp_info_plist.name, output_path, options.format)
+  with open(output_path, 'wb') as f:
+    plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML}
+    plistlib.dump(plist, f, fmt=plist_format[options.format])
+
+
+if __name__ == '__main__':
+  # TODO(https://crbug.com/941669): Temporary workaround until all scripts use
+  # python3 by default.
+  if sys.version_info[0] < 3:
+    os.execvp('python3', ['python3'] + sys.argv)
+  sys.exit(Main(sys.argv[1:]))
diff --git a/src/build/apple/write_pkg_info.py b/src/build/apple/write_pkg_info.py
new file mode 100644
index 0000000..8d07cdb
--- /dev/null
+++ b/src/build/apple/write_pkg_info.py
@@ -0,0 +1,54 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import plist_util
+import sys
+
+# This script creates a PkgInfo file for an OS X .app bundle's plist.
+# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
+#           --output Foo.app/Contents/PkgInfo
+
+
+def Main():
+  parser = argparse.ArgumentParser(
+      description='A script to write PkgInfo files for .app bundles.')
+  parser.add_argument('--plist',
+                      required=True,
+                      help='Path to the Info.plist for the .app.')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Path to the desired output file.')
+  args = parser.parse_args()
+
+  # Remove the output if it exists already.
+  if os.path.exists(args.output):
+    os.unlink(args.output)
+
+  plist = plist_util.LoadPList(args.plist)
+  package_type = plist['CFBundlePackageType']
+  if package_type != 'APPL':
+    raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \
+        ('APPL', package_type))
+
+  # The format of PkgInfo is eight characters, representing the bundle type
+  # and bundle signature, each four characters. If that is missing, four
+  # '?' characters are used instead.
+  signature_code = plist.get('CFBundleSignature', '????')
+  if len(signature_code) != 4:
+    raise ValueError('CFBundleSignature should be exactly four characters, ' +
+                     'got %s' % signature_code)
+
+  with open(args.output, 'w') as fp:
+    fp.write('%s%s' % (package_type, signature_code))
+  return 0
+
+
+if __name__ == '__main__':
+  # TODO(https://crbug.com/941669): Temporary workaround until all scripts use
+  # python3 by default.
+  if sys.version_info[0] < 3:
+    os.execvp('python3', ['python3'] + sys.argv)
+  sys.exit(Main())
diff --git a/src/build/apple/xcrun.py b/src/build/apple/xcrun.py
new file mode 100755
index 0000000..71bf50c
--- /dev/null
+++ b/src/build/apple/xcrun.py
@@ -0,0 +1,52 @@
+#!/usr/bin/python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Wrapper around xcrun adding support for a --developer-dir parameter to set
+the DEVELOPER_DIR environment variable, and for converting relative paths
+to absolute (since this is required by most of the tools run via xcrun).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+def xcrun(command, developer_dir):
+  environ = dict(os.environ)
+  if developer_dir:
+    environ['DEVELOPER_DIR'] = os.path.abspath(developer_dir)
+
+  processed_args = ['/usr/bin/xcrun']
+  for arg in command:
+    if os.path.exists(arg):
+      arg = os.path.abspath(arg)
+    processed_args.append(arg)
+
+  process = subprocess.Popen(processed_args,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE,
+                             universal_newlines=True,
+                             env=environ)
+
+  stdout, stderr = process.communicate()
+  sys.stdout.write(stdout)
+  if process.returncode:
+    sys.stderr.write(stderr)
+    sys.exit(process.returncode)
+
+
+def main(args):
+  parser = argparse.ArgumentParser(add_help=False)
+  parser.add_argument(
+      '--developer-dir',
+      help='path to developer dir to use for the invocation of xcrun')
+
+  parsed, remaining_args = parser.parse_known_args(args)
+  xcrun(remaining_args, parsed.developer_dir)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/args/README.txt b/src/build/args/README.txt
new file mode 100644
index 0000000..825bf64
--- /dev/null
+++ b/src/build/args/README.txt
@@ -0,0 +1,31 @@
+This directory is here to hold .gni files that contain sets of GN build
+arguments for given configurations.
+
+(Currently this directory is empty because we removed the only thing here, but
+this has come up several times so I'm confident we'll need this again. If this
+directory is still empty by 2017, feel free to delete it. --Brett)
+
+Some projects or bots may have build configurations with specific combinations
+of flags. Rather than making a new global flag for your specific project and
+adding it all over the build to each arg it should affect, you can add a .gni
+file here with the variables.
+
+For example, for project foo you may put in build/args/foo.gni:
+
+  target_os = "android"
+  use_pulseaudio = false
+  use_ozone = true
+  system_libdir = "foo"
+
+Users wanting to build this configuration would run:
+
+  $ gn args out/mybuild
+
+And add the following line to their args for that build directory:
+
+  import("//build/args/foo.gni")
+  # You can set any other args here like normal.
+  is_component_build = false
+
+This way everybody can agree on a set of flags for a project, and their builds
+stay in sync as the flags in foo.gni are modified.
diff --git a/src/build/args/chromeos/README.md b/src/build/args/chromeos/README.md
new file mode 100644
index 0000000..e02e185
--- /dev/null
+++ b/src/build/args/chromeos/README.md
@@ -0,0 +1,52 @@
+This directory is used to store GN arg mapping for Chrome OS boards.
+
+Files in this directory are populated by running `gclient sync` with specific
+arguments set in the .gclient file. Specifically:
+* The file must have a top-level variable set: `target_os = ["chromeos"]`
+* The `"custom_vars"` parameter of the chromium/src.git solution must include
+  the parameter: `"cros_boards": "{BOARD_NAMES}"` where `{BOARD_NAMES}` is a
+  colon-separated list of boards you'd like to checkout.
+* If you'd like to check out a QEMU-bootable image for a given board, include
+  it in the `cros_boards_with_qemu_images` var rather than the `cros_boards`
+  var.
+
+A typical .gclient file is a sibling of the src/ directory, and might look like
+this:
+```
+solutions = [
+  {
+    "url": "https://chromium.googlesource.com/chromium/src.git",
+    "managed": False,
+    "name": "src",
+    "custom_deps": {},
+    "custom_vars" : {
+        "checkout_src_internal": True,
+        "cros_boards": "eve:kevin",
+        # If a QEMU-bootable image is desired for any board, move it from
+        # the previous var to the following:
+        "cros_boards_with_qemu_images": "amd64-generic",
+    },
+  },
+]
+target_os = ["chromeos"]
+```
+
+To use these files in a build, simply add the following line to your GN args:
+```
+import("//build/args/chromeos/${some_board}.gni")
+```
+
+That will produce a Chrome OS build of Chrome very similar to what is shipped
+for that device. You can also supply additional args or even override ones
+supplied in the .gni file after the `import()` line. For example, the following
+args will produce a debug build of Chrome for board=eve using goma:
+```
+import("//build/args/chromeos/eve.gni")
+
+is_debug = true
+use_goma = true
+goma_dir = "/path/to/goma/"
+```
+
+TODO(bpastene): Make 'cros_boards' a first class citizen in gclient and replace
+it with 'target_boards' instead.
diff --git a/src/build/args/headless.gn b/src/build/args/headless.gn
new file mode 100644
index 0000000..9b8392c
--- /dev/null
+++ b/src/build/args/headless.gn
@@ -0,0 +1,56 @@
+# GN args template for the Headless Chrome library
+#
+# Add an import to args.gn in the out directory, then run gn gen on it to use.
+# E.g. for out directory out/foo:
+# echo 'import("//build/args/headless.gn")' > out/foo/args.gn
+# gn gen out/foo
+#
+# Use gn args to add your own build preference args.
+
+use_ozone = true
+ozone_auto_platforms = false
+ozone_platform = "headless"
+ozone_platform_headless = true
+
+# Embed resource.pak into binary to simplify deployment.
+headless_use_embedded_resources = true
+
+# Expose headless bindings for freetype library bundled with Chromium.
+headless_fontconfig_utils = true
+
+# Don't use Prefs component, disabling access to Local State prefs.
+headless_use_prefs = false
+
+# Don't use Policy component, disabling all policies.
+headless_use_policy = false
+
+# Remove a dependency on a system fontconfig library.
+use_bundled_fontconfig = true
+
+# Build the ICU data into the binary rather than loading it from a
+# separate file, in order to simplify deployment.
+icu_use_data_file = false
+
+# Use embedded data instead of external files for headless in order
+# to simplify deployment.
+v8_use_external_startup_data = false
+
+enable_nacl = false
+enable_print_preview = false
+enable_remoting = false
+use_alsa = false
+use_cups = false
+use_dbus = false
+use_gio = false
+use_kerberos = false
+use_libpci = false
+use_pulseaudio = false
+use_udev = false
+rtc_use_pipewire = false
+v8_enable_lazy_source_positions = false
+use_glib = false
+use_gtk = false
+use_pangocairo = false
+
+# TODO(1096425): Remove this once use_x11 goes away.
+use_x11 = false
diff --git a/src/build/build-ctags.sh b/src/build/build-ctags.sh
new file mode 100755
index 0000000..61e017e
--- /dev/null
+++ b/src/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+  Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Redirect error messages so they aren't seen because they are almost always
+  # errors about components that you just happen to have not built (NaCl, for
+  # example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in "$@"; do
+  build_dir "$dir"
+done
diff --git a/src/build/build_config.h b/src/build/build_config.h
index c640843..9d3f852 100644
--- a/src/build/build_config.h
+++ b/src/build/build_config.h
@@ -3,96 +3,123 @@
 // found in the LICENSE file.
 
 // This file adds defines about the platform we're currently building on.
+//
 //  Operating System:
-//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX)
+//    OS_AIX / OS_ANDROID / OS_ASMJS / OS_FREEBSD / OS_FUCHSIA / OS_IOS /
+//    OS_LINUX / OS_MAC / OS_NACL (SFI or NONSFI) / OS_NETBSD / OS_OPENBSD /
+//    OS_QNX / OS_SOLARIS / OS_WIN
+//  Operating System family:
+//    OS_APPLE: IOS or MAC
+//    OS_BSD: FREEBSD or NETBSD or OPENBSD
+//    OS_POSIX: AIX or ANDROID or ASMJS or CHROMEOS or FREEBSD or IOS or LINUX
+//              or MAC or NACL or NETBSD or OPENBSD or QNX or SOLARIS
+//
+//  /!\ Note: OS_CHROMEOS is set by the build system, not this file
+//
 //  Compiler:
 //    COMPILER_MSVC / COMPILER_GCC
+//
 //  Processor:
-//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
-//    ARCH_CPU_ARM / ARCH_CPU_ARMEL / ARCH_CPU_ARM_FAMILY
-//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+//    ARCH_CPU_ARM64 / ARCH_CPU_ARMEL / ARCH_CPU_MIPS / ARCH_CPU_MIPS64 /
+//    ARCH_CPU_MIPS64EL / ARCH_CPU_MIPSEL / ARCH_CPU_PPC64 / ARCH_CPU_S390 /
+//    ARCH_CPU_S390X / ARCH_CPU_X86 / ARCH_CPU_X86_64
+//  Processor family:
+//    ARCH_CPU_ARM_FAMILY: ARMEL or ARM64
+//    ARCH_CPU_MIPS_FAMILY: MIPS64EL or MIPSEL or MIPS64 or MIPS
+//    ARCH_CPU_PPC64_FAMILY: PPC64
+//    ARCH_CPU_S390_FAMILY: S390 or S390X
+//    ARCH_CPU_X86_FAMILY: X86 or X86_64
+//  Processor features:
+//    ARCH_CPU_31_BITS / ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
 //    ARCH_CPU_BIG_ENDIAN / ARCH_CPU_LITTLE_ENDIAN
 
 #ifndef BUILD_BUILD_CONFIG_H_
 #define BUILD_BUILD_CONFIG_H_
 
-#if defined(__APPLE__)
-#include <TargetConditionals.h>
-#endif
-
 // A set of macros to use for platform detection.
 #if defined(STARBOARD)
 // noop
-#elif defined(__APPLE__)
-#define OS_MACOSX 1
-#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
-#define OS_IOS 1
-#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
 #elif defined(ANDROID)
 #define OS_ANDROID 1
-#elif defined(__native_client__)
-#define OS_NACL 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals after testing ANDROID as some Android builds
+// on the Mac have this header available and it's not needed unless the target
+// is really an Apple platform.
+#include <TargetConditionals.h>
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#else
+#define OS_MAC 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
 #elif defined(__linux__)
+#if !defined(OS_CHROMEOS)
+// Do not define OS_LINUX on Chrome OS build.
+// The OS_CHROMEOS macro is defined in GN.
 #define OS_LINUX 1
-// Use TOOLKIT_GTK on linux if TOOLKIT_VIEWS isn't defined.
-#if !defined(TOOLKIT_VIEWS)
-#define TOOLKIT_GTK
+#endif  // !defined(OS_CHROMEOS)
+// Include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// We really are using glibc, not uClibc pretending to be glibc.
+#define LIBC_GLIBC 1
 #endif
-#elif defined(__LB_SHELL__)
-// NO toolkit!
 #elif defined(_WIN32)
 #define OS_WIN 1
-#define TOOLKIT_VIEWS 1
+#elif defined(__Fuchsia__)
+#define OS_FUCHSIA 1
 #elif defined(__FreeBSD__)
 #define OS_FREEBSD 1
-#define TOOLKIT_GTK
+#elif defined(__NetBSD__)
+#define OS_NETBSD 1
 #elif defined(__OpenBSD__)
 #define OS_OPENBSD 1
-#define TOOLKIT_GTK
 #elif defined(__sun)
 #define OS_SOLARIS 1
-#define TOOLKIT_GTK
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#elif defined(_AIX)
+#define OS_AIX 1
+#elif defined(__asmjs__) || defined(__wasm__)
+#define OS_ASMJS 1
 #else
 #error Please add support for your platform in build/build_config.h
 #endif
+// NOTE: Adding a new port? Please follow
+// https://chromium.googlesource.com/chromium/src/+/master/docs/new_port_policy.md
 
-#if defined(USE_OPENSSL) && defined(USE_NSS)
-#error Cannot use both OpenSSL and NSS
+#if defined(OS_MAC) || defined(OS_IOS)
+#define OS_APPLE 1
 #endif
 
 // For access to standard BSD features, use OS_BSD instead of a
 // more specific macro.
-#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD)
 #define OS_BSD 1
 #endif
 
 // For access to standard POSIXish features, use OS_POSIX instead of a
 // more specific macro.
-#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
-    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
-    defined(OS_NACL) || defined(__LB_SHELL__)
+#if defined(OS_AIX) || defined(OS_ANDROID) || defined(OS_ASMJS) ||  \
+    defined(OS_FREEBSD) || defined(OS_IOS) || defined(OS_LINUX) ||  \
+    defined(OS_CHROMEOS) || defined(OS_MAC) || defined(OS_NACL) ||  \
+    defined(OS_NETBSD) || defined(OS_OPENBSD) || defined(OS_QNX) || \
+    defined(OS_SOLARIS)
 #define OS_POSIX 1
 #endif
 
-#if defined(OS_POSIX) && !defined(OS_MACOSX) && !defined(OS_ANDROID) && \
-    !defined(OS_NACL) && !defined(__LB_SHELL__)
-#define USE_X11 1  // Use X for graphics.
-#endif
-
-// Use tcmalloc
-#if (defined(OS_WIN) || defined(OS_LINUX)) && !defined(NO_TCMALLOC)
-#define USE_TCMALLOC 1
-#endif
-
-// Compiler detection.
-#if defined(__SNC__)
-#define COMPILER_SNC
-#endif
-
-#if defined(__ghs) || defined(__ghs__)
-#define COMPILER_GHS 1
-#endif
-
+// Compiler detection. Note: clang masquerades as GCC on POSIX and as MSVC on
+// Windows.
 #if defined(__GNUC__)
 #define COMPILER_GCC 1
 #elif defined(_MSC_VER)
@@ -108,28 +135,28 @@
 #if defined(STARBOARD)
 #include "starboard/configuration.h"
 #if SB_IS(32_BIT)
-# define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_32_BITS 1
 #elif SB_IS(64_BIT)
-# define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_64_BITS 1
 #endif  // SB_IS(32_BIT)
 #if SB_IS(BIG_ENDIAN)
-# define ARCH_CPU_BIG_ENDIAN 1
+#define ARCH_CPU_BIG_ENDIAN 1
 #else   // SB_IS(BIG_ENDIAN)
-# define ARCH_CPU_LITTLE_ENDIAN 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
 #endif  // SB_IS(BIG_ENDIAN)
 #if SB_IS(ARCH_X86)
-# define ARCH_CPU_X86_FAMILY 1
-# define ARCH_CPU_X86 1
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
 #elif SB_IS(ARCH_X64)
-# define ARCH_CPU_X86_FAMILY 1
-# define ARCH_CPU_X86_64 1
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
 #elif SB_IS(ARCH_ARM) || SB_IS(ARCH_ARM64)
-# define ARCH_CPU_ARM_FAMILY 1
-# if SB_IS(BIG_ENDIAN)
-#  define ARCH_CPU_ARM 1
-# else   // SB_IS(BIG_ENDIAN)
-#  define ARCH_CPU_ARMEL 1
-# endif  // SB_IS(BIG_ENDIAN)
+#define ARCH_CPU_ARM_FAMILY 1
+#if SB_IS(BIG_ENDIAN)
+#define ARCH_CPU_ARM 1
+#else   // SB_IS(BIG_ENDIAN)
+#define ARCH_CPU_ARMEL 1
+#endif  // SB_IS(BIG_ENDIAN)
 #endif
 #elif defined(_M_X64) || defined(__x86_64__)
 #define ARCH_CPU_X86_FAMILY 1
@@ -141,60 +168,97 @@
 #define ARCH_CPU_X86 1
 #define ARCH_CPU_32_BITS 1
 #define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__s390x__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390X 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__s390__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390 1
+#define ARCH_CPU_31_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif (defined(__PPC64__) || defined(__PPC__)) && defined(__BIG_ENDIAN__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__PPC64__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
 #elif defined(__ARMEL__)
 #define ARCH_CPU_ARM_FAMILY 1
 #define ARCH_CPU_ARMEL 1
 #define ARCH_CPU_32_BITS 1
 #define ARCH_CPU_LITTLE_ENDIAN 1
-#elif defined(__pnacl__)
+#elif defined(__aarch64__) || defined(_M_ARM64)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__) || defined(__asmjs__) || defined(__wasm__)
 #define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#elif defined(__MIPSEB__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#endif
 #else
 #error Please add support for your architecture in build/build_config.h
 #endif
 
 // Type detection for wchar_t.
 #if defined(STARBOARD)
-#  if SB_IS(WCHAR_T_UTF16)
-#    define WCHAR_T_IS_UTF16 1
-#  elif SB_IS(WCHAR_T_UTF32)
-#    define WCHAR_T_IS_UTF32 1
-#  endif
-#elif defined(OS_WIN) || \
-    (defined(__LB_SHELL__) && \
-        !(defined(__LB_LINUX__) || defined(__LB_ANDROID__)))
-#define WCHAR_T_IS_UTF16 1
-#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
-    defined(__WCHAR_MAX__) && \
+#if SB_IS(WCHAR_T_UTF16)
+#define WCHAR_T_IS_UTF16
+#elif SB_IS(WCHAR_T_UTF32)
+#define WCHAR_T_IS_UTF32
+#endif
+#elif defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_FUCHSIA)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
     (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
-#define WCHAR_T_IS_UTF32 1
-#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
-    defined(__WCHAR_MAX__) && \
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
     (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
 // On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
 // compile in this mode (in particular, Chrome doesn't). This is intended for
 // other projects using base who manage their own dependencies and make sure
 // short wchar works for them.
-#define WCHAR_T_IS_UTF16 1
+#define WCHAR_T_IS_UTF16
 #else
 #error Please add support for your compiler in build/build_config.h
 #endif
 
-#if defined(STARBOARD)
-#  if SB_IS(WCHAR_T_UNSIGNED)
-#    define WCHAR_T_IS_UNSIGNED 1
-#  elif SB_IS(WCHAR_T_SIGNED)
-#    define WCHAR_T_IS_UNSIGNED 0
-#  endif
-#elif defined(__ARMEL__) && !defined(OS_IOS)
-#define WCHAR_T_IS_UNSIGNED 1
-#endif
-
-// TODO: Worry about these defines if/when we need to support Android.
 #if defined(OS_ANDROID)
 // The compiler thinks std::string::const_iterator and "const char*" are
 // equivalent types.
 #define STD_STRING_ITERATOR_IS_CHAR_POINTER
-// The compiler thinks base::string16::const_iterator and "char16*" are
+// The compiler thinks std::u16string::const_iterator and "char16*" are
 // equivalent types.
 #define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
 #endif
diff --git a/src/build/buildflag.h b/src/build/buildflag.h
new file mode 100644
index 0000000..5776a75
--- /dev/null
+++ b/src/build/buildflag.h
@@ -0,0 +1,47 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_BUILDFLAG_H_
+#define BUILD_BUILDFLAG_H_
+
+// These macros un-mangle the names of the build flags in a way that looks
+// natural, and give errors if the flag is not defined. Normally in the
+// preprocessor it's easy to make mistakes that interpret "you haven't done
+// the setup to know what the flag is" as "flag is off". Normally you would
+// include the generated header rather than include this file directly.
+//
+// This is for use with generated headers. See build/buildflag_header.gni.
+
+// This dance of two macros does a concatenation of two preprocessor args using
+// ## doubly indirectly because using ## directly prevents macros in that
+// parameter from being expanded.
+#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
+#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
+
+// Accessor for build flags.
+//
+// To test for a value, if the build file specifies:
+//
+//   ENABLE_FOO=true
+//
+// Then you would check at build-time in source code with:
+//
+//   #include "foo_flags.h"  // The header the build file specified.
+//
+//   #if BUILDFLAG(ENABLE_FOO)
+//     ...
+//   #endif
+//
+// There will be no #define called ENABLE_FOO so if you accidentally test for
+// whether that is defined, it will always be negative. You can also use
+// the value in expressions:
+//
+//   const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
+//
+// Because the flag is accessed as a preprocessor macro with (), an error
+// will be thrown if the proper header defining the internal flag value has
+// not been included.
+#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
+
+#endif  // BUILD_BUILDFLAG_H_
diff --git a/src/build/buildflag_header.gni b/src/build/buildflag_header.gni
new file mode 100644
index 0000000..821c4ef
--- /dev/null
+++ b/src/build/buildflag_header.gni
@@ -0,0 +1,137 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a header with preprocessor defines specified by the build file.
+#
+# The flags are converted to function-style defines with mangled names and
+# code uses an accessor macro to access the values. This is to try to
+# minimize bugs where code checks whether something is defined or not, and
+# the proper header isn't included, meaning the answer will always be silently
+# false or might vary across the code base.
+#
+# In the GN template, specify build flags in the template as a list
+# of strings that encode key/value pairs like this:
+#
+#   flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ]
+#
+# The GN values "true" and "false" will be mapped to 0 and 1 for boolean
+# #if flags to be expressed naturally. This means you can't directly make a
+# define that generates C++ value of true or false for use in code. If you
+# REALLY need this, you can also use the string "(true)" and "(false)" to
+# prevent the rewriting.
+
+# To check the value of the flag in C code:
+#
+#   #include "path/to/here/header_file.h"
+#
+#   #if BUILDFLAG(ENABLE_FOO)
+#   ...
+#   #endif
+#
+#   const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL);
+#
+# There will be no #define called ENABLE_FOO so if you accidentally test for
+# that in an ifdef it will always be negative.
+#
+#
+# Template parameters
+#
+#   flags [required, list of strings]
+#       Flag values as described above.
+#
+#   header [required, string]
+#       File name for generated header. By default, this will go in the
+#       generated file directory for this target, and you would include it
+#       with:
+#         #include "<path_to_this_BUILD_file>/<header>"
+#
+#   header_dir [optional, string]
+#       Override the default location of the generated header. The string will
+#       be treated as a subdirectory of the root_gen_dir. For example:
+#         header_dir = "foo/bar"
+#       Then you can include the header as:
+#         #include "foo/bar/baz.h"
+#
+#   deps, public_deps, testonly, visibility
+#       Normal meaning.
+#
+#
+# Grit defines
+#
+# If one .grd file uses a flag, just add to the grit target:
+#
+#   defines = [
+#     "enable_doom_melon=$enable_doom_melon",
+#   ]
+#
+# If multiple .grd files use it, you'll want to put the defines in a .gni file
+# so it can be shared. Generally this .gni file should include all grit defines
+# for a given module (for some definition of "module"). Then do:
+#
+#   defines = ui_grit_defines
+#
+# If you forget to do this, the flag will be implicitly false in the .grd file
+# and those resources won't be compiled. You'll know because the resource
+# #define won't be generated and any code that uses it won't compile. If you
+# see a missing IDS_* string, this is probably the reason.
+#
+#
+# Example
+#
+#   buildflag_header("foo_buildflags") {
+#     header = "foo_buildflags.h"
+#
+#     flags = [
+#       # This uses the GN build flag enable_doom_melon as the definition.
+#       "ENABLE_DOOM_MELON=$enable_doom_melon",
+#
+#       # This force-enables the flag.
+#       "ENABLE_SPACE_LASER=true",
+#
+#       # This will expand to the quoted C string when used in source code.
+#       "SPAM_SERVER_URL=\"http://www.example.com/\"",
+#     ]
+#   }
+template("buildflag_header") {
+  action(target_name) {
+    script = "//build/write_buildflag_header.py"
+
+    if (defined(invoker.header_dir)) {
+      header_file = "${invoker.header_dir}/${invoker.header}"
+    } else {
+      # Compute the path from the root to this file.
+      header_file = rebase_path(".", "//") + "/${invoker.header}"
+    }
+
+    outputs = [ "$root_gen_dir/$header_file" ]
+
+    # Always write --flags to the file so it's not empty. Empty will confuse GN
+    # into thinking the response file isn't used.
+    response_file_contents = [ "--flags" ]
+    if (defined(invoker.flags)) {
+      response_file_contents += invoker.flags
+    }
+
+    args = [
+      "--output",
+      header_file,  # Not rebased, Python script puts it inside gen-dir.
+      "--rulename",
+      get_label_info(":$target_name", "label_no_toolchain"),
+      "--gen-dir",
+      rebase_path(root_gen_dir, root_build_dir),
+      "--definitions",
+      "{{response_file_name}}",
+    ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    public_deps = [ "//build:buildflag_header_h" ]
+  }
+}
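To make the mangling concrete, here is a minimal sketch of the rewrite that
//build/write_buildflag_header.py performs for each entry in flags (the exact
emitted formatting is illustrative, not verified against the script):

  def mangle(flag):
    # 'ENABLE_FOO=true' -> '#define BUILDFLAG_INTERNAL_ENABLE_FOO() (1)'
    name, _, value = flag.partition('=')
    value = {'true': '1', 'false': '0'}.get(value, value)
    return '#define BUILDFLAG_INTERNAL_%s() (%s)' % (name, value)

  print(mangle('ENABLE_FOO=true'))
  # C++ code then checks the flag with: #if BUILDFLAG(ENABLE_FOO)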
diff --git a/src/build/check_gn_headers.py b/src/build/check_gn_headers.py
new file mode 100755
index 0000000..9bdbba8
--- /dev/null
+++ b/src/build/check_gn_headers.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Find header files missing in GN.
+
+This script gets all the header files from ninja_deps, which reflects the true
+dependencies generated by the compiler, and reports any that don't exist in GN.
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from multiprocessing import Process, Queue
+
+SRC_DIR = os.path.abspath(
+    os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir))
+DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools')
+
+
+def GetHeadersFromNinja(out_dir, skip_obj, q):
+  """Return all the header files from ninja_deps"""
+
+  def NinjaSource():
+    cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps']
+    # A negative bufsize means to use the system default, which usually
+    # means fully buffered.
+    popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1)
+    for line in iter(popen.stdout.readline, ''):
+      yield line.rstrip()
+
+    popen.stdout.close()
+    return_code = popen.wait()
+    if return_code:
+      raise subprocess.CalledProcessError(return_code, cmd)
+
+  ans, err = set(), None
+  try:
+    ans = ParseNinjaDepsOutput(NinjaSource(), out_dir, skip_obj)
+  except Exception as e:
+    err = str(e)
+  q.put((ans, err))
+
+
+def ParseNinjaDepsOutput(ninja_out, out_dir, skip_obj):
+  """Parse ninja output and get the header files"""
+  all_headers = {}
+
+  # Ninja always uses "/", even on Windows.
+  prefix = '../../'
+
+  is_valid = False
+  obj_file = ''
+  for line in ninja_out:
+    if line.startswith('    '):
+      if not is_valid:
+        continue
+      if line.endswith('.h') or line.endswith('.hh'):
+        f = line.strip()
+        if f.startswith(prefix):
+          f = f[6:]  # Remove the '../../' prefix
+          # build/ only contains build-specific files like build_config.h
+          # and buildflag.h, and system header files, so they should be
+          # skipped.
+          if f.startswith(out_dir) or f.startswith('out'):
+            continue
+          if not f.startswith('build'):
+            all_headers.setdefault(f, [])
+            if not skip_obj:
+              all_headers[f].append(obj_file)
+    else:
+      is_valid = line.endswith('(VALID)')
+      obj_file = line.split(':')[0]
+
+  return all_headers
+
+
+def GetHeadersFromGN(out_dir, q):
+  """Return all the header files from GN"""
+
+  tmp = None
+  ans, err = set(), None
+  try:
+    # Argument |dir| is needed to make sure it's on the same drive on Windows.
+    # dir='' means dir='.', but doesn't introduce an unneeded prefix.
+    tmp = tempfile.mkdtemp(dir='')
+    shutil.copy2(os.path.join(out_dir, 'args.gn'),
+                 os.path.join(tmp, 'args.gn'))
+    # Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
+    gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn'
+    subprocess.check_call([
+        os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q'])
+    gn_json = json.load(open(os.path.join(tmp, 'project.json')))
+    ans = ParseGNProjectJSON(gn_json, out_dir, tmp)
+  except Exception as e:
+    err = str(e)
+  finally:
+    if tmp:
+      shutil.rmtree(tmp)
+  q.put((ans, err))
+
+
+def ParseGNProjectJSON(gn, out_dir, tmp_out):
+  """Parse GN output and get the header files"""
+  all_headers = set()
+
+  for _target, properties in gn['targets'].iteritems():
+    sources = properties.get('sources', [])
+    public = properties.get('public', [])
+    # Exclude '"public": "*"'.
+    if type(public) is list:
+      sources += public
+    for f in sources:
+      if f.endswith('.h') or f.endswith('.hh'):
+        if f.startswith('//'):
+          f = f[2:]  # Strip the '//' prefix.
+          if f.startswith(tmp_out):
+            f = out_dir + f[len(tmp_out):]
+          all_headers.add(f)
+
+  return all_headers
+
+
+def GetDepsPrefixes(q):
+  """Return all the folders controlled by DEPS file"""
+  prefixes, err = set(), None
+  try:
+    gclient_exe = 'gclient.bat' if sys.platform == 'win32' else 'gclient'
+    gclient_out = subprocess.check_output([
+        os.path.join(DEPOT_TOOLS_DIR, gclient_exe),
+        'recurse', '--no-progress', '-j1',
+        'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]'],
+        universal_newlines=True)
+    for i in gclient_out.split('\n'):
+      if i.startswith('src/'):
+        i = i[4:]
+        prefixes.add(i)
+  except Exception as e:
+    err = str(e)
+  q.put((prefixes, err))
+
+
+def IsBuildClean(out_dir):
+  cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-n']
+  try:
+    out = subprocess.check_output(cmd)
+    return 'no work to do.' in out
+  except Exception as e:
+    print(e)
+    return False
+
+def ParseWhiteList(whitelist):
+  out = set()
+  for line in whitelist.split('\n'):
+    line = re.sub(r'#.*', '', line).strip()
+    if line:
+      out.add(line)
+  return out
+
+
+def FilterOutDepsedRepo(files, deps):
+  return {f for f in files if not any(f.startswith(d) for d in deps)}
+
+
+def GetNonExistingFiles(lst):
+  out = set()
+  for f in lst:
+    if not os.path.isfile(f):
+      out.add(f)
+  return out
+
+
+def main():
+
+  def DumpJson(data):
+    if args.json:
+      with open(args.json, 'w') as f:
+        json.dump(data, f)
+
+  def PrintError(msg):
+    DumpJson([])
+    parser.error(msg)
+
+  parser = argparse.ArgumentParser(description='''
+      NOTE: Use ninja to build all targets in OUT_DIR before running
+      this script.''')
+  parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
+                      help='output directory of the build')
+  parser.add_argument('--json',
+                      help='JSON output filename for missing headers')
+  parser.add_argument('--whitelist', help='file containing whitelist')
+  parser.add_argument('--skip-dirty-check', action='store_true',
+                      help='skip checking whether the build is dirty')
+  parser.add_argument('--verbose', action='store_true',
+                      help='print more diagnostic info')
+
+  args, _extras = parser.parse_known_args()
+
+  if not os.path.isdir(args.out_dir):
+    parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
+
+  if not args.skip_dirty_check and not IsBuildClean(args.out_dir):
+    dirty_msg = 'OUT_DIR looks dirty. You need to build all there.'
+    if args.json:
+      # Assume running on the bots. Silently skip this step.
+      # This is possible because the "analyze" step can be wrong due to
+      # underspecified header files. See crbug.com/725877
+      print(dirty_msg)
+      DumpJson([])
+      return 0
+    else:
+      # Assume running interactively.
+      parser.error(dirty_msg)
+
+  d_q = Queue()
+  d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, True, d_q,))
+  d_p.start()
+
+  gn_q = Queue()
+  gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,))
+  gn_p.start()
+
+  deps_q = Queue()
+  deps_p = Process(target=GetDepsPrefixes, args=(deps_q,))
+  deps_p.start()
+
+  d, d_err = d_q.get()
+  gn, gn_err = gn_q.get()
+  missing = set(d.keys()) - gn
+  nonexisting = GetNonExistingFiles(gn)
+
+  deps, deps_err = deps_q.get()
+  missing = FilterOutDepsedRepo(missing, deps)
+  nonexisting = FilterOutDepsedRepo(nonexisting, deps)
+
+  d_p.join()
+  gn_p.join()
+  deps_p.join()
+
+  if d_err:
+    PrintError(d_err)
+  if gn_err:
+    PrintError(gn_err)
+  if deps_err:
+    PrintError(deps_err)
+  if len(GetNonExistingFiles(d)) > 0:
+    print('Non-existing files in ninja deps:', GetNonExistingFiles(d))
+    PrintError('Found non-existing files in ninja deps. You should ' +
+               'build all in OUT_DIR.')
+  if len(d) == 0:
+    PrintError('OUT_DIR looks empty. You should build all there.')
+  if any((('/gen/' in i) for i in nonexisting)):
+    PrintError('OUT_DIR looks wrong. You should build all there.')
+
+  if args.whitelist:
+    whitelist = ParseWhiteList(open(args.whitelist).read())
+    missing -= whitelist
+    nonexisting -= whitelist
+
+  missing = sorted(missing)
+  nonexisting = sorted(nonexisting)
+
+  DumpJson(sorted(missing + nonexisting))
+
+  if len(missing) == 0 and len(nonexisting) == 0:
+    return 0
+
+  if len(missing) > 0:
+    print('\nThe following files should be included in gn files:')
+    for i in missing:
+      print(i)
+
+  if len(nonexisting) > 0:
+    print('\nThe following non-existing files should be removed from gn files:')
+    for i in nonexisting:
+      print(i)
+
+  if args.verbose:
+    # Only get detailed obj dependency here since it is slower.
+    GetHeadersFromNinja(args.out_dir, False, d_q)
+    d, d_err = d_q.get()
+    print('\nDetailed dependency info:')
+    for f in missing:
+      print(f)
+      for cc in d[f]:
+        print('  ', cc)
+
+    print('\nMissing headers sorted by number of affected object files:')
+    count = {k: len(v) for (k, v) in d.iteritems()}
+    for f in sorted(count, key=count.get, reverse=True):
+      if f in missing:
+        print(count[f], f)
+
+  if args.json:
+    # Assume running on the bots. Temporarily return 0 before
+    # https://crbug.com/937847 is fixed.
+    return 0
+  return 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/check_gn_headers_unittest.py b/src/build/check_gn_headers_unittest.py
new file mode 100755
index 0000000..20c3b13
--- /dev/null
+++ b/src/build/check_gn_headers_unittest.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import json
+import unittest
+import check_gn_headers
+
+
+ninja_input = r'''
+obj/a.o: #deps 1, deps mtime 123 (VALID)
+    ../../a.cc
+    ../../dir/path/b.h
+    ../../c.hh
+
+obj/b.o: #deps 1, deps mtime 123 (STALE)
+    ../../b.cc
+    ../../dir2/path/b.h
+    ../../c2.hh
+
+obj/c.o: #deps 1, deps mtime 123 (VALID)
+    ../../c.cc
+    ../../build/a.h
+    gen/b.h
+    ../../out/Release/gen/no.h
+    ../../dir3/path/b.h
+    ../../c3.hh
+'''
+
+
+gn_input = json.loads(r'''
+{
+   "others": [],
+   "targets": {
+      "//:All": {
+      },
+      "//:base": {
+         "public": [ "//base/p.h" ],
+         "sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ],
+         "visibility": [ "*" ]
+      },
+      "//:star_public": {
+         "public": "*",
+         "sources": [ "//base/c.h", "//tmp/gen/a.h" ],
+         "visibility": [ "*" ]
+      }
+    }
+}
+''')
+
+
+whitelist = r'''
+   white-front.c
+a/b/c/white-end.c # comment
+ dir/white-both.c  #more comment
+
+# empty line above
+a/b/c
+'''
+
+
+class CheckGnHeadersTest(unittest.TestCase):
+  def testNinja(self):
+    headers = check_gn_headers.ParseNinjaDepsOutput(
+        ninja_input.split('\n'), 'out/Release', False)
+    expected = {
+        'dir/path/b.h': ['obj/a.o'],
+        'c.hh': ['obj/a.o'],
+        'dir3/path/b.h': ['obj/c.o'],
+        'c3.hh': ['obj/c.o'],
+    }
+    self.assertEquals(headers, expected)
+
+  def testGn(self):
+    headers = check_gn_headers.ParseGNProjectJSON(gn_input,
+                                                  'out/Release', 'tmp')
+    expected = set([
+        'base/a.h',
+        'base/b.hh',
+        'base/c.h',
+        'base/p.h',
+        'out/Release/gen/a.h',
+    ])
+    self.assertEquals(headers, expected)
+
+  def testWhitelist(self):
+    output = check_gn_headers.ParseWhiteList(whitelist)
+    expected = set([
+        'white-front.c',
+        'a/b/c/white-end.c',
+        'dir/white-both.c',
+        'a/b/c',
+    ])
+    self.assertEquals(output, expected)
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/src/build/check_gn_headers_whitelist.txt b/src/build/check_gn_headers_whitelist.txt
new file mode 100644
index 0000000..2acf1b7
--- /dev/null
+++ b/src/build/check_gn_headers_whitelist.txt
@@ -0,0 +1,264 @@
+# Do not add files to this whitelist unless you are adding a new OS or
+# changing the GN arguments on bots.
+
+ash/accelerators/accelerator_table.h
+ash/ash_export.h
+ash/metrics/task_switch_metrics_recorder.h
+ash/metrics/task_switch_source.h
+ash/metrics/user_metrics_action.h
+ash/metrics/user_metrics_recorder.h
+ash/public/cpp/ash_public_export.h
+ash/public/cpp/ash_switches.h
+ash/public/cpp/shelf_types.h
+ash/session/session_observer.h
+ash/shell.h
+ash/wm/system_modal_container_event_filter_delegate.h
+cc/cc_export.h
+cc/input/browser_controls_state.h
+cc/input/event_listener_properties.h
+cc/input/scrollbar.h
+cc/input/scroller_size_metrics.h
+cc/layers/performance_properties.h
+chrome/browser/android/android_theme_resources.h
+chrome/browser/android/resource_id.h
+chrome/browser/ash/certificate_provider/certificate_info.h
+chrome/browser/ash/certificate_provider/certificate_provider.h
+chrome/browser/ash/certificate_provider/certificate_provider_service.h
+chrome/browser/ash/certificate_provider/certificate_provider_service_factory.h
+chrome/browser/ash/certificate_provider/certificate_requests.h
+chrome/browser/ash/certificate_provider/pin_dialog_manager.h
+chrome/browser/ash/certificate_provider/sign_requests.h
+chrome/browser/ash/certificate_provider/thread_safe_certificate_map.h
+chrome/browser/ash/login/signin/oauth2_login_manager.h
+chrome/browser/ash/login/signin/oauth2_login_verifier.h
+chrome/browser/ash/login/signin/oauth2_token_fetcher.h
+chrome/browser/ash/notifications/request_pin_view.h
+chrome/browser/ash/profiles/profile_helper.h
+chrome/browser/ash/settings/cros_settings.h
+chrome/browser/component_updater/component_installer_errors.h
+chrome/browser/download/download_file_icon_extractor.h
+chrome/browser/extensions/api/networking_cast_private/chrome_networking_cast_private_delegate.h
+chrome/browser/mac/bluetooth_utility.h
+chrome/browser/media/router/mojo/media_route_provider_util_win.h
+chrome/browser/media/webrtc/desktop_media_list_ash.h
+chrome/browser/media/webrtc/desktop_media_list_observer.h
+chrome/browser/media/webrtc/rtp_dump_type.h
+chrome/browser/media_galleries/media_file_system_context.h
+chrome/browser/notifications/displayed_notifications_dispatch_callback.h
+chrome/browser/ui/app_icon_loader_delegate.h
+chrome/browser/ui/app_list/app_list_syncable_service_factory.h
+chrome/browser/ui/ash/ash_util.h
+chrome/browser/ui/ash/multi_user/multi_user_util.h
+chrome/browser/ui/network_profile_bubble.h
+chrome/browser/ui/views/frame/browser_frame_header_ash.h
+chrome/install_static/chromium_install_modes.h
+chrome/install_static/install_constants.h
+chrome/install_static/install_details.h
+chrome/install_static/install_modes.h
+chrome/install_static/install_util.h
+chrome/install_static/test/scoped_install_details.h
+chrome/installer/util/google_update_settings.h
+components/browser_watcher/features.h
+components/browser_watcher/stability_paths.h
+components/cast_certificate/cast_crl_root_ca_cert_der-inc.h
+components/cdm/browser/cdm_message_filter_android.h
+components/device_event_log/device_event_log_export.h
+components/login/login_export.h
+components/nacl/browser/nacl_browser_delegate.h
+components/nacl/renderer/ppb_nacl_private.h
+components/omnibox/browser/autocomplete_i18n.h
+components/omnibox/browser/autocomplete_provider_client.h
+components/omnibox/browser/autocomplete_provider_listener.h
+components/policy/core/browser/configuration_policy_handler_parameters.h
+components/policy/proto/policy_proto_export.h
+components/rlz/rlz_tracker_delegate.h
+components/session_manager/session_manager_types.h
+components/sessions/core/sessions_export.h
+components/sync/engine/connection_status.h
+components/sync/engine/net/network_time_update_callback.h
+components/translate/core/browser/translate_infobar_delegate.h
+components/user_manager/user.h
+components/user_manager/user_image/user_image.h
+components/user_manager/user_manager.h
+components/wifi/wifi_export.h
+components/wifi/wifi_service.h
+content/browser/background_fetch/background_fetch_constants.h
+content/common/mac/attributed_string_coder.h
+content/public/browser/context_factory.h
+content/public/browser/media_observer.h
+content/public/common/gpu_stream_constants.h
+content/renderer/external_popup_menu.h
+content/shell/android/shell_descriptors.h
+extensions/browser/api/clipboard/clipboard_api.h
+extensions/browser/api/webcam_private/webcam.h
+extensions/browser/api/webcam_private/webcam_private_api.h
+extensions/browser/entry_info.h
+extensions/browser/extension_event_histogram_value.h
+extensions/browser/extension_function_histogram_value.h
+google_apis/gcm/base/encryptor.h
+google_apis/gcm/base/gcm_export.h
+gpu/GLES2/gl2chromium.h
+gpu/GLES2/gl2chromium_autogen.h
+gpu/GLES2/gl2extchromium.h
+gpu/command_buffer/client/context_support.h
+gpu/command_buffer/client/gles2_implementation_unittest_autogen.h
+gpu/command_buffer/client/gles2_interface_autogen.h
+gpu/command_buffer/client/gles2_interface_stub_autogen.h
+gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h
+gpu/command_buffer/client/gpu_control_client.h
+gpu/command_buffer/client/ref_counted.h
+gpu/command_buffer/client/shared_memory_limits.h
+gpu/command_buffer/common/command_buffer_shared.h
+gpu/command_buffer/common/gles2_cmd_utils_autogen.h
+gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h
+gpu/command_buffer/common/gpu_memory_allocation.h
+gpu/command_buffer/service/gles2_cmd_decoder_unittest_extensions_autogen.h
+gpu/command_buffer/service/memory_tracking.h
+gpu/config/gpu_lists_version.h
+gpu/gles2_conform_support/gtf/gtf_stubs.h
+gpu/gpu_export.h
+headless/lib/headless_macros.h
+ipc/ipc_channel_proxy_unittest_messages.h
+ipc/ipc_message_null_macros.h
+media/audio/audio_logging.h
+media/base/routing_token_callback.h
+media/base/video_renderer_sink.h
+media/cast/common/mod_util.h
+media/cast/net/rtcp/rtcp_session.h
+media/filters/ffmpeg_aac_bitstream_converter.h
+media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
+media/filters/h264_to_annex_b_bitstream_converter.h
+media/formats/mp4/avc.h
+media/formats/mp4/bitstream_converter.h
+media/formats/mp4/fourccs.h
+media/formats/mp4/rcheck.h
+media/formats/mpeg/adts_stream_parser.h
+media/formats/mpeg/mpeg1_audio_stream_parser.h
+media/formats/mpeg/mpeg_audio_stream_parser_base.h
+media/gpu/media_gpu_export.h
+mojo/core/broker_messages.h
+mojo/core/system_impl_export.h
+mojo/public/cpp/bindings/strong_associated_binding_set.h
+mojo/public/cpp/bindings/tests/mojo_test_blink_export.h
+mojo/public/cpp/test_support/test_support.h
+net/base/winsock_init.h
+net/cert/cert_type.h
+net/cert/cert_verify_proc_android.h
+net/cert/scoped_nss_types.h
+net/dns/notify_watcher_mac.h
+net/http/http_status_code_list.h
+ppapi/cpp/pass_ref.h
+ppapi/lib/gl/include/GLES2/gl2.h
+ppapi/lib/gl/include/GLES2/gl2ext.h
+ppapi/lib/gl/include/GLES2/gl2platform.h
+ppapi/lib/gl/include/KHR/khrplatform.h
+ppapi/nacl_irt/irt_manifest.h
+ppapi/nacl_irt/public/irt_ppapi.h
+ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/irt_shim_ppapi.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/shim_ppapi.h
+ppapi/proxy/dispatch_reply_message.h
+ppapi/proxy/plugin_proxy_delegate.h
+ppapi/proxy/plugin_resource_callback.h
+ppapi/proxy/ppapi_proxy_export.h
+ppapi/proxy/resource_message_filter.h
+ppapi/proxy/video_decoder_constants.h
+ppapi/shared_impl/api_id.h
+ppapi/shared_impl/dir_contents.h
+ppapi/shared_impl/ppapi_shared_export.h
+ppapi/shared_impl/singleton_resource_id.h
+remoting/base/chromoting_event_log_writer.h
+remoting/base/logging.h
+remoting/client/display/gl_renderer_delegate.h
+remoting/client/display/gl_texture_ids.h
+remoting/codec/webrtc_video_encoder.h
+remoting/host/linux/x11_keyboard.h
+remoting/host/worker_process_ipc_delegate.h
+remoting/protocol/audio_source.h
+remoting/protocol/audio_stream.h
+remoting/protocol/cursor_shape_stub.h
+remoting/protocol/message_channel_factory.h
+remoting/protocol/test_event_matchers.h
+remoting/protocol/video_feedback_stub.h
+remoting/protocol/video_stream.h
+sandbox/linux/system_headers/capability.h
+skia/ext/convolver_mips_dspr2.h
+skia/ext/skia_commit_hash.h
+third_party/hunspell/src/hunspell/hunvisapi.h
+third_party/khronos/EGL/egl.h
+third_party/khronos/EGL/eglext.h
+third_party/khronos/EGL/eglplatform.h
+third_party/khronos/GLES2/gl2.h
+third_party/khronos/GLES2/gl2ext.h
+third_party/khronos/GLES2/gl2platform.h
+third_party/khronos/GLES3/gl3.h
+third_party/khronos/GLES3/gl3platform.h
+third_party/khronos/KHR/khrplatform.h
+third_party/leveldatabase/chromium_logger.h
+third_party/libaddressinput/chromium/addressinput_util.h
+third_party/libphonenumber/phonenumber_api.h
+third_party/libudev/libudev0.h
+third_party/libudev/libudev1.h
+third_party/libvpx/source/config/linux/x64/vp8_rtcd.h
+third_party/libvpx/source/config/linux/x64/vp9_rtcd.h
+third_party/libvpx/source/config/linux/x64/vpx_config.h
+third_party/libvpx/source/config/linux/x64/vpx_dsp_rtcd.h
+third_party/libvpx/source/config/linux/x64/vpx_scale_rtcd.h
+third_party/libvpx/source/config/nacl/vp8_rtcd.h
+third_party/libvpx/source/config/nacl/vp9_rtcd.h
+third_party/libvpx/source/config/nacl/vpx_config.h
+third_party/libvpx/source/config/nacl/vpx_dsp_rtcd.h
+third_party/libvpx/source/config/nacl/vpx_scale_rtcd.h
+third_party/libvpx/source/config/vpx_version.h
+third_party/opus/src/src/opus_private.h
+third_party/opus/src/tests/test_opus_common.h
+third_party/protobuf/src/google/protobuf/compiler/csharp/csharp_names.h
+third_party/qcms/src/halffloat.h
+third_party/qcms/src/tests/qcms_test_util.h
+third_party/qcms/src/tests/timing.h
+third_party/snappy/linux/config.h
+third_party/speech-dispatcher/libspeechd.h
+third_party/sqlite/sqlite3.h
+third_party/tcmalloc/chromium/src/addressmap-inl.h
+third_party/tcmalloc/chromium/src/base/basictypes.h
+third_party/tcmalloc/chromium/src/base/dynamic_annotations.h
+third_party/tcmalloc/chromium/src/base/googleinit.h
+third_party/tcmalloc/chromium/src/base/linux_syscall_support.h
+third_party/tcmalloc/chromium/src/base/spinlock_linux-inl.h
+third_party/tcmalloc/chromium/src/base/stl_allocator.h
+third_party/tcmalloc/chromium/src/base/thread_annotations.h
+third_party/tcmalloc/chromium/src/base/thread_lister.h
+third_party/tcmalloc/chromium/src/gperftools/malloc_extension_c.h
+third_party/tcmalloc/chromium/src/gperftools/malloc_hook_c.h
+third_party/tcmalloc/chromium/src/gperftools/tcmalloc.h
+third_party/tcmalloc/chromium/src/heap-profile-stats.h
+third_party/tcmalloc/chromium/src/libc_override.h
+third_party/tcmalloc/chromium/src/malloc_hook_mmap_linux.h
+third_party/tcmalloc/chromium/src/packed-cache-inl.h
+third_party/tcmalloc/chromium/src/page_heap_allocator.h
+third_party/tcmalloc/chromium/src/pagemap.h
+third_party/tcmalloc/chromium/src/stacktrace_x86-inl.h
+third_party/tcmalloc/chromium/src/system-alloc.h
+third_party/tcmalloc/chromium/src/tcmalloc_guard.h
+third_party/wayland/include/config.h
+third_party/wayland/include/src/wayland-version.h
+third_party/woff2/src/port.h
+third_party/yasm/source/config/linux/config.h
+third_party/yasm/source/config/linux/libyasm-stdint.h
+third_party/zlib/contrib/minizip/crypt.h
+tools/ipc_fuzzer/message_lib/all_message_null_macros.h
+ui/base/clipboard/clipboard_test_template.h
+ui/events/keycodes/keyboard_codes_posix.h
+ui/gfx/overlay_transform.h
+ui/gfx/scoped_ns_graphics_context_save_gstate_mac.h
+ui/gfx/swap_result.h
+ui/gfx/sys_color_change_listener.h
+ui/gl/GL/glextchromium.h
+ui/gl/gl_bindings_api_autogen_egl.h
+ui/gl/gl_bindings_api_autogen_gl.h
+ui/gl/gl_bindings_api_autogen_glx.h
+ui/gl/gpu_preference.h
+ui/gl/gpu_switching_observer.h
+ui/gl/progress_reporter.h
+ui/ozone/public/ozone_switches.h
diff --git a/src/build/check_return_value.py b/src/build/check_return_value.py
new file mode 100755
index 0000000..9caa15f
--- /dev/null
+++ b/src/build/check_return_value.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+  print(1)
+else:
+  print(0)
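A quick sanity check of the contract (hypothetical invocations, run from the
source root):

  import subprocess
  import sys
  # Prints '1' because `true` exits 0; prints '0' because `false` doesn't.
  subprocess.call([sys.executable, 'build/check_return_value.py', 'true'])
  subprocess.call([sys.executable, 'build/check_return_value.py', 'false'])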
diff --git a/src/build/chromeos/.style.yapf b/src/build/chromeos/.style.yapf
new file mode 100644
index 0000000..de0c6a7
--- /dev/null
+++ b/src/build/chromeos/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = chromium
diff --git a/src/build/chromeos/pylintrc b/src/build/chromeos/pylintrc
new file mode 100644
index 0000000..2a721bf
--- /dev/null
+++ b/src/build/chromeos/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/src/build/chromeos/test_runner.py b/src/build/chromeos/test_runner.py
new file mode 100755
index 0000000..c669e35
--- /dev/null
+++ b/src/build/chromeos/test_runner.py
@@ -0,0 +1,1008 @@
+#!/usr/bin/env vpython3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import re
+import shutil
+import signal
+import socket
+import sys
+import tempfile
+
+# The following non-std imports are fetched via vpython. See the list at
+# //.vpython
+import dateutil.parser  # pylint: disable=import-error
+import jsonlines  # pylint: disable=import-error
+import psutil  # pylint: disable=import-error
+import six
+
+CHROMIUM_SRC_PATH = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..'))
+
+# Use the android test-runner's gtest results support library for generating
+# output json ourselves.
+sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android'))
+from pylib.base import base_test_result  # pylint: disable=import-error
+from pylib.base import result_sink  # pylint: disable=import-error
+from pylib.results import json_results  # pylint: disable=import-error
+
+if six.PY2:
+  import subprocess32 as subprocess  # pylint: disable=import-error
+else:
+  import subprocess  # pylint: disable=import-error,wrong-import-order
+
+DEFAULT_CROS_CACHE = os.path.abspath(
+    os.path.join(CHROMIUM_SRC_PATH, 'build', 'cros_cache'))
+CHROMITE_PATH = os.path.abspath(
+    os.path.join(CHROMIUM_SRC_PATH, 'third_party', 'chromite'))
+CROS_RUN_TEST_PATH = os.path.abspath(
+    os.path.join(CHROMITE_PATH, 'bin', 'cros_run_test'))
+
+LACROS_LAUNCHER_SCRIPT_PATH = os.path.abspath(
+    os.path.join(CHROMIUM_SRC_PATH, 'build', 'lacros',
+                 'mojo_connection_lacros_launcher.py'))
+
+# This is a special hostname that resolves to a different DUT in the lab
+# depending on which lab machine you're on.
+LAB_DUT_HOSTNAME = 'variable_chromeos_device_hostname'
+
+SYSTEM_LOG_LOCATIONS = [
+    '/var/log/chrome/',
+    '/var/log/messages',
+    '/var/log/ui/',
+]
+
+TAST_DEBUG_DOC = 'https://bit.ly/2LgvIXz'
+
+
+class TestFormatError(Exception):
+  pass
+
+
+class RemoteTest(object):
+
+  # This is a basic shell script that can be appended to in order to invoke the
+  # test on the device.
+  BASIC_SHELL_SCRIPT = [
+      '#!/bin/sh',
+
+      # /home and /tmp are mounted with "noexec" in the device, but some of our
+      # tools and tests use those dirs as a workspace (eg: vpython downloads
+      # python binaries to ~/.vpython-root and /tmp/vpython_bootstrap).
+      # /usr/local/tmp doesn't have this restriction, so change the location of
+      # the home and temp dirs for the duration of the test.
+      'export HOME=/usr/local/tmp',
+      'export TMPDIR=/usr/local/tmp',
+  ]
+
+  def __init__(self, args, unknown_args):
+    self._additional_args = unknown_args
+    self._path_to_outdir = args.path_to_outdir
+    self._test_launcher_summary_output = args.test_launcher_summary_output
+    self._logs_dir = args.logs_dir
+    self._use_vm = args.use_vm
+    self._rdb_client = result_sink.TryInitClient()
+
+    self._retries = 0
+    self._timeout = None
+    self._test_launcher_shard_index = args.test_launcher_shard_index
+    self._test_launcher_total_shards = args.test_launcher_total_shards
+
+    # The location on disk of a shell script that can be optionally used to
+    # invoke the test on the device. If it's not set, we assume self._test_cmd
+    # contains the test invocation.
+    self._on_device_script = None
+
+    self._test_cmd = [
+        CROS_RUN_TEST_PATH,
+        '--board',
+        args.board,
+        '--cache-dir',
+        args.cros_cache,
+    ]
+    if args.use_vm:
+      self._test_cmd += [
+          '--start',
+          # Don't persist any filesystem changes after the VM shuts down.
+          '--copy-on-write',
+      ]
+    else:
+      self._test_cmd += [
+          '--device', args.device if args.device else LAB_DUT_HOSTNAME
+      ]
+    if args.logs_dir:
+      for log in SYSTEM_LOG_LOCATIONS:
+        self._test_cmd += ['--results-src', log]
+      self._test_cmd += [
+          '--results-dest-dir',
+          os.path.join(args.logs_dir, 'system_logs')
+      ]
+    if args.flash:
+      self._test_cmd += ['--flash']
+      if args.public_image:
+        self._test_cmd += ['--public-image']
+
+    # This environment variable is set for tests that have been instrumented
+    # for code coverage. Its incoming value is expected to be a location
+    # inside a subdirectory of result_dir above. This is converted to an
+    # absolute path that the vm is able to write to, and passed in the
+    # --results-src flag to cros_run_vm_test for copying out of the vm before
+    # its termination.
+    self._llvm_profile_var = None
+    if os.environ.get('LLVM_PROFILE_FILE'):
+      _, llvm_profile_file = os.path.split(os.environ['LLVM_PROFILE_FILE'])
+      self._llvm_profile_var = '/tmp/profraw/%s' % llvm_profile_file
+
+      # This should make the vm test runner exfil the profiling data.
+      self._test_cmd += ['--results-src', '/tmp/profraw']
+
+    self._test_env = setup_env()
+
+  @property
+  def suite_name(self):
+    raise NotImplementedError('Child classes need to define suite name.')
+
+  @property
+  def test_cmd(self):
+    return self._test_cmd
+
+  def write_test_script_to_disk(self, script_contents):
+    # Since we're using an on_device_script to invoke the test, we'll need to
+    # set cwd.
+    self._test_cmd += [
+        '--remote-cmd',
+        '--cwd',
+        os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
+    ]
+    logging.info('Running the following command on the device:')
+    logging.info('\n' + '\n'.join(script_contents))
+    fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir)
+    os.fchmod(fd, 0o755)
+    with os.fdopen(fd, 'w') as f:
+      f.write('\n'.join(script_contents) + '\n')
+    return tmp_path
+
+  def run_test(self):
+    # Traps SIGTERM and kills all child processes of cros_run_test when it's
+    # caught. This will allow us to capture logs from the device if a test hangs
+    # and gets timeout-killed by swarming. See also:
+    # https://chromium.googlesource.com/infra/luci/luci-py/+/master/appengine/swarming/doc/Bot.md#graceful-termination_aka-the-sigterm-and-sigkill-dance
+    test_proc = None
+
+    def _kill_child_procs(trapped_signal, _):
+      logging.warning('Received signal %d. Killing child processes of test.',
+                      trapped_signal)
+      if not test_proc or not test_proc.pid:
+        # This isn't expected to happen, but guard against it anyway.
+        logging.error('Test process not running.')
+        return
+      for child in psutil.Process(test_proc.pid).children():
+        logging.warning('Killing process %s', child)
+        child.kill()
+
+    signal.signal(signal.SIGTERM, _kill_child_procs)
+
+    for i in range(self._retries + 1):
+      logging.info('########################################')
+      logging.info('Test attempt #%d', i)
+      logging.info('########################################')
+      test_proc = subprocess.Popen(
+          self._test_cmd,
+          stdout=sys.stdout,
+          stderr=sys.stderr,
+          env=self._test_env)
+      try:
+        test_proc.wait(timeout=self._timeout)
+      except subprocess.TimeoutExpired:  # pylint: disable=no-member
+        logging.error('Test timed out. Sending SIGTERM.')
+        # SIGTERM the proc and wait 10s for it to close.
+        test_proc.terminate()
+        try:
+          test_proc.wait(timeout=10)
+        except subprocess.TimeoutExpired:  # pylint: disable=no-member
+          # If it hasn't closed in 10s, SIGKILL it.
+          logging.error('Test did not exit in time. Sending SIGKILL.')
+          test_proc.kill()
+          test_proc.wait()
+      logging.info('Test exited with %d.', test_proc.returncode)
+      if test_proc.returncode == 0:
+        break
+
+    ret = self.post_run(test_proc.returncode)
+    # Allow post_run to override test proc return code. (Useful when the host
+    # side Tast bin returns 0 even for failed tests.)
+    if ret is not None:
+      return ret
+    return test_proc.returncode
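
Reduced to a self-contained sketch, the graceful-termination dance above looks like this (the sleep command is a hypothetical stand-in for cros_run_test):

    import signal
    import subprocess

    import psutil

    proc = subprocess.Popen(['sleep', '1000'])

    def _on_sigterm(signum, _frame):
        # Kill only the children so their logs get flushed; the parent is
        # left to exit on its own.
        for child in psutil.Process(proc.pid).children():
            child.kill()

    signal.signal(signal.SIGTERM, _on_sigterm)
    proc.wait()
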
+
+  def post_run(self, return_code):
+    if self._on_device_script:
+      os.remove(self._on_device_script)
+    # Create a simple json results file for a test run. The results will contain
+    # only one test (suite_name), and will either be a PASS or FAIL depending on
+    # return_code.
+    if self._test_launcher_summary_output:
+      result = (
+          base_test_result.ResultType.FAIL
+          if return_code else base_test_result.ResultType.PASS)
+      suite_result = base_test_result.BaseTestResult(self.suite_name, result)
+      run_results = base_test_result.TestRunResults()
+      run_results.AddResult(suite_result)
+      with open(self._test_launcher_summary_output, 'w') as f:
+        json.dump(json_results.GenerateResultsDict([run_results]), f)
+
+  @staticmethod
+  def get_artifacts(path):
+    """Crawls a given directory for file artifacts to attach to a test.
+
+    Args:
+      path: Path to a directory to search for artifacts.
+    Returns:
+      A dict mapping name of the artifact to its absolute filepath.
+    """
+    artifacts = {}
+    for dirpath, _, filenames in os.walk(path):
+      for f in filenames:
+        artifact_path = os.path.join(dirpath, f)
+        artifacts[os.path.relpath(artifact_path, path)] = {
+            'filePath': artifact_path,
+        }
+    return artifacts
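
For example, crawling a hypothetical results directory yields a dict mapping relative names to absolute-path records:

    artifacts = RemoteTest.get_artifacts('/tmp/test_logs')  # hypothetical dir
    # => {'log.txt': {'filePath': '/tmp/test_logs/log.txt'},
    #     'shots/screen.png': {'filePath': '/tmp/test_logs/shots/screen.png'}}
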
+
+
+class TastTest(RemoteTest):
+
+  def __init__(self, args, unknown_args):
+    super(TastTest, self).__init__(args, unknown_args)
+
+    self._suite_name = args.suite_name
+    self._tast_vars = args.tast_vars
+    self._tests = args.tests
+    # The CQ passes in '--gtest_filter' when specifying tests to skip. Store it
+    # here and parse it later to integrate it into Tast executions.
+    self._gtest_style_filter = args.gtest_filter
+    self._attr_expr = args.attr_expr
+    self._should_strip = args.strip_chrome
+    self._deploy_lacros = args.deploy_lacros
+
+    if self._deploy_lacros and self._should_strip:
+      raise TestFormatError(
+          '--strip-chrome is only applicable to ash-chrome because '
+          'lacros-chrome deployment uses --nostrip by default, so it cannot '
+          'be specified with --deploy-lacros.')
+
+    if not self._llvm_profile_var and not self._logs_dir:
+      # The host-side Tast bin returns 0 when tests fail, so we need to capture
+      # and parse its json results to reliably determine if tests fail.
+      raise TestFormatError(
+          'When using the host-side Tast bin, "--logs-dir" must be passed in '
+          'order to parse its results.')
+
+    # If the first test filter is negative, it should be safe to assume all of
+    # them are, so just test the first filter.
+    if self._gtest_style_filter and self._gtest_style_filter[0] == '-':
+      raise TestFormatError('Negative test filters not supported for Tast.')
+
+  @property
+  def suite_name(self):
+    return self._suite_name
+
+  def build_test_command(self):
+    unsupported_args = [
+        '--test-launcher-retry-limit',
+        '--test-launcher-batch-limit',
+        '--gtest_repeat',
+    ]
+    for unsupported_arg in unsupported_args:
+      if any(arg.startswith(unsupported_arg) for arg in self._additional_args):
+        logging.info(
+            '%s not supported for Tast tests. The arg will be ignored.',
+            unsupported_arg)
+        self._additional_args = [
+            arg for arg in self._additional_args
+            if not arg.startswith(unsupported_arg)
+        ]
+
+    # Lacros deployment mounts itself by default.
+    self._test_cmd.extend([
+        '--deploy-lacros', '--lacros-launcher-script',
+        LACROS_LAUNCHER_SCRIPT_PATH
+    ] if self._deploy_lacros else ['--deploy', '--mount'])
+    self._test_cmd += [
+        '--build-dir',
+        os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH)
+    ] + self._additional_args
+
+    # Coverage tests require some special pre-test setup, so use an
+    # on_device_script in that case. For all other tests, use cros_run_test's
+    # built-in '--tast' option. This gives us much better results reporting.
+    if self._llvm_profile_var:
+      # Build the shell script that will be used on the device to invoke the
+      # test.
+      device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
+      device_test_script_contents += [
+          'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' %
+          (self._llvm_profile_var)
+      ]
+
+      local_test_runner_cmd = ['local_test_runner', '-waituntilready']
+      if self._use_vm:
+        # If we're running tests in VMs, tell the test runner to skip tests that
+        # aren't compatible.
+        local_test_runner_cmd.append('-extrauseflags=tast_vm')
+      if self._attr_expr:
+        local_test_runner_cmd.append(pipes.quote(self._attr_expr))
+      else:
+        local_test_runner_cmd.extend(self._tests)
+      device_test_script_contents.append(' '.join(local_test_runner_cmd))
+
+      self._on_device_script = self.write_test_script_to_disk(
+          device_test_script_contents)
+
+      self._test_cmd += [
+          '--files',
+          os.path.relpath(self._on_device_script), '--',
+          './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
+      ]
+    else:
+      # Capture tast's results in the logs dir as well.
+      if self._logs_dir:
+        self._test_cmd += [
+            '--results-dir',
+            self._logs_dir,
+        ]
+      self._test_cmd += [
+          '--tast-total-shards=%d' % self._test_launcher_total_shards,
+          '--tast-shard-index=%d' % self._test_launcher_shard_index,
+      ]
+      # If we're using a test filter, replace the contents of the Tast
+      # conditional with a long list of "name:test" expressions, one for each
+      # test in the filter.
+      if self._gtest_style_filter:
+        if self._attr_expr or self._tests:
+          logging.warning(
+              'Presence of --gtest_filter will cause the specified Tast expr'
+              ' or test list to be ignored.')
+        names = []
+        for test in self._gtest_style_filter.split(':'):
+          names.append('"name:%s"' % test)
+        self._attr_expr = '(' + ' || '.join(names) + ')'
+
+      if self._attr_expr:
+        # Don't use pipes.quote() here. Something funky happens with the arg
+        # as it gets passed down from cros_run_test to tast. (Tast picks up the
+        # escaping single quotes and complains that the attribute expression
+        # "must be within parentheses".)
+        self._test_cmd.append('--tast=%s' % self._attr_expr)
+      else:
+        self._test_cmd.append('--tast')
+        self._test_cmd.extend(self._tests)
+
+      for v in self._tast_vars or []:
+        self._test_cmd.extend(['--tast-var', v])
+
+      # Mounting ash-chrome gives it enough disk space to not need stripping,
+      # but only when it isn't instrumented with code coverage.
+      # Lacros uses --nostrip by default, so there is no need to specify.
+      if not self._deploy_lacros and not self._should_strip:
+        self._test_cmd.append('--nostrip')
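
The gtest-filter-to-attr-expr conversion above can be exercised in isolation; a minimal sketch, matching the expectation in the unit tests further down:

    gtest_filter = 'ui.ChromeLogin:ui.WindowControl'
    names = ['"name:%s"' % test for test in gtest_filter.split(':')]
    attr_expr = '(' + ' || '.join(names) + ')'
    # attr_expr == '("name:ui.ChromeLogin" || "name:ui.WindowControl")'
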
+
+  def post_run(self, return_code):
+    # If we don't need to parse the host-side Tast tool's results, fall back to
+    # the parent method's default behavior.
+    if self._llvm_profile_var:
+      return super(TastTest, self).post_run(return_code)
+
+    tast_results_path = os.path.join(self._logs_dir, 'streamed_results.jsonl')
+    if not os.path.exists(tast_results_path):
+      logging.error(
+          'Tast results not found at %s. Falling back to generic result '
+          'reporting.', tast_results_path)
+      return super(TastTest, self).post_run(return_code)
+
+    # See the link below for the format of the results:
+    # https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/cmd/tast/run#TestResult
+    with jsonlines.open(tast_results_path) as reader:
+      tast_results = collections.deque(reader)
+
+    suite_results = base_test_result.TestRunResults()
+    for test in tast_results:
+      errors = test['errors']
+      start, end = test['start'], test['end']
+      # Use dateutil to parse the timestamps since datetime can't handle
+      # nanosecond precision.
+      duration = dateutil.parser.parse(end) - dateutil.parser.parse(start)
+      duration_ms = duration.total_seconds() * 1000
+      if bool(test['skipReason']):
+        result = base_test_result.ResultType.SKIP
+      elif errors:
+        result = base_test_result.ResultType.FAIL
+      else:
+        result = base_test_result.ResultType.PASS
+      error_log = ''
+      if errors:
+        # See the link below for the format of these errors:
+        # https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/tast/testing#Error
+        for err in errors:
+          error_log += err['stack'] + '\n'
+      error_log += (
+          "\nIf you're unsure why this test failed, consult the steps "
+          'outlined in\n%s\n' % TAST_DEBUG_DOC)
+      base_result = base_test_result.BaseTestResult(
+          test['name'], result, duration=duration_ms, log=error_log)
+      suite_results.AddResult(base_result)
+      self._maybe_handle_perf_results(test['name'])
+
+      if self._rdb_client:
+        # Walk the contents of the test's "outDir" and attach any file found
+        # inside as an RDB 'artifact'. (This could include system logs, screen
+        # shots, etc.)
+        artifacts = self.get_artifacts(test['outDir'])
+        self._rdb_client.Post(test['name'], result, duration_ms, error_log,
+                              artifacts)
+
+    if self._rdb_client and self._logs_dir:
+      # Attach artifacts from the device that don't apply to a single test.
+      artifacts = self.get_artifacts(
+          os.path.join(self._logs_dir, 'system_logs'))
+      artifacts.update(
+          self.get_artifacts(os.path.join(self._logs_dir, 'crashes')))
+      self._rdb_client.ReportInvocationLevelArtifacts(artifacts)
+
+    if self._test_launcher_summary_output:
+      with open(self._test_launcher_summary_output, 'w') as f:
+        json.dump(json_results.GenerateResultsDict([suite_results]), f)
+
+    if not suite_results.DidRunPass():
+      return 1
+    elif return_code:
+      logging.warning(
+          'No failed tests found, but exit code of %d was returned from '
+          'cros_run_test.', return_code)
+      return return_code
+    return 0
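
A stripped-down sketch of the results parsing above, assuming a streamed_results.jsonl whose records carry the name/errors/start/end/skipReason fields of the linked TestResult format (the path is hypothetical):

    import jsonlines
    import dateutil.parser

    with jsonlines.open('streamed_results.jsonl') as reader:
        for test in reader:
            duration = (dateutil.parser.parse(test['end']) -
                        dateutil.parser.parse(test['start']))
            duration_ms = duration.total_seconds() * 1000
            if test['skipReason']:
                outcome = 'SKIP'
            elif test['errors']:
                outcome = 'FAIL'
            else:
                outcome = 'PASS'
            print(test['name'], outcome, duration_ms)
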
+
+  def _maybe_handle_perf_results(self, test_name):
+    """Prepares any perf results from |test_name| for process_perf_results.
+
+    - process_perf_results looks for top level directories containing a
+      perf_results.json file and a test_results.json file. The directory names
+      are used as the benchmark names.
+    - If a perf_results.json or results-chart.json file exists in the
+      |test_name| results directory, a top level directory is created and the
+      perf results file is copied to perf_results.json.
+    - A trivial test_results.json file is also created to indicate that the test
+      succeeded (this function would not be called otherwise).
+    - When process_perf_results is run, it will find the expected files in the
+      named directory and upload the benchmark results.
+    """
+
+    perf_results = os.path.join(self._logs_dir, 'tests', test_name,
+                                'perf_results.json')
+    # TODO(stevenjb): Remove check for crosbolt results-chart.json file.
+    if not os.path.exists(perf_results):
+      perf_results = os.path.join(self._logs_dir, 'tests', test_name,
+                                  'results-chart.json')
+    if os.path.exists(perf_results):
+      benchmark_dir = os.path.join(self._logs_dir, test_name)
+      if not os.path.isdir(benchmark_dir):
+        os.makedirs(benchmark_dir)
+      shutil.copyfile(perf_results,
+                      os.path.join(benchmark_dir, 'perf_results.json'))
+      # process_perf_results.py expects a test_results.json file.
+      test_results = {'valid': True, 'failures': []}
+      with open(os.path.join(benchmark_dir, 'test_results.json'), 'w') as out:
+        json.dump(test_results, out)
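
For reference, the layout this produces for a hypothetical perf test named ui.PerfTest under logs_dir:

    <logs_dir>/tests/ui.PerfTest/perf_results.json   # written by the test
    <logs_dir>/ui.PerfTest/perf_results.json         # copied benchmark dir
    <logs_dir>/ui.PerfTest/test_results.json         # {"valid": true, "failures": []}
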
+
+
+class GTestTest(RemoteTest):
+
+  # The following list corresponds to paths that should not be copied over to
+  # the device during tests. In other words, these files are only ever used on
+  # the host.
+  _FILE_IGNORELIST = [
+      re.compile(r'.*build/android.*'),
+      re.compile(r'.*build/chromeos.*'),
+      re.compile(r'.*build/cros_cache.*'),
+      # The following matches anything under //testing/ that isn't under
+      # //testing/buildbot/filters/.
+      re.compile(r'.*testing/(?!buildbot/filters).*'),
+      re.compile(r'.*third_party/chromite.*'),
+  ]
+
+  def __init__(self, args, unknown_args):
+    super(GTestTest, self).__init__(args, unknown_args)
+
+    self._test_exe = args.test_exe
+    self._runtime_deps_path = args.runtime_deps_path
+    self._vpython_dir = args.vpython_dir
+
+    self._on_device_script = None
+    self._env_vars = args.env_var
+    self._stop_ui = args.stop_ui
+    self._trace_dir = args.trace_dir
+
+  @property
+  def suite_name(self):
+    return self._test_exe
+
+  def build_test_command(self):
+    # To keep things easy for us, ensure both types of output locations are
+    # the same.
+    if self._test_launcher_summary_output and self._logs_dir:
+      json_out_dir = os.path.dirname(self._test_launcher_summary_output) or '.'
+      if os.path.abspath(json_out_dir) != os.path.abspath(self._logs_dir):
+        raise TestFormatError(
+            '--test-launcher-summary-output and --logs-dir must point to '
+            'the same directory.')
+
+    if self._test_launcher_summary_output:
+      result_dir, result_file = os.path.split(
+          self._test_launcher_summary_output)
+      # If args.test_launcher_summary_output is a file in cwd, result_dir will
+      # be an empty string; replace it with '.' in that case so cros_run_test
+      # can handle it correctly.
+      if not result_dir:
+        result_dir = '.'
+      device_result_file = '/tmp/%s' % result_file
+      self._test_cmd += [
+          '--results-src',
+          device_result_file,
+          '--results-dest-dir',
+          result_dir,
+      ]
+
+    if self._trace_dir and self._logs_dir:
+      trace_path = os.path.dirname(self._trace_dir) or '.'
+      if os.path.abspath(trace_path) != os.path.abspath(self._logs_dir):
+        raise TestFormatError(
+            '--trace-dir and --logs-dir must point to the same directory.')
+
+    if self._trace_dir:
+      trace_path, trace_dirname = os.path.split(self._trace_dir)
+      device_trace_dir = '/tmp/%s' % trace_dirname
+      self._test_cmd += [
+          '--results-src',
+          device_trace_dir,
+          '--results-dest-dir',
+          trace_path,
+      ]
+
+    # Build the shell script that will be used on the device to invoke the test.
+    # Stored here as a list of lines.
+    device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
+    if self._llvm_profile_var:
+      device_test_script_contents += [
+          'export LLVM_PROFILE_FILE=%s' % self._llvm_profile_var,
+      ]
+
+    for var_name, var_val in self._env_vars:
+      device_test_script_contents += ['export %s=%s' % (var_name, var_val)]
+
+    if self._vpython_dir:
+      vpython_path = os.path.join(self._path_to_outdir, self._vpython_dir,
+                                  'vpython')
+      cpython_path = os.path.join(self._path_to_outdir, self._vpython_dir,
+                                  'bin', 'python')
+      if not os.path.exists(vpython_path) or not os.path.exists(cpython_path):
+        raise TestFormatError(
+            '--vpython-dir must point to a dir with both infra/python/cpython '
+            'and infra/tools/luci/vpython installed.')
+      vpython_spec_path = os.path.relpath(
+          os.path.join(CHROMIUM_SRC_PATH, '.vpython'), self._path_to_outdir)
+      # Initialize the vpython cache. This can take 10-20s, and some tests
+      # can't afford to wait that long on the first invocation.
+      device_test_script_contents.extend([
+          'export PATH=$PWD/%s:$PWD/%s/bin/:$PATH' %
+          (self._vpython_dir, self._vpython_dir),
+          'vpython -vpython-spec %s -vpython-tool install' %
+          (vpython_spec_path),
+      ])
+
+    test_invocation = ('LD_LIBRARY_PATH=./ ./%s --test-launcher-shard-index=%d '
+                       '--test-launcher-total-shards=%d' %
+                       (self._test_exe, self._test_launcher_shard_index,
+                        self._test_launcher_total_shards))
+    if self._test_launcher_summary_output:
+      test_invocation += ' --test-launcher-summary-output=%s' % (
+          device_result_file)
+
+    if self._trace_dir:
+      device_test_script_contents.extend([
+          'rm -rf %s' % device_trace_dir,
+          'su chronos -c -- "mkdir -p %s"' % device_trace_dir,
+      ])
+      test_invocation += ' --trace-dir=%s' % device_trace_dir
+
+    if self._additional_args:
+      test_invocation += ' %s' % ' '.join(self._additional_args)
+
+    if self._stop_ui:
+      device_test_script_contents += [
+          'stop ui',
+      ]
+      # The UI service on the device owns the chronos user session, so shutting
+      # it down as chronos would kill the test's own execution. We therefore
+      # have to run as root up until the test invocation.
+      test_invocation = 'su chronos -c -- "%s"' % test_invocation
+      # And we'll need to chown everything since cros_run_test's "--as-chronos"
+      # option normally does that for us.
+      device_test_script_contents.append('chown -R chronos: ../..')
+    else:
+      self._test_cmd += [
+          # Some tests fail as root, so run as the less privileged user
+          # 'chronos'.
+          '--as-chronos',
+      ]
+
+    device_test_script_contents.append(test_invocation)
+
+    self._on_device_script = self.write_test_script_to_disk(
+        device_test_script_contents)
+
+    runtime_files = [os.path.relpath(self._on_device_script)]
+    runtime_files += self._read_runtime_files()
+    if self._vpython_dir:
+      # --vpython-dir is relative to the out dir, but --files expects paths
+      # relative to src dir, so fix the path up a bit.
+      runtime_files.append(
+          os.path.relpath(
+              os.path.abspath(
+                  os.path.join(self._path_to_outdir, self._vpython_dir)),
+              CHROMIUM_SRC_PATH))
+      # TODO(bpastene): Add the vpython spec to the test's runtime deps instead
+      # of handling it here.
+      runtime_files.append('.vpython')
+
+    for f in runtime_files:
+      self._test_cmd.extend(['--files', f])
+
+    self._test_cmd += [
+        '--',
+        './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
+    ]
+
+  def _read_runtime_files(self):
+    if not self._runtime_deps_path:
+      return []
+
+    abs_runtime_deps_path = os.path.abspath(
+        os.path.join(self._path_to_outdir, self._runtime_deps_path))
+    with open(abs_runtime_deps_path) as runtime_deps_file:
+      files = [l.strip() for l in runtime_deps_file if l]
+    rel_file_paths = []
+    for f in files:
+      rel_file_path = os.path.relpath(
+          os.path.abspath(os.path.join(self._path_to_outdir, f)))
+      if not any(regex.match(rel_file_path) for regex in self._FILE_IGNORELIST):
+        rel_file_paths.append(rel_file_path)
+    return rel_file_paths
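
As an illustration of _FILE_IGNORELIST above, the //testing/ pattern drops everything except the buildbot filters subdirectory (file names hypothetical):

    import re

    ignore = re.compile(r'.*testing/(?!buildbot/filters).*')
    print(bool(ignore.match('testing/test_env.py')))              # True: ignored
    print(bool(ignore.match('testing/buildbot/filters/foo.txt'))) # False: kept
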
+
+  def post_run(self, _):
+    if self._on_device_script:
+      os.remove(self._on_device_script)
+
+
+def device_test(args, unknown_args):
+  # cros_run_test has trouble with relative paths that go up directories,
+  # so cd to src/, which should be the root of all data deps.
+  os.chdir(CHROMIUM_SRC_PATH)
+
+  # pylint: disable=redefined-variable-type
+  # TODO: Remove the above when depot_tools' pylint is updated to include the
+  # fix to https://github.com/PyCQA/pylint/issues/710.
+  if args.test_type == 'tast':
+    test = TastTest(args, unknown_args)
+  else:
+    test = GTestTest(args, unknown_args)
+
+  test.build_test_command()
+  logging.info('Running the following command on the device:')
+  logging.info(' '.join(test.test_cmd))
+
+  return test.run_test()
+
+
+def host_cmd(args, cmd_args):
+  if not cmd_args:
+    raise TestFormatError('Must specify command to run on the host.')
+  elif args.deploy_chrome and not args.path_to_outdir:
+    raise TestFormatError(
+        '--path-to-outdir must be specified if --deploy-chrome is passed.')
+
+  cros_run_test_cmd = [
+      CROS_RUN_TEST_PATH,
+      '--board',
+      args.board,
+      '--cache-dir',
+      os.path.join(CHROMIUM_SRC_PATH, args.cros_cache),
+  ]
+  if args.use_vm:
+    cros_run_test_cmd += [
+        '--start',
+        # Don't persist any filesystem changes after the VM shuts down.
+        '--copy-on-write',
+    ]
+  else:
+    cros_run_test_cmd += [
+        '--device', args.device if args.device else LAB_DUT_HOSTNAME
+    ]
+  if args.verbose:
+    cros_run_test_cmd.append('--debug')
+  if args.flash:
+    cros_run_test_cmd.append('--flash')
+    if args.public_image:
+      cros_run_test_cmd += ['--public-image']
+
+  if args.logs_dir:
+    for log in SYSTEM_LOG_LOCATIONS:
+      cros_run_test_cmd += ['--results-src', log]
+    cros_run_test_cmd += [
+        '--results-dest-dir',
+        os.path.join(args.logs_dir, 'system_logs')
+    ]
+
+  test_env = setup_env()
+  if args.deploy_chrome or args.deploy_lacros:
+    # Mounting ash-chrome gives it enough disk space to not need stripping.
+    cros_run_test_cmd.extend([
+        '--deploy-lacros', '--lacros-launcher-script',
+        LACROS_LAUNCHER_SCRIPT_PATH
+    ] if args.deploy_lacros else ['--deploy', '--mount', '--nostrip'])
+
+    cros_run_test_cmd += [
+        '--build-dir',
+        os.path.join(CHROMIUM_SRC_PATH, args.path_to_outdir)
+    ]
+
+  cros_run_test_cmd += [
+      '--host-cmd',
+      '--',
+  ] + cmd_args
+
+  logging.info('Running the following command:')
+  logging.info(' '.join(cros_run_test_cmd))
+
+  return subprocess.call(
+      cros_run_test_cmd, stdout=sys.stdout, stderr=sys.stderr, env=test_env)
+
+
+def setup_env():
+  """Returns a copy of the current env with some needed vars added."""
+  env = os.environ.copy()
+  # Some chromite scripts expect chromite/bin to be on PATH.
+  env['PATH'] = env['PATH'] + ':' + os.path.join(CHROMITE_PATH, 'bin')
+  # deploy_chrome needs a set of GN args used to build chrome to determine if
+  # certain libraries need to be pushed to the device. It looks for the args via
+  # an env var. To trigger the default deploying behavior, give it a dummy set
+  # of args.
+  # TODO(crbug.com/823996): Make the GN-dependent deps controllable via cmd
+  # line args.
+  if not env.get('GN_ARGS'):
+    env['GN_ARGS'] = 'enable_nacl = true'
+  if not env.get('USE'):
+    env['USE'] = 'highdpi'
+  return env
+
+
+def add_common_args(*parsers):
+  for parser in parsers:
+    parser.add_argument('--verbose', '-v', action='store_true')
+    parser.add_argument(
+        '--board', type=str, required=True, help='Type of CrOS device.')
+    parser.add_argument(
+        '--cros-cache',
+        type=str,
+        default=DEFAULT_CROS_CACHE,
+        help='Path to cros cache.')
+    parser.add_argument(
+        '--path-to-outdir',
+        type=str,
+        required=True,
+        help='Path to output directory, all of whose contents will be '
+        'deployed to the device.')
+    parser.add_argument(
+        '--runtime-deps-path',
+        type=str,
+        help='Runtime data dependency file from GN.')
+    parser.add_argument(
+        '--vpython-dir',
+        type=str,
+        help='Location on host of a directory containing a vpython binary to '
+        'deploy to the device before the test starts. The location of '
+        'this dir will be added to PATH on the device. WARNING: The '
+        'arch of the device might not match the arch of the host, so '
+        'avoid using "${platform}" when downloading vpython via CIPD.')
+    parser.add_argument(
+        '--logs-dir',
+        type=str,
+        dest='logs_dir',
+        help='Will copy everything under /var/log/ from the device after the '
+        'test into the specified dir.')
+    # Shard args are parsed here since we might also specify them via env vars.
+    parser.add_argument(
+        '--test-launcher-shard-index',
+        type=int,
+        default=os.environ.get('GTEST_SHARD_INDEX', 0),
+        help='Index of the external shard to run.')
+    parser.add_argument(
+        '--test-launcher-total-shards',
+        type=int,
+        default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+        help='Total number of external shards.')
+    parser.add_argument(
+        '--flash',
+        action='store_true',
+        help='Will flash the device to the current SDK version before running '
+        'the test.')
+    parser.add_argument(
+        '--public-image',
+        action='store_true',
+        help='Will flash a public "full" image to the device.')
+
+    vm_or_device_group = parser.add_mutually_exclusive_group()
+    vm_or_device_group.add_argument(
+        '--use-vm',
+        action='store_true',
+        help='Will run the test in the VM instead of a device.')
+    vm_or_device_group.add_argument(
+        '--device',
+        type=str,
+        help='Hostname (or IP) of device to run the test on. This arg is not '
+        'required if --use-vm is set.')
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  subparsers = parser.add_subparsers(dest='test_type')
+  # Host-side test args.
+  host_cmd_parser = subparsers.add_parser(
+      'host-cmd',
+      help='Runs a host-side test. Pass the host-side command to run after '
+      '"--". If --use-vm is passed, hostname and port for the device '
+      'will be 127.0.0.1:9222.')
+  host_cmd_parser.set_defaults(func=host_cmd)
+  host_cmd_parser.add_argument(
+      '--deploy-chrome',
+      action='store_true',
+      help='Will deploy a locally built ash-chrome binary to the device before '
+      'running the host-cmd.')
+  host_cmd_parser.add_argument(
+      '--deploy-lacros',
+      action='store_true',
+      help='Deploy a lacros-chrome instead of ash-chrome.')
+
+  gtest_parser = subparsers.add_parser(
+      'gtest', help='Runs a device-side gtest.')
+  gtest_parser.set_defaults(func=device_test)
+  gtest_parser.add_argument(
+      '--test-exe',
+      type=str,
+      required=True,
+      help='Path to test executable to run inside the device.')
+
+  # GTest args. Some are passed down to the test binary in the device. Others
+  # are parsed here since they might need tweaking or special handling.
+  gtest_parser.add_argument(
+      '--test-launcher-summary-output',
+      type=str,
+      help='When set, will pass the same option down to the test and retrieve '
+      'its result file at the specified location.')
+  gtest_parser.add_argument(
+      '--stop-ui',
+      action='store_true',
+      help='Will stop the UI service in the device before running the test.')
+  gtest_parser.add_argument(
+      '--trace-dir',
+      type=str,
+      help='When set, will pass the option down to the test to generate '
+      'traces, and will retrieve the trace files to the specified location.')
+  gtest_parser.add_argument(
+      '--env-var',
+      nargs=2,
+      action='append',
+      default=[],
+      help='Env var to set on the device for the duration of the test. '
+      'Expected format is "--env-var SOME_VAR_NAME some_var_value". Specify '
+      'multiple times for more than one var.')
+
+  # Tast test args.
+  # pylint: disable=line-too-long
+  tast_test_parser = subparsers.add_parser(
+      'tast',
+      help='Runs a device-side set of Tast tests. For more details, see: '
+      'https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/running_tests.md'
+  )
+  tast_test_parser.set_defaults(func=device_test)
+  tast_test_parser.add_argument(
+      '--suite-name',
+      type=str,
+      required=True,
+      help='Name to apply to the set of Tast tests to run. This has no effect '
+      'on what is executed, but is used mainly for test results reporting '
+      'and tracking (eg: flakiness dashboard).')
+  tast_test_parser.add_argument(
+      '--test-launcher-summary-output',
+      type=str,
+      help='Generates a simple GTest-style JSON result file for the test run.')
+  tast_test_parser.add_argument(
+      '--attr-expr',
+      type=str,
+      help='A boolean expression whose matching tests will run '
+      '(eg: ("dep:chrome")).')
+  tast_test_parser.add_argument(
+      '--strip-chrome',
+      action='store_true',
+      help='Strips symbols from ash-chrome before deploying to the device.')
+  tast_test_parser.add_argument(
+      '--deploy-lacros',
+      action='store_true',
+      help='Deploy a lacros-chrome instead of ash-chrome.')
+  tast_test_parser.add_argument(
+      '--tast-var',
+      action='append',
+      dest='tast_vars',
+      help='Runtime variables for Tast tests, expected to be "key=value" '
+      'pairs.')
+  tast_test_parser.add_argument(
+      '--test',
+      '-t',
+      action='append',
+      dest='tests',
+      help='A Tast test to run in the device (eg: "ui.ChromeLogin").')
+  tast_test_parser.add_argument(
+      '--gtest_filter',
+      type=str,
+      help="Similar to GTest's arg of the same name, this will filter out the "
+      "specified tests from the Tast run. However, due to the nature of Tast's "
+      'cmd-line API, this will overwrite the value(s) of "--test" above.')
+
+  add_common_args(gtest_parser, tast_test_parser, host_cmd_parser)
+
+  args = sys.argv[1:]
+  unknown_args = []
+  # If a '--' is present in the args, treat everything to the right of it as
+  # args to the test and everything to the left as args to this test runner.
+  # Otherwise treat all known args as args to this test runner and all unknown
+  # args as test args.
+  if '--' in args:
+    unknown_args = args[args.index('--') + 1:]
+    args = args[0:args.index('--')]
+  if unknown_args:
+    args = parser.parse_args(args=args)
+  else:
+    args, unknown_args = parser.parse_known_args()
+
+  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
+
+  if not args.use_vm and not args.device:
+    logging.warning(
+        'The test runner now assumes it is running in the lab environment. If '
+        'this is unintentional, re-invoke the test runner with the '
+        '"--use-vm" arg if using a VM, or with the "--device=<DUT>" arg '
+        'to specify a DUT.')
+
+    # If we're not running on a VM, but haven't specified a hostname, assume
+    # we're on a lab bot and are trying to run a test on a lab DUT. See if the
+    # magic lab DUT hostname resolves to anything. (It will in the lab and will
+    # not on dev machines.)
+    try:
+      socket.getaddrinfo(LAB_DUT_HOSTNAME, None)
+    except socket.gaierror:
+      logging.error('The default lab DUT hostname of %s is unreachable.',
+                    LAB_DUT_HOSTNAME)
+      return 1
+
+  return args.func(args, unknown_args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
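
The '--' handling in main() boils down to the following split (argv values hypothetical):

    argv = ['gtest', '--test-exe=base_unittests', '--', '--gtest_repeat=3']
    if '--' in argv:
        runner_args = argv[:argv.index('--')]    # parsed by this script
        test_args = argv[argv.index('--') + 1:]  # forwarded to the test
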
diff --git a/src/build/chromeos/test_runner_test.py b/src/build/chromeos/test_runner_test.py
new file mode 100755
index 0000000..15d1b1f
--- /dev/null
+++ b/src/build/chromeos/test_runner_test.py
@@ -0,0 +1,352 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import shutil
+import sys
+import tempfile
+import unittest
+
+# The following non-std imports are fetched via vpython. See the list at
+# //.vpython
+import mock  # pylint: disable=import-error
+from parameterized import parameterized  # pylint: disable=import-error
+import six
+
+import test_runner
+
+_TAST_TEST_RESULTS_JSON = {
+    "name": "ui.ChromeLogin",
+    "errors": None,
+    "start": "2020-01-01T15:41:30.799228462-08:00",
+    "end": "2020-01-01T15:41:53.318914698-08:00",
+    "skipReason": ""
+}
+
+
+class TestRunnerTest(unittest.TestCase):
+
+  def setUp(self):
+    self._tmp_dir = tempfile.mkdtemp()
+    self.mock_rdb = mock.patch.object(
+        test_runner.result_sink, 'TryInitClient', return_value=None)
+    self.mock_rdb.start()
+
+  def tearDown(self):
+    shutil.rmtree(self._tmp_dir, ignore_errors=True)
+    self.mock_rdb.stop()
+
+  def safeAssertItemsEqual(self, list1, list2):
+    """A Py3 safe version of assertItemsEqual.
+
+    See https://bugs.python.org/issue17866.
+    """
+    if six.PY3:
+      self.assertSetEqual(set(list1), set(list2))
+    else:
+      self.assertItemsEqual(list1, list2)
+
+
+class TastTests(TestRunnerTest):
+
+  def get_common_tast_args(self, use_vm):
+    return [
+        'script_name',
+        'tast',
+        '--suite-name=chrome_all_tast_tests',
+        '--board=eve',
+        '--flash',
+        '--path-to-outdir=out_eve/Release',
+        '--logs-dir=%s' % self._tmp_dir,
+        '--use-vm' if use_vm else '--device=localhost:2222',
+    ]
+
+  def get_common_tast_expectations(self, use_vm, is_lacros=False):
+    expectation = [
+        test_runner.CROS_RUN_TEST_PATH,
+        '--board',
+        'eve',
+        '--cache-dir',
+        test_runner.DEFAULT_CROS_CACHE,
+        '--results-dest-dir',
+        '%s/system_logs' % self._tmp_dir,
+        '--flash',
+        '--build-dir',
+        'out_eve/Release',
+        '--results-dir',
+        self._tmp_dir,
+        '--tast-total-shards=1',
+        '--tast-shard-index=0',
+    ]
+    expectation.extend(['--start', '--copy-on-write']
+                       if use_vm else ['--device', 'localhost:2222'])
+    for p in test_runner.SYSTEM_LOG_LOCATIONS:
+      expectation.extend(['--results-src', p])
+
+    if not is_lacros:
+      expectation += [
+          '--mount',
+          '--deploy',
+          '--nostrip',
+      ]
+    return expectation
+
+  def test_tast_gtest_filter(self):
+    """Tests running tast tests with a gtest-style filter."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(False) + [
+        '--attr-expr=( "group:mainline" && "dep:chrome" && !informational)',
+        '--gtest_filter=ui.ChromeLogin:ui.WindowControl',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      # The gtest filter should cause the Tast expr to be replaced with a list
+      # of the tests in the filter.
+      expected_cmd = self.get_common_tast_expectations(False) + [
+          '--tast=("name:ui.ChromeLogin" || "name:ui.WindowControl")'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_attr_expr(self, use_vm):
+    """Tests running a tast tests specified by an attribute expression."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '--attr-expr=( "group:mainline" && "dep:chrome" && !informational)',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast=( "group:mainline" && "dep:chrome" && !informational)',
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_lacros(self, use_vm):
+    """Tests running a tast tests for Lacros."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=lacros.Basic',
+        '--deploy-lacros',
+    ]
+
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(
+          use_vm, is_lacros=True) + [
+              '--tast',
+              'lacros.Basic',
+              '--deploy-lacros',
+              '--lacros-launcher-script',
+              test_runner.LACROS_LAUNCHER_SCRIPT_PATH,
+          ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_with_vars(self, use_vm):
+    """Tests running a tast tests with runtime variables."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=ui.ChromeLogin',
+        '--tast-var=key=value',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast', 'ui.ChromeLogin', '--tast-var', 'key=value'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast(self, use_vm):
+    """Tests running a tast tests."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=ui.ChromeLogin',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast', 'ui.ChromeLogin'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+
+class GTestTest(TestRunnerTest):
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_gtest(self, use_vm):
+    """Tests running a gtest."""
+    fd_mock = mock.mock_open()
+
+    args = [
+        'script_name',
+        'gtest',
+        '--test-exe=out_eve/Release/base_unittests',
+        '--board=eve',
+        '--path-to-outdir=out_eve/Release',
+        '--use-vm' if use_vm else '--device=localhost:2222',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen,\
+         mock.patch.object(os, 'fdopen', fd_mock),\
+         mock.patch.object(os, 'remove') as mock_remove,\
+         mock.patch.object(tempfile, 'mkstemp',
+            return_value=(3, 'out_eve/Release/device_script.sh')),\
+         mock.patch.object(os, 'fchmod'):
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      self.assertEqual(1, mock_popen.call_count)
+      expected_cmd = [
+          test_runner.CROS_RUN_TEST_PATH, '--board', 'eve', '--cache-dir',
+          test_runner.DEFAULT_CROS_CACHE, '--as-chronos', '--remote-cmd',
+          '--cwd', 'out_eve/Release', '--files',
+          'out_eve/Release/device_script.sh'
+      ]
+      expected_cmd.extend(['--start', '--copy-on-write']
+                          if use_vm else ['--device', 'localhost:2222'])
+      expected_cmd.extend(['--', './device_script.sh'])
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+      fd_mock().write.assert_called_once_with(
+          '#!/bin/sh\nexport HOME=/usr/local/tmp\n'
+          'export TMPDIR=/usr/local/tmp\n'
+          'LD_LIBRARY_PATH=./ ./out_eve/Release/base_unittests '
+          '--test-launcher-shard-index=0 --test-launcher-total-shards=1\n')
+      mock_remove.assert_called_once_with('out_eve/Release/device_script.sh')
+
+  def test_gtest_with_vpython(self):
+    """Tests building a gtest with --vpython-dir."""
+    args = mock.MagicMock()
+    args.test_exe = 'base_unittests'
+    args.test_launcher_summary_output = None
+    args.trace_dir = None
+    args.runtime_deps_path = None
+    args.path_to_outdir = self._tmp_dir
+    args.vpython_dir = self._tmp_dir
+    args.logs_dir = self._tmp_dir
+
+    # With vpython_dir initially empty, the test_runner should error out
+    # due to missing vpython binaries.
+    gtest = test_runner.GTestTest(args, None)
+    with self.assertRaises(test_runner.TestFormatError):
+      gtest.build_test_command()
+
+    # Create the two expected tools, and the test should be ready to run.
+    with open(os.path.join(args.vpython_dir, 'vpython'), 'w'):
+      pass  # Just touch the file.
+    os.mkdir(os.path.join(args.vpython_dir, 'bin'))
+    with open(os.path.join(args.vpython_dir, 'bin', 'python'), 'w'):
+      pass
+    gtest = test_runner.GTestTest(args, None)
+    gtest.build_test_command()
+
+
+class HostCmdTests(TestRunnerTest):
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_host_cmd(self, is_lacros):
+    args = [
+        'script_name',
+        'host-cmd',
+        '--board=eve',
+        '--flash',
+        '--path-to-outdir=out/Release',
+        '--device=localhost:2222',
+    ]
+    if is_lacros:
+      args += ['--deploy-lacros']
+    else:
+      args += ['--deploy-chrome']
+    args += [
+        '--',
+        'fake_cmd',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+         mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = [
+          test_runner.CROS_RUN_TEST_PATH,
+          '--board',
+          'eve',
+          '--cache-dir',
+          test_runner.DEFAULT_CROS_CACHE,
+          '--flash',
+          '--device',
+          'localhost:2222',
+          '--build-dir',
+          os.path.join(test_runner.CHROMIUM_SRC_PATH, 'out/Release'),
+          '--host-cmd',
+      ]
+      if is_lacros:
+        expected_cmd += [
+            '--deploy-lacros',
+            '--lacros-launcher-script',
+            test_runner.LACROS_LAUNCHER_SCRIPT_PATH,
+        ]
+      else:
+        expected_cmd += ['--mount', '--nostrip', '--deploy']
+
+      expected_cmd += [
+          '--',
+          'fake_cmd',
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/ciopfs.sha1 b/src/build/ciopfs.sha1
new file mode 100644
index 0000000..c1855a3
--- /dev/null
+++ b/src/build/ciopfs.sha1
@@ -0,0 +1 @@
+5454b3c4f1c9992047e7ae9d6d14d5b49b1b12f3
\ No newline at end of file
diff --git a/src/build/cipd/cipd.gni b/src/build/cipd/cipd.gni
new file mode 100644
index 0000000..e7795c1
--- /dev/null
+++ b/src/build/cipd/cipd.gni
@@ -0,0 +1,140 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build targets for constructing CIPD packages.
+#
+# Prepares a CIPD archive and generates a manifest file.
+#
+# TODO(crbug.com/1042819): Add support for including directories.
+#
+# Parameters:
+#   package_definition_yaml: CIPD package definition filename. "cipd.yaml"
+#                            if unspecified.
+#   package: The path where the package will be located inside the CIPD
+#            repository.
+#   description: Sets the "description" field in CIPD package definition.
+#   install_mode: String, should be either "symlink" or "copy". Defaults to
+#                 "symlink".
+#   deps: A list of targets to build prior to copying files.
+#   sources: A list of files to copy into the staging root.
+#   source_directories: A list of directories to include in the package. Should
+#                       only be used when listing out all the files (in a given
+#                       directory) in |sources| is unfeasible.
+#
+# Example:
+#   cipd_package_definition("chromedriver") {
+#     package = "path/to/cipd/package"
+#     description = "Prebuilt test binary."
+#     install_mode = "copy"
+#     deps = [ "//path/to:test_binary_target" ]
+#     sources = [ "//path/to:test_binary_file" ]
+#   }
+#
+template("cipd_package_definition") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "data",
+                           "source_directories",
+                           "data_deps",
+                           "sources",
+                           "testonly",
+                         ])
+
+  assert(defined(sources) || defined(source_directories),
+         "At least one sources input must be specified.")
+
+  _install_mode = "symlink"
+  if (defined(invoker.install_mode)) {
+    _install_mode = invoker.install_mode
+  }
+  assert(_install_mode == "copy" || _install_mode == "symlink",
+         "\"install_mode\" arg should be either \"copy\" or \"symlink\".")
+
+  _cipd_definition_yaml = "cipd.yaml"
+  if (defined(invoker.package_definition_yaml)) {
+    _cipd_definition_yaml = invoker.package_definition_yaml
+  }
+
+  _package_staging_dir = "${target_gen_dir}/${target_name}"
+
+  _yaml_contents = [
+    "package: ${invoker.package}",
+    "description: ${invoker.description}",
+    "root: " + rebase_path(_package_staging_dir),
+    "install_mode: ${_install_mode}",
+    "data:",
+  ]
+
+  if (defined(sources)) {
+    foreach(source, sources) {
+      _yaml_contents += [ "  - file: " + get_path_info(source, "file") ]
+    }
+    copy(target_name) {
+      outputs = [ "${_package_staging_dir}/{{source_file_part}}" ]
+    }
+  }
+
+  if (defined(source_directories)) {
+    foreach(directory, source_directories) {
+      _yaml_contents += [ "  - dir: " + directory ]
+    }
+  }
+
+  write_file("${_package_staging_dir}/${_cipd_definition_yaml}", _yaml_contents)
+}
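
For illustration, a package declared with install_mode = "copy" and sources = [ "//tools/foo/test_binary_file" ] (a hypothetical path) would stage a cipd.yaml roughly like:

    package: path/to/cipd/package
    description: Prebuilt test binary.
    root: <target_gen_dir>/<target_name>
    install_mode: copy
    data:
      - file: test_binary_file
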
+
+# Create a cipd file based on inputs and FILES.cfg config. Most of the arguments
+# are similar with |cipd_package_definition| above.
+#
+# Additional parameters:
+#
+#   package_definition_yaml: The output yaml file. Default is
+#                            ${target_name}_cipd.yaml.
+#   files_file: The file defines what files and directories to include.
+#               Example: //tools/build/chromeos/FILES.cfg.
+#   buildtype: str, required. It can be "dev" or "official". A file is
+#              included only when its buildtype matches this value.
+#   arch: str, required. It can be "32bit", "64bit", "arm".
+#
+# Example:
+# cipd_package_definition_by_file("chrome_cipd") {
+#     package = "path/to/cipd/package"
+#     description = "Prebuilt test binary."
+#     install_mode = "copy"
+#     files_file = "//chrome/tools/build/chromeos/FILES.json"
+#     buildtype = "dev"
+#     arch = "64bit"
+#     deps = [ "//path/to:test_binary_target" ]
+#   }
+template("cipd_package_definition_by_file") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "data",
+                           "data_deps",
+                           "sources",
+                           "testonly",
+                         ])
+  _output_yaml_filename = "${target_name}_cipd.yaml"
+  if (defined(invoker.package_definition_yaml)) {
+    _output_yaml_filename = invoker.package_definition_yaml
+  }
+  action(target_name) {
+    script = "//build/cipd/cipd_from_file.py"
+    inputs = [ "//build/cipd/cipd_from_file.py" ]
+    args = [
+      "--description=" + invoker.description,
+      "--buildtype=" + invoker.buildtype,
+      "--arch=" + invoker.arch,
+      "--files_file=" + rebase_path(invoker.files_file, root_build_dir),
+      "--package=" + invoker.package,
+      "--install_mode=" + invoker.install_mode,
+      "--output_yaml_file=" +
+          rebase_path("${root_out_dir}/" + _output_yaml_filename,
+                      root_build_dir),
+    ]
+    outputs = [ "${root_out_dir}/" + _output_yaml_filename ]
+  }
+}
diff --git a/src/build/cipd/cipd_from_file.py b/src/build/cipd/cipd_from_file.py
new file mode 100755
index 0000000..0f08f69
--- /dev/null
+++ b/src/build/cipd/cipd_from_file.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script to generate yaml file based on FILES.cfg."""
+
+import argparse
+import os
+
+
+def _ParseFilesCfg(files_file):
+  """Return the dictionary of archive file info read from the given file."""
+  if not os.path.exists(files_file):
+    raise IOError('Files list does not exist (%s).' % files_file)
+  exec_globals = {'__builtins__': None}
+
+  exec(open(files_file).read(), exec_globals)
+  return exec_globals['FILES']
+
+
+def _Process(args):
+  yaml_content = ('package: ' + args.package + '\ndescription: ' +
+                  args.description + '\ninstall_mode: ' + args.install_mode +
+                  '\ndata:\n')
+  fileobj = _ParseFilesCfg(args.files_file)
+  for item in fileobj:
+    if 'buildtype' in item:
+      if args.buildtype not in item['buildtype']:
+        continue
+    if 'arch' in item:
+      if args.arch not in item['arch']:
+        continue
+    if 'type' in item and item['type'] == 'folder':
+      yaml_content += ' - dir: ' + item['filename'] + '\n'
+    else:
+      yaml_content += ' - file: ' + item['filename'] + '\n'
+
+  with open(args.output_yaml_file, 'w') as f:
+    f.write(yaml_content)
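
Because the files file is exec'd and must define FILES, a minimal hypothetical input is plain Python:

    # Hypothetical FILES.cfg contents:
    FILES = [
        {'filename': 'chrome', 'buildtype': ['dev', 'official'],
         'arch': ['64bit']},
        {'filename': 'locales', 'type': 'folder', 'buildtype': ['official']},
    ]
    # With --buildtype=dev --arch=64bit, only 'chrome' passes the filters,
    # contributing ' - file: chrome' to the yaml's data section.
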
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output_yaml_file', help='File to create.')
+  parser.add_argument(
+      '--package',
+      help='The path where the package will be located inside the CIPD '
+      'repository.')
+  parser.add_argument(
+      '--description',
+      help='Sets the "description" field in CIPD package definition.')
+  parser.add_argument('--install_mode',
+                      help='String, should be either "symlink" or "copy".')
+  parser.add_argument('--files_file',
+                      help='FILES.cfg describes what files to include.')
+  parser.add_argument('--buildtype', help='buildtype for FILES.cfg.')
+  parser.add_argument('--arch', help='arch for FILES.cfg')
+
+  args = parser.parse_args()
+
+  _Process(args)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/cipd/clobber_cipd_root.py b/src/build/cipd/clobber_cipd_root.py
new file mode 100755
index 0000000..5d36c72
--- /dev/null
+++ b/src/build/cipd/clobber_cipd_root.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Clobbers a CIPD root."""
+
+import argparse
+import os
+import shutil
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Clobbers the CIPD root in the given directory.')
+
+  parser.add_argument(
+      '--root',
+      required=True,
+      help='Root directory for dependency.')
+  args = parser.parse_args()
+
+  cipd_root_dir = os.path.join(args.root, '.cipd')
+  if os.path.exists(cipd_root_dir):
+    shutil.rmtree(cipd_root_dir)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/clobber.py b/src/build/clobber.py
new file mode 100755
index 0000000..1de3212
--- /dev/null
+++ b/src/build/clobber.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+  """Extracts from a build.ninja the commands to run GN.
+
+  The commands to run GN are the gn rule and build.ninja build step at the
+  top of the build.ninja file. We want to keep these when deleting GN builds
+  since we want to preserve the command-line flags to GN.
+
+  On error, returns the empty string."""
+  result = ""
+  with open(build_ninja_file, 'r') as f:
+    # Read until the third blank line. The first section GN writes to the
+    # file is "ninja_required_version = x.y.z", the second is the "rule gn"
+    # definition, and the third is the "build build.ninja" step, each
+    # separated by blank lines.
+    num_blank_lines = 0
+    while num_blank_lines < 3:
+      line = f.readline()
+      if len(line) == 0:
+        return ''  # Unexpected EOF.
+      result += line
+      if line[0] == '\n':
+        num_blank_lines = num_blank_lines + 1
+  return result
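
For reference, the preserved head of a GN-generated build.ninja has this three-section shape (version and out dir are examples, mirroring the fallback template written below):

    ninja_required_version = 1.7.2

    rule gn
      command = gn -q gen //out/Release/
      description = Regenerating ninja files

    build build.ninja: gn
      generator = 1
      depfile = build.ninja.d
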
+
+
+def delete_dir(build_dir):
+  if os.path.islink(build_dir):
+    return
+  # For unknown reasons (anti-virus?) rmtree of Chromium build directories
+  # often fails on Windows.
+  if sys.platform.startswith('win'):
+    subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+  else:
+    shutil.rmtree(build_dir)
+
+
+def delete_build_dir(build_dir):
+  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+  if not os.path.exists(build_ninja_d_file):
+    delete_dir(build_dir)
+    return
+
+  # GN builds aren't automatically regenerated when you sync. To avoid
+  # messing with the GN workflow, erase everything but the args file, and
+  # write a dummy build.ninja file that will automatically rerun GN the next
+  # time Ninja is run.
+  build_ninja_file = os.path.join(build_dir, 'build.ninja')
+  build_commands = extract_gn_build_commands(build_ninja_file)
+
+  try:
+    gn_args_file = os.path.join(build_dir, 'args.gn')
+    with open(gn_args_file, 'r') as f:
+      args_contents = f.read()
+  except IOError:
+    args_contents = ''
+
+  caught_exception = None
+  try:
+    # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+    # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
+    # the exception and rethrow it later. (Binding the exception to a separate
+    # name keeps it alive in Python 3, where the "as" target is cleared when
+    # the except block exits.)
+    delete_dir(build_dir)
+    os.mkdir(build_dir)
+  except Exception as e:
+    caught_exception = e
+
+  # Put back the args file (if any).
+  if args_contents != '':
+    with open(gn_args_file, 'w') as f:
+      f.write(args_contents)
+
+  # Write the build.ninja file sufficiently to regenerate itself.
+  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+    if build_commands != '':
+      f.write(build_commands)
+    else:
+      # Couldn't parse the build.ninja file, write a default thing.
+      f.write('''ninja_required_version = 1.7.2
+
+rule gn
+  command = gn -q gen //out/%s/
+  description = Regenerating ninja files
+
+build build.ninja: gn
+  generator = 1
+  depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+  # Write a .d file for the build which references a nonexistent file. This
+  # will make Ninja always mark the build as dirty.
+  with open(build_ninja_d_file, 'w') as f:
+    f.write('build.ninja: nonexistent_file.gn\n')
+
+  if saved_exception is not None:
+    # Rethrow the exception we caught earlier.
+    raise saved_exception
+
+
+def clobber(out_dir):
+  """Clobber contents of build directory.
+
+  Don't delete the directory itself: some checkouts have the build directory
+  mounted."""
+  for f in os.listdir(out_dir):
+    path = os.path.join(out_dir, f)
+    if os.path.isfile(path):
+      os.unlink(path)
+    elif os.path.isdir(path):
+      delete_build_dir(path)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('out_dir', help='The output directory to clobber')
+  args = parser.parse_args()
+  clobber(args.out_dir)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/compiled_action.gni b/src/build/compiled_action.gni
new file mode 100644
index 0000000..7e25a0b
--- /dev/null
+++ b/src/build/compiled_action.gni
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach but instead of running a Python script, it will compile a
+# given tool in the host toolchain and run that (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   depfile
+#   deps
+#   visibility   (all optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. That says "I only care
+# about this target in the host toolchain". Usually this is what you want, and
+# saves unnecessarily compiling your tool for the target platform. But if you
+# need a target build of your tool as well, just leave off the if statement.
+
+if (host_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "depfile",
+                             "inputs",
+                             "outputs",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "depfile",
+                             "inputs",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
diff --git a/src/build/compute_build_timestamp.py b/src/build/compute_build_timestamp.py
new file mode 100755
index 0000000..ceb507b
--- /dev/null
+++ b/src/build/compute_build_timestamp.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Returns a timestamp that approximates the build date.
+
+build_type determines the timestamp generated, in both cases relative to the
+date of the most recent commit:
+- default: the build date is set to the most recent first Sunday of a month at
+  5:00am. The reason is that this is a time when invalidating the build cache
+  shouldn't have major repercussions (due to lower load).
+- official: the build date is set to the time of the most recent commit.
+Either way, it is guaranteed to be in the past and always in UTC.
+"""
+
+# The requirements for the timestamp:
+# (1) for the purposes of continuous integration, longer duration
+#     between cache invalidation is better, but >=1mo is preferable.
+# (2) for security purposes, timebombs would ideally be as close to
+#     the actual time of the build as possible. It must be in the past.
+# (3) HSTS certificate pinning is valid for 70 days. To make CI builds enforce
+#     HSTS pinning, <=1mo is preferable.
+#
+# On Windows, the timestamp is also written in the PE/COFF file header of
+# executables and dlls.  That timestamp and the executable's file size are
+# the only two pieces of information that identify a given executable on
+# the symbol server, so rarely changing timestamps can cause conflicts there
+# as well. We only upload symbols for official builds to the symbol server.
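+#
+# A typical invocation, as a sketch; the script prints a single Unix timestamp
+# (seconds since the epoch, UTC, and guaranteed to be in the past):
+#
+#   $ python build/compute_build_timestamp.py default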
+
+from __future__ import print_function
+
+import argparse
+import calendar
+import datetime
+import doctest
+import os
+import sys
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def GetFirstSundayOfMonth(year, month):
+  """Returns the first sunday of the given month of the given year.
+
+  >>> GetFirstSundayOfMonth(2016, 2)
+  7
+  >>> GetFirstSundayOfMonth(2016, 3)
+  6
+  >>> GetFirstSundayOfMonth(2000, 1)
+  2
+  """
+  weeks = calendar.Calendar().monthdays2calendar(year, month)
+  # Return the first day in the first week that is a Sunday.
+  return [date_day[0] for date_day in weeks[0] if date_day[1] == 6][0]
+
+
+def GetUnofficialBuildDate(build_date):
+  """Gets the approximate build date given the specific build type.
+
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 6, 1, 2, 3))
+  datetime.datetime(2016, 1, 3, 5, 0)
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 7, 5))
+  datetime.datetime(2016, 2, 7, 5, 0)
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 8, 5))
+  datetime.datetime(2016, 2, 7, 5, 0)
+  """
+
+  if build_date.hour < 5:
+    # The time is locked at 5:00 am in UTC to cause the build cache
+    # invalidation to not happen exactly at midnight. Use the same calculation
+    # as the day before.
+    # See //base/build_time.cc.
+    build_date = build_date - datetime.timedelta(days=1)
+  build_date = datetime.datetime(build_date.year, build_date.month,
+                                 build_date.day, 5, 0, 0)
+
+  day = build_date.day
+  month = build_date.month
+  year = build_date.year
+  first_sunday = GetFirstSundayOfMonth(year, month)
+  # If our build is after the first Sunday, we've already refreshed our build
+  # cache on a quiet day, so just use that day.
+  # Otherwise, take the first Sunday of the previous month.
+  if day >= first_sunday:
+    day = first_sunday
+  else:
+    month -= 1
+    if month == 0:
+      month = 12
+      year -= 1
+    day = GetFirstSundayOfMonth(year, month)
+  return datetime.datetime(
+      year, month, day, build_date.hour, build_date.minute, build_date.second)
+
+
+def main():
+  if doctest.testmod()[0]:
+    return 1
+  argument_parser = argparse.ArgumentParser()
+  argument_parser.add_argument(
+      'build_type', help='The type of build', choices=('official', 'default'))
+  args = argument_parser.parse_args()
+
+  # The mtime of the revision in build/util/LASTCHANGE is stored in a file
+  # next to it. Read it, to get a deterministic time close to "now".
+  # That date is then modified as described at the top of the file so that
+  # it changes less frequently than with every commit.
+  # This intentionally always uses build/util/LASTCHANGE's commit time even if
+  # use_dummy_lastchange is set.
+  lastchange_file = os.path.join(THIS_DIR, 'util', 'LASTCHANGE.committime')
+  with open(lastchange_file) as f:
+    last_commit_timestamp = int(f.read())
+  build_date = datetime.datetime.utcfromtimestamp(last_commit_timestamp)
+
+  # For official builds we want full fidelity time stamps because official
+  # builds are typically added to symbol servers and Windows symbol servers
+  # use the link timestamp as the prime differentiator, but for unofficial
+  # builds we do lots of quantization to avoid churn.
+  if args.build_type != 'official':
+    build_date = GetUnofficialBuildDate(build_date)
+  print(int(calendar.timegm(build_date.utctimetuple())))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/config/BUILD.gn b/src/build/config/BUILD.gn
new file mode 100644
index 0000000..ed94a16
--- /dev/null
+++ b/src/build/config/BUILD.gn
@@ -0,0 +1,390 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/features.gni")
+
+# Subprojects need to override arguments in {mac,ios}_sdk_overrides.gni in their
+# .gn config, but those arguments are only used on macOS. Including
+# mac_sdk_overrides.gni ensures that this doesn't trigger an unused argument
+# warning.
+import("//build/config/ios/ios_sdk_overrides.gni")
+import("//build/config/mac/mac_sdk_overrides.gni")
+
+import("//build/config/pch.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/goma.gni")
+if (is_android) {
+  import("//build/config/android/abi.gni")
+}
+
+# ==============================================
+#   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+# ==============================================
+#
+# Legacy feature defines applied to all targets.
+#
+# These are applied to every single compile in the build and most of them are
+# only relevant to a few files. This bloats command lines and causes
+# unnecessary recompiles when flags are flipped.
+#
+# To pass defines to source code from the build, use the buildflag system which
+# will write headers containing the defines you need. This isolates the define
+# and means its definition can participate in the build graph, only recompiling
+# things when it actually changes.
+#
+# See //build/buildflag_header.gni for instructions on generating headers.
+#
+# This will also allow you to scope your build flag to a BUILD.gn file (or a
+# .gni file if you need it from more than one place) rather than making global
+# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
+# build flags.
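+#
+# As a sketch (all of these names are hypothetical), the pattern looks like:
+#
+#   buildflag_header("foo_buildflags") {
+#     header = "foo_buildflags.h"
+#     flags = [ "ENABLE_FOO=$enable_foo" ]
+#   }
+#
+# C++ code then includes the generated header and tests BUILDFLAG(ENABLE_FOO),
+# so only the files that include it are recompiled when the flag changes.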
+config("feature_flags") {
+  defines = []
+  if (dcheck_always_on) {
+    defines += [ "DCHECK_ALWAYS_ON=1" ]
+    if (dcheck_is_configurable) {
+      defines += [ "DCHECK_IS_CONFIGURABLE" ]
+    }
+  }
+  if (use_udev) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "USE_UDEV" ]
+  }
+  if (use_aura) {
+    defines += [ "USE_AURA=1" ]
+  }
+  if (use_glib) {
+    defines += [ "USE_GLIB=1" ]
+  }
+  if (use_nss_certs) {
+    defines += [ "USE_NSS_CERTS=1" ]
+  }
+  if (use_ozone && !is_android) {
+    # Note that some Chrome OS builds unconditionally set |use_ozone| to true,
+    # but they also build some targets with the Android toolchain. This ensures
+    # that Android targets still build with USE_OZONE=0 in such cases.
+    #
+    # TODO(crbug.com/837032): Maybe this can be cleaned up if we can avoid
+    # setting use_ozone globally.
+    defines += [ "USE_OZONE=1" ]
+  }
+  if (use_x11) {
+    defines += [ "USE_X11=1" ]
+  }
+  if (is_asan || is_hwasan || is_lsan || is_tsan || is_msan) {
+    defines += [ "MEMORY_TOOL_REPLACES_ALLOCATOR" ]
+  }
+  if (is_asan) {
+    defines += [ "ADDRESS_SANITIZER" ]
+  }
+  if (is_lsan) {
+    defines += [ "LEAK_SANITIZER" ]
+  }
+  if (is_tsan) {
+    defines += [
+      "THREAD_SANITIZER",
+      "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+    ]
+  }
+  if (is_msan) {
+    defines += [ "MEMORY_SANITIZER" ]
+  }
+  if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
+    defines += [ "UNDEFINED_SANITIZER" ]
+  }
+  if (is_official_build) {
+    defines += [ "OFFICIAL_BUILD" ]
+  }
+
+  # ==============================================
+  #   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+  # ==============================================
+  #
+  # See the comment at the top.
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+  defines = [
+    "_DEBUG",
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+  ]
+
+  if (is_nacl) {
+    defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+  }
+
+  if (is_win) {
+    if (!enable_iterator_debugging && !use_custom_libcxx) {
+      # Iterator debugging is enabled by default by the compiler on debug
+      # builds, and we have to tell it to turn it off.
+      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+    }
+  } else if ((is_linux || is_chromeos) && current_cpu == "x64" &&
+             enable_iterator_debugging) {
+    # Enable libstdc++ debugging facilities to help catch problems early, see
+    # http://crbug.com/65151 .
+    # TODO(phajdan.jr): Should we enable this for all of POSIX?
+    defines += [ "_GLIBCXX_DEBUG=1" ]
+  }
+}
+
+config("release") {
+  defines = [ "NDEBUG" ]
+
+  # Sanitizers.
+  if (is_tsan) {
+    defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=1" ]
+  } else {
+    defines += [ "NVALGRIND" ]
+    if (!is_nacl) {
+      # NaCl always enables dynamic annotations. Currently this value is set to
+      # 1 for all .nexes.
+      defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+    }
+  }
+
+  if (is_ios) {
+    # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
+    # follows Xcode's default behavior for Release builds.
+    defines += [ "NS_BLOCK_ASSERTIONS=1" ]
+  }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+  if (is_win) {
+    # TODO(brettw) this list of defaults should probably be smaller, and
+    # instead the targets that use the less common ones (e.g. wininet or
+    # winspool) should include those explicitly.
+    libs = [
+      "advapi32.lib",
+      "comdlg32.lib",
+      "dbghelp.lib",
+      "dnsapi.lib",
+      "gdi32.lib",
+      "msimg32.lib",
+      "odbc32.lib",
+      "odbccp32.lib",
+      "oleaut32.lib",
+      "shell32.lib",
+      "shlwapi.lib",
+      "user32.lib",
+      "usp10.lib",
+      "uuid.lib",
+      "version.lib",
+      "wininet.lib",
+      "winmm.lib",
+      "winspool.lib",
+      "ws2_32.lib",
+
+      # Please don't add more stuff here. We should actually be making this
+      # list smaller, since all common things should be covered. If you need
+      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+      # target that needs it.
+    ]
+    if (current_os == "winuwp") {
+      # These libraries are needed for Windows UWP (i.e. store apps).
+      libs += [
+        "dloadhelper.lib",
+        "WindowsApp.lib",
+      ]
+    } else {
+      # These libraries are not compatible with Windows UWP (i.e. store apps).
+      libs += [
+        "delayimp.lib",
+        "kernel32.lib",
+        "ole32.lib",
+      ]
+    }
+  } else if (is_android) {
+    libs = [
+      "dl",
+      "m",
+    ]
+  } else if (is_mac) {
+    # Targets should choose to explicitly link frameworks they require. Since
+    # linking can have run-time side effects, nothing should be listed here.
+    libs = []
+  } else if (is_ios) {
+    # The libraries listed here will be specified for both the target and the
+    # host. Only the common ones should be listed here.
+    frameworks = [
+      "CoreFoundation.framework",
+      "CoreGraphics.framework",
+      "CoreText.framework",
+      "Foundation.framework",
+    ]
+  } else if (is_linux || is_chromeos) {
+    libs = [
+      "dl",
+      "pthread",
+      "rt",
+    ]
+  }
+}
+
+group("common_deps") {
+  visibility = [
+    ":executable_deps",
+    ":loadable_module_deps",
+    ":shared_library_deps",
+  ]
+
+  # WARNING: This group is a dependency of **every executable and shared
+  # library**.  Please be careful adding new dependencies here.
+  public_deps = []
+
+  if (using_sanitizer) {
+    public_deps += [ "//build/config/sanitizers:deps" ]
+  }
+
+  if (use_custom_libcxx) {
+    public_deps += [ "//buildtools/third_party/libc++" ]
+  }
+
+  if (use_afl) {
+    public_deps += [ "//third_party/afl" ]
+  }
+
+  if (is_android && use_order_profiling) {
+    public_deps += [ "//base/android/orderfile:orderfile_instrumentation" ]
+  }
+
+  if (is_fuchsia) {
+    public_deps +=
+        [ "//third_party/fuchsia-sdk/sdk/build/config:runtime_library_group" ]
+  }
+}
+
+# Only the executable template in BUILDCONFIG.gn should reference this.
+group("executable_deps") {
+  public_deps = [ ":common_deps" ]
+  if (export_libcxxabi_from_executables) {
+    public_deps += [ "//buildtools/third_party/libc++abi" ]
+  }
+}
+
+# Only the loadable_module template in BUILDCONFIG.gn should reference this.
+group("loadable_module_deps") {
+  public_deps = [ ":common_deps" ]
+}
+
+# Only the shared_library template in BUILDCONFIG.gn should reference this.
+group("shared_library_deps") {
+  public_deps = [ ":common_deps" ]
+}
+
+# Executable configs -----------------------------------------------------------
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+  _windows_linker_configs = [
+    "//build/config/win:sdk_link",
+    "//build/config/win:common_linker_setup",
+  ]
+}
+
+# This config defines the configs applied to all executables.
+config("executable_config") {
+  configs = []
+
+  if (is_win) {
+    configs += _windows_linker_configs
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:mac_dynamic_flags" ]
+  } else if (is_ios) {
+    configs += [
+      "//build/config/ios:ios_dynamic_flags",
+      "//build/config/ios:ios_executable_flags",
+    ]
+  } else if (is_linux || is_chromeos || is_android || current_os == "aix") {
+    configs += [ "//build/config/gcc:executable_config" ]
+    if (is_chromecast) {
+      configs += [ "//build/config/chromecast:executable_config" ]
+    } else if (is_fuchsia) {
+      configs += [ "//build/config/fuchsia:executable_config" ]
+    }
+  }
+
+  # If we're using the prebuilt instrumented libraries with the sanitizers, we
+  # need to add ldflags to every binary to make sure they are picked up.
+  if (prebuilt_instrumented_libraries_available) {
+    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+  }
+  if (use_locally_built_instrumented_libraries) {
+    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+  }
+  configs += [ "//build/config/sanitizers:link_executable" ]
+}
+
+# Shared library configs -------------------------------------------------------
+
+# This config defines the configs applied to all shared libraries.
+config("shared_library_config") {
+  configs = []
+
+  if (is_win) {
+    configs += _windows_linker_configs
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:mac_dynamic_flags" ]
+  } else if (is_ios) {
+    configs += [
+      "//build/config/ios:ios_dynamic_flags",
+      "//build/config/ios:ios_shared_library_flags",
+    ]
+  } else if (is_chromecast) {
+    configs += [ "//build/config/chromecast:shared_library_config" ]
+  } else if (is_linux || is_chromeos || current_os == "aix") {
+    configs += [ "//build/config/gcc:shared_library_config" ]
+  }
+
+  # If we're using the prebuilt instrumented libraries with the sanitizers, we
+  # need to add ldflags to every binary to make sure they are picked up.
+  if (prebuilt_instrumented_libraries_available) {
+    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+  }
+  if (use_locally_built_instrumented_libraries) {
+    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+  }
+  configs += [ "//build/config/sanitizers:link_shared_library" ]
+}
+
+# Add this config to your target to enable precompiled headers.
+#
+# Precompiled headers are done on a per-target basis. If you have just a couple
+# of files, the time it takes to precompile (~2 seconds) can actually be longer
+# than the time saved. On a Z620, a 100 file target compiles about 2 seconds
+# faster with precompiled headers, with greater savings for larger targets.
+#
+# Recommend precompiled headers for targets with more than 50 .cc files.
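+#
+# To opt in, add this inside the target definition (a sketch):
+#
+#   configs += [ "//build/config:precompiled_headers" ]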
+config("precompiled_headers") {
+  if (enable_precompiled_headers) {
+    if (is_win) {
+      # This is a string rather than a file GN knows about. It has to match
+      # exactly what's in the /FI flag below, and what might appear in the
+      # source code in quotes for an #include directive.
+      precompiled_header = "build/precompile.h"
+
+      # This is a file that GN will compile with the above header. It will be
+      # implicitly added to the sources (potentially multiple times, with one
+      # variant for each language used in the target).
+      precompiled_source = "//build/precompile.cc"
+
+      # Force include the header.
+      cflags = [ "/FI$precompiled_header" ]
+    } else if (is_mac || is_linux) {
+      precompiled_source = "//build/precompile.h"
+    }
+  }
+}
diff --git a/src/build/config/BUILDCONFIG.gn b/src/build/config/BUILDCONFIG.gn
new file mode 100644
index 0000000..0ef73ab
--- /dev/null
+++ b/src/build/config/BUILDCONFIG.gn
@@ -0,0 +1,580 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# WHAT IS THIS FILE?
+# =============================================================================
+#
+# This is the master GN build configuration. This file is loaded after the
+# build args (args.gn) for the build directory and after the toplevel ".gn"
+# file (which points to this file as the build configuration).
+#
+# This file will be executed and the resulting context will be used to execute
+# every other file in the build. So variables declared here (that don't start
+# with an underscore) will be implicitly global.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configuration in the same build.
+#
+# This gives the following variables:
+#  - host_os, host_cpu, host_toolchain
+#  - target_os, target_cpu, default_toolchain
+#  - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+#   if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+  target_os = host_os
+}
+
+if (target_cpu == "") {
+  if (target_os == "android") {
+    # If we're building for Android, we should assume that we want to
+    # build for ARM by default, not the host_cpu (which is likely x64).
+    # This allows us to not have to specify both target_os and target_cpu
+    # on the command line.
+    target_cpu = "arm"
+  } else {
+    target_cpu = host_cpu
+  }
+}
+
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file.
+#
+# - If your feature is a single target, say //components/foo, you can put
+#   a declare_args() block in //components/foo/BUILD.gn and use it there
+#   (see the sketch after this list). Nobody else in the build needs to see
+#   the flag.
+#
+# - Defines based on build variables should be implemented via the generated
+#   build flag header system. See //build/buildflag_header.gni. You can put
+#   the buildflag_header target in the same file as the build flag itself. You
+#   should almost never set "defines" directly.
+#
+# - If your flag toggles a target on and off or toggles between different
+#   versions of similar things, write a "group" target that forwards to the
+#   right target (or no target) depending on the value of the build flag. This
+#   group can be in the same BUILD.gn file as the build flag, and targets can
+#   depend unconditionally on the group rather than duplicating flag checks
+#   across many targets.
+#
+# - If a semi-random set of build files REALLY needs to know about a define and
+#   the above pattern for isolating the build logic in a forwarding group
+#   doesn't work, you can put the argument in a .gni file. This should be put
+#   in the lowest level of the build that knows about this feature (which should
+#   almost always be outside of the //build directory!).
+#
+# Other flag advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+#   to some complex thing in the default case (like the location of the
+#   compiler which would be computed by a script), use a default value of -1 or
+#   the empty string. Outside of the declare_args block, conditionally expand
+#   the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+#   your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+#   These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+#   even if the value is overridden, which is wasteful. See first bullet.
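+#
+# A minimal target-local flag, as a sketch (the flag and component are
+# hypothetical):
+#
+#   declare_args() {
+#     # Enables the experimental foo pipeline in //components/foo.
+#     enable_foo_pipeline = false
+#   }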
+
+declare_args() {
+  # Set to enable the official build level of optimization. This has nothing
+  # to do with branding, but enables an additional level of optimization above
+  # release (!is_debug). This might be better expressed as a tri-state
+  # (debug, release, official) but for historical reasons there are two
+  # separate flags.
+  is_official_build = false
+
+  # Set to true when compiling with the Clang compiler.
+  is_clang = current_os != "linux" ||
+             (current_cpu != "s390x" && current_cpu != "s390" &&
+              current_cpu != "ppc64" && current_cpu != "ppc" &&
+              current_cpu != "mips" && current_cpu != "mips64" &&
+              current_cpu != "riscv64")
+
+  # Allows the path to a custom target toolchain to be injected as a single
+  # argument, and set as the default toolchain.
+  custom_toolchain = ""
+
+  # This should not normally be set as a build argument.  It's here so that
+  # every toolchain can pass through the "global" value via toolchain_args().
+  host_toolchain = ""
+
+  # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
+
+declare_args() {
+  # Debug build. Enabling official builds automatically sets is_debug to false.
+  is_debug = !is_official_build
+}
+
+declare_args() {
+  # Component build. Setting to true compiles targets declared as "components"
+  # as shared libraries loaded dynamically. This speeds up development time.
+  # When false, components will be linked statically.
+  #
+  # For more information see
+  # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
+  is_component_build = is_debug && current_os != "ios"
+}
+
+assert(!(is_debug && is_official_build), "Can't do official debug builds")
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+#
+# We do this before anything else to make sure we complain about any
+# unsupported os/cpu combinations as early as possible.
+
+if (host_toolchain == "") {
+  # This should only happen in the top-level context.
+  # In a specific toolchain context, the toolchain_args()
+  # block should have propagated a value down.
+  # TODO(dpranke): Add some sort of assert here that verifies that
+  # no toolchain omitted host_toolchain from its toolchain_args().
+
+  if (host_os == "linux") {
+    if (target_os != "linux") {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else if (is_clang) {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/linux:$host_cpu"
+    }
+  } else if (host_os == "mac") {
+    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+  } else if (host_os == "win") {
+    # On Windows always use the target CPU for host builds for x86/x64. On the
+    # configurations we support this will always work and it saves build steps.
+    # Windows ARM64 targets require an x64 host for cross builds.
+    if (target_cpu == "x86" || target_cpu == "x64") {
+      if (is_clang) {
+        host_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+      } else {
+        host_toolchain = "//build/toolchain/win:$target_cpu"
+      }
+    } else if (is_clang) {
+      host_toolchain = "//build/toolchain/win:win_clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/win:$host_cpu"
+    }
+  } else if (host_os == "aix") {
+    host_toolchain = "//build/toolchain/aix:$host_cpu"
+  } else {
+    assert(false, "Unsupported host_os: $host_os")
+  }
+}
+
+_default_toolchain = ""
+
+if (target_os == "android") {
+  assert(host_os == "linux" || host_os == "mac",
+         "Android builds are only supported on Linux and Mac hosts.")
+  _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu"
+} else if (target_os == "chromeos" || target_os == "linux") {
+  # See comments in build/toolchain/cros/BUILD.gn about board compiles.
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/linux:$target_cpu"
+  }
+} else if (target_os == "fuchsia") {
+  _default_toolchain = "//build/toolchain/fuchsia:$target_cpu"
+} else if (target_os == "ios") {
+  _default_toolchain = "//build/toolchain/ios:ios_clang_$target_cpu"
+} else if (target_os == "mac") {
+  assert(host_os == "mac" || host_os == "linux",
+         "Mac cross-compiles are unsupported.")
+  _default_toolchain = "//build/toolchain/mac:clang_$target_cpu"
+} else if (target_os == "win") {
+  # On Windows, we use the same toolchain for host and target by default.
+  # Beware, win cross builds have some caveats, see docs/win_cross.md
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/win:$target_cpu"
+  }
+} else if (target_os == "winuwp") {
+  # Only target winuwp when building a Windows Store application; only
+  # x86, x64, arm and arm64 are supported target CPUs.
+  assert(target_cpu == "x86" || target_cpu == "x64" || target_cpu == "arm" ||
+         target_cpu == "arm64")
+  _default_toolchain = "//build/toolchain/win:uwp_$target_cpu"
+} else if (target_os == "aix") {
+  _default_toolchain = "//build/toolchain/aix:$target_cpu"
+} else {
+  assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+  set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+  set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Fuchsia and Windows).
+# - is_linux is true for desktop Linux, but not for ChromeOS nor Android (which
+#   is generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+is_android = current_os == "android"
+is_chromeos = current_os == "chromeos"
+is_fuchsia = current_os == "fuchsia"
+is_ios = current_os == "ios"
+is_linux = current_os == "linux"
+is_mac = current_os == "mac"
+is_nacl = current_os == "nacl"
+is_win = current_os == "win" || current_os == "winuwp"
+
+is_apple = is_ios || is_mac
+is_posix = !is_win && !is_fuchsia
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+#
+# WHAT GOES HERE?
+#
+# Other than the main compiler and linker configs, the only reason for a config
+# to be in this list is if some targets need to explicitly override that config
+# by removing it. This is how targets opt out of flags. If you don't have that
+# requirement and just need to add a config everywhere, reference it as a
+# sub-config of an existing one, most commonly the main "compiler" one.
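+#
+# A sketch of a target overriding one of these defaults (the config pair here
+# is for illustration):
+#
+#   configs -= [ "//build/config/compiler:default_optimization" ]
+#   configs += [ "//build/config/compiler:optimize_max" ]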
+
+# Holds all configs used for running the compiler.
+default_compiler_configs = [
+  "//build/config:feature_flags",
+  "//build/config/compiler:afdo",
+  "//build/config/compiler:afdo_optimize_size",
+  "//build/config/compiler:cet_shadow_stack",
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:compiler_arm_thumb",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:default_optimization",
+  "//build/config/compiler:default_stack_frames",
+  "//build/config/compiler:default_symbols",
+  "//build/config/compiler:export_dynamic",
+  "//build/config/compiler:no_exceptions",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+  "//build/config/compiler:thin_archive",
+  "//build/config/compiler:thinlto_optimize_default",
+  "//build/config/compiler:default_init_stack_vars",
+  "//build/config/compiler/pgo:default_pgo_flags",
+  "//build/config/coverage:default_coverage",
+  "//build/config/sanitizers:default_sanitizer_flags",
+]
+
+if (is_win) {
+  default_compiler_configs += [
+    "//build/config/win:default_cfg_compiler",
+    "//build/config/win:default_crt",
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+  ]
+}
+
+if (is_posix) {
+  if (current_os != "aix") {
+    default_compiler_configs +=
+        [ "//build/config/gcc:symbol_visibility_hidden" ]
+  }
+}
+
+if (is_fuchsia) {
+  default_compiler_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
+}
+
+if (is_android) {
+  default_compiler_configs +=
+      [ "//build/config/android:default_orderfile_instrumentation" ]
+}
+
+if (is_clang && !is_nacl) {
+  default_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Debug/release-related defines.
+if (is_debug) {
+  default_compiler_configs += [ "//build/config:debug" ]
+} else {
+  default_compiler_configs += [ "//build/config:release" ]
+}
+
+# Static libraries and source sets use only the compiler ones.
+set_defaults("static_library") {
+  configs = default_compiler_configs
+}
+set_defaults("source_set") {
+  configs = default_compiler_configs
+}
+
+# Compute the set of configs common to all linked targets (shared libraries,
+# loadable modules, executables) to avoid duplication below.
+if (is_win) {
+  # Many targets remove these configs, so they are not contained within
+  # //build/config:executable_config for easy removal.
+  _linker_configs = [
+    "//build/config/win:default_incremental_linking",
+
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+} else if (is_mac) {
+  _linker_configs = [ "//build/config/mac:strip_all" ]
+} else {
+  _linker_configs = []
+}
+
+# Executable defaults.
+default_executable_configs = default_compiler_configs + [
+                               "//build/config:default_libs",
+                               "//build/config:executable_config",
+                             ] + _linker_configs
+
+if (is_win) {
+  # Turn on linker CFI for executables, and position it so it can be removed
+  # if needed.
+  default_executable_configs += [ "//build/config/win:cfi_linker" ]
+}
+
+set_defaults("executable") {
+  configs = default_executable_configs
+}
+
+# Shared library and loadable module defaults (also for components in component
+# mode).
+default_shared_library_configs = default_compiler_configs + [
+                                   "//build/config:default_libs",
+                                   "//build/config:shared_library_config",
+                                 ] + _linker_configs
+if (is_win) {
+  # Turn on linker CFI for DLLs, and position it so it can be removed if needed.
+  default_shared_library_configs += [ "//build/config/win:cfi_linker" ]
+}
+
+if (is_android) {
+  # Strip native JNI exports from shared libraries by default. Binaries that
+  # want this can remove this config.
+  default_shared_library_configs +=
+      [ "//build/config/android:hide_all_but_jni_onload" ]
+}
+set_defaults("shared_library") {
+  configs = default_shared_library_configs
+}
+set_defaults("loadable_module") {
+  configs = default_shared_library_configs
+
+  # loadable_modules are generally used by other libs, not just via JNI.
+  if (is_android) {
+    configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+  }
+}
+
+# A helper for forwarding testonly and visibility.
+# Forwarding "*" does not include variables from outer scopes (to avoid copying
+# all globals into each template invocation), so it will not pick up
+# file-scoped or outer-template-scoped variables. Normally this behavior is
+# desired, but "visibility" and "testonly" are commonly defined in outer scopes.
+# Explicitly forwarding them in forward_variables_from() works around this
+# nuance. See //build/docs/writing_gn_templates.md#using-forward_variables_from
+TESTONLY_AND_VISIBILITY = [
+  "testonly",
+  "visibility",
+]
+
+# Sets default dependencies for executable and shared_library targets.
+#
+# Variables
+#   no_default_deps: If true, no standard dependencies will be added.
+#       Targets that set this usually also want to remove
+#       "//build/config/compiler:runtime_library" from configs (to remove
+#       its subconfig "//build/config/c++:runtime_library").
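+#
+# A sketch of a target opting out (the target name is hypothetical):
+#
+#   executable("bare_metal_tool") {
+#     sources = [ "main.cc" ]
+#     no_default_deps = true
+#     configs -= [ "//build/config/compiler:runtime_library" ]
+#   }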
+foreach(_target_type,
+        [
+          "executable",
+          "loadable_module",
+          "shared_library",
+        ]) {
+  template(_target_type) {
+    # Alias "target_name" because it is clobbered by forward_variables_from().
+    _target_name = target_name
+    target(_target_type, _target_name) {
+      forward_variables_from(invoker,
+                             "*",
+                             TESTONLY_AND_VISIBILITY + [ "no_default_deps" ])
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      if (!defined(deps)) {
+        deps = []
+      }
+      if (!defined(invoker.no_default_deps) || !invoker.no_default_deps) {
+        # This pulls in one of:
+        # //build/config:executable_deps
+        # //build/config:loadable_module_deps
+        # //build/config:shared_library_deps
+        # (This explicit list is so that grepping for these configs finds where
+        # they are used.)
+        deps += [ "//build/config:${_target_type}_deps" ]
+      }
+
+      # On Android, write shared library output file to metadata. We will use
+      # this information to, for instance, collect all shared libraries that
+      # should be packaged into an APK.
+      if (!defined(invoker.metadata) && is_android &&
+          (_target_type == "shared_library" ||
+           _target_type == "loadable_module")) {
+        _output_name = _target_name
+        if (defined(invoker.output_name)) {
+          _output_name = invoker.output_name
+        }
+
+        # Remove 'lib' prefix from output name if it exists.
+        _magic_prefix = "$0x01$0x01"
+        _output_name = string_replace("${_magic_prefix}${_output_name}",
+                                      "${_magic_prefix}lib",
+                                      _magic_prefix,
+                                      1)
+        _output_name = string_replace(_output_name, _magic_prefix, "", 1)
+
+        if (defined(output_extension)) {
+          _shlib_extension = ".$output_extension"
+        } else if (is_component_build && _target_type != "loadable_module") {
+          _shlib_extension = ".cr.so"
+        } else {
+          _shlib_extension = ".so"
+        }
+
+        metadata = {
+          shared_libraries =
+              [ "$root_out_dir/lib${_output_name}${_shlib_extension}" ]
+        }
+      }
+    }
+  }
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
+template("component") {
+  if (is_component_build) {
+    _component_mode = "shared_library"
+  } else if (defined(invoker.static_component_type)) {
+    assert(invoker.static_component_type == "static_library" ||
+           invoker.static_component_type == "source_set")
+    _component_mode = invoker.static_component_type
+  } else if (!defined(invoker.sources) || invoker.sources == []) {
+    # When there are no sources defined, use a source set to avoid creating
+    # an empty static library (which generally doesn't work).
+    _component_mode = "source_set"
+  } else {
+    _component_mode = "static_library"
+  }
+  target(_component_mode, target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+  }
+}
+
+# Component defaults
+set_defaults("component") {
+  if (is_component_build) {
+    configs = default_shared_library_configs
+    if (is_android) {
+      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+    }
+  } else {
+    configs = default_compiler_configs
+  }
+}
diff --git a/src/build/config/aix/BUILD.gn b/src/build/config/aix/BUILD.gn
new file mode 100644
index 0000000..6c8749a
--- /dev/null
+++ b/src/build/config/aix/BUILD.gn
@@ -0,0 +1,49 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  defines = [
+    "_LINUX_SOURCE_COMPAT=1",
+    "__STDC_FORMAT_MACROS",
+    "_ALL_SOURCE=1",
+  ]
+
+  cflags = [
+    "-Wall",
+    "-Wno-unused-parameter",
+    "-pthread",
+    "-Wmissing-field-initializers",
+    "-Wno-uninitialized",
+    "-mcpu=power5+",
+    "-mfprnd",
+    "-mno-popcntb",
+    "-maix64",
+    "-fdata-sections",
+    "-ffunction-sections",
+    "-O3",
+
+    # "-Werror"
+    # We need to find a way to fix the TOC warnings if we want to enable this.
+  ]
+
+  cflags_cc = [
+    "-fno-rtti",
+    "-fno-exceptions",
+    "-Wno-narrowing",
+    "-Wno-non-virtual-dtor",
+  ]
+
+  ldflags = [
+    "-pthread",
+    "-maix64",
+    "-Wl,-bbigtoc",
+  ]
+}
diff --git a/src/build/config/android/BUILD.gn b/src/build/config/android/BUILD.gn
new file mode 100644
index 0000000..8eed45e
--- /dev/null
+++ b/src/build/config/android/BUILD.gn
@@ -0,0 +1,158 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+if (current_toolchain == default_toolchain) {
+  import("//build/toolchain/concurrent_links.gni")
+}
+
+assert(is_android)
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Android-only.
+config("compiler") {
+  cflags = [
+    "-ffunction-sections",
+    "-fno-short-enums",
+  ]
+  defines = [
+    "ANDROID",
+
+    # The NDK has these things, but doesn't define the constants to say that it
+    # does. Define them here instead.
+    "HAVE_SYS_UIO_H",
+
+    # Forces full rebuilds on NDK rolls. To rebuild everything when NDK version
+    # stays the same, increment the suffix number.
+    "ANDROID_NDK_VERSION_ROLL=${android_ndk_version}_1",
+  ]
+
+  if (current_cpu == "mips64el") {
+    cflags += [
+      # Have to force IAS for mips64.
+      "-fintegrated-as",
+    ]
+  }
+
+  ldflags = [
+    # Don't allow visible symbols from libgcc or libc++ to be
+    # re-exported.
+    "-Wl,--exclude-libs=libgcc.a",
+
+    # Don't allow visible symbols from libraries that contain
+    # assembly code with symbols that aren't hidden properly.
+    # http://crbug.com/448386
+    "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+  ]
+
+  # TODO(crbug.com/1184398): Move to compiler-rt when we are ready.
+  ldflags += [ "--rtlib=libgcc" ]
+  if (current_cpu == "arm64") {
+    # For outline atomics on AArch64 (can't pass this unconditionally
+    # due to unused flag warning on other targets).
+    cflags += [ "--rtlib=libgcc" ]
+  }
+
+  # $compile_api_level corresponds to the API level used for the sysroot path
+  # calculation in //build/config/android/config.gni
+  if (android_64bit_target_cpu) {
+    compile_api_level = android64_ndk_api_level
+  } else {
+    compile_api_level = android32_ndk_api_level
+  }
+
+  cflags += [ "--target=$android_abi_target$compile_api_level" ]
+  ldflags += [ "--target=$android_abi_target$compile_api_level" ]
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler.
+  asmflags = cflags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Android-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # Let the linker find libgcc.a.
+  ldflags = [ "--gcc-toolchain=" +
+              rebase_path(android_toolchain_root, root_build_dir) ]
+
+  libs = []
+
+  # On 64-bit platforms, the only symbols provided by libandroid_support.a are
+  # strto{d,f,l,ul}_l. These symbols are not used by our libc++, and newer NDKs
+  # don't provide a libandroid_support.a on 64-bit platforms, so we only depend
+  # on this library on 32-bit platforms.
+  if (current_cpu == "arm" || current_cpu == "x86") {
+    libs += [ "android_support" ]
+  }
+
+  # arm builds of libc++ starting in NDK r12 depend on unwind.
+  if (current_cpu == "arm") {
+    libs += [ "unwind" ]
+  }
+
+  if (current_cpu == "arm" && arm_version == 6) {
+    libs += [ "atomic" ]
+  }
+
+  if (current_cpu == "mipsel") {
+    libs += [ "atomic" ]
+  }
+
+  # TODO(jdduke) Re-enable on mips after resolving linking
+  # issues with libc++ (crbug.com/456380).
+  if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+    ldflags += [ "-Wl,--warn-shared-textrel" ]
+  }
+}
+
+config("hide_all_but_jni_onload") {
+  ldflags = [ "-Wl,--version-script=" + rebase_path(
+                  "//build/android/android_only_explicit_jni_exports.lst",
+                  root_build_dir) ]
+}
+
+config("hide_all_but_jni") {
+  ldflags = [ "-Wl,--version-script=" +
+              rebase_path("//build/android/android_only_jni_exports.lst",
+                          root_build_dir) ]
+}
+
+config("lld_pack_relocations") {
+  ldflags = [ "-Wl,--pack-dyn-relocs=android" ]
+}
+
+# Used for instrumented build to generate the orderfile.
+config("default_orderfile_instrumentation") {
+  if (use_order_profiling) {
+    cflags = [ "-finstrument-function-entry-bare" ]
+    if (use_thin_lto) {
+      # TODO(pcc): This should not be necessary. Remove once
+      # https://reviews.llvm.org/D50016 lands and gets rolled in.
+      ldflags = [ "-Wl,-u,__cyg_profile_func_enter_bare" ]
+    }
+  }
+}
+
+if (current_toolchain == default_toolchain) {
+  pool("goma_javac_pool") {
+    # Override action_pool when goma is enabled for javac.
+    depth = 10000
+  }
+
+  # When defined, this pool should be used instead of link_pool for commands
+  # that need 1-2GB of RAM. https://crbug.com/1078460
+  if (defined(java_cmd_pool_size)) {
+    pool("java_cmd_pool") {
+      depth = java_cmd_pool_size
+    }
+  }
+}
diff --git a/src/build/config/android/abi.gni b/src/build/config/android/abi.gni
new file mode 100644
index 0000000..53e5701
--- /dev/null
+++ b/src/build/config/android/abi.gni
@@ -0,0 +1,96 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Logic separated out from config.gni so that it can be used by compiler.gni
+# without introducing a circular dependency.
+
+# NOTE: Because Chrome OS builds may depend on targets built with the Android
+# toolchain, this GNI file may be read and processed from within Chrome OS
+# toolchains. Checking |is_android| here would therefore be too restrictive.
+assert(is_android || is_chromeos)
+
+declare_args() {
+  # Adds instrumentation to each function. Writes a file with the order that
+  # functions are called at startup.
+  use_order_profiling = false
+
+  # Only effective if use_order_profiling = true. When this is true,
+  # instrumentation switches from startup profiling after a delay, and
+  # then waits for a devtools memory dump request to dump all
+  # profiling information. When false, the same delay is used to switch from
+  # startup, and then after a second delay all profiling information is dumped.
+  # See base::android::orderfile::StartDelayedDump for more information.
+  devtools_instrumentation_dumping = false
+
+  # Only effective if use_order_profiling = true. When this is true the call
+  # graph based instrumentation is used.
+  use_call_graph = false
+
+  # Build additional browser splits with HWASAN instrumentation enabled.
+  build_hwasan_splits = false
+}
+
+assert(!devtools_instrumentation_dumping || use_order_profiling,
+       "devtools_instrumentation_dumping requires use_order_profiling")
+assert(!use_call_graph || use_order_profiling,
+       "use_call_graph requires use_order_profiling")
+
+if (current_cpu == "x86") {
+  android_app_abi = "x86"
+  android_abi_target = "i686-linux-android"
+} else if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+  if (arm_version < 7) {
+    android_app_abi = "armeabi"
+  } else {
+    android_app_abi = "armeabi-v7a"
+  }
+  android_abi_target = "arm-linux-androideabi"
+} else if (current_cpu == "mipsel") {
+  android_app_abi = "mips"
+  android_abi_target = "mipsel-linux-android"
+} else if (current_cpu == "x64") {
+  android_app_abi = "x86_64"
+
+  # Placeholder for x64 support, not tested.
+  # TODO: Enable clang support for Android x64. http://crbug.com/539781
+  android_abi_target = "x86_64-linux-android"
+} else if (current_cpu == "arm64") {
+  android_app_abi = "arm64-v8a"
+  android_abi_target = "aarch64-linux-android"
+} else if (current_cpu == "mips64el") {
+  android_app_abi = "mips64"
+
+  # Placeholder for mips64 support, not tested.
+  android_abi_target = "mips64el-linux-android"
+} else {
+  assert(false, "Unknown Android ABI: " + current_cpu)
+}
+
+if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") {
+  android_64bit_target_cpu = true
+} else if (target_cpu == "arm" || target_cpu == "x86" ||
+           target_cpu == "mipsel") {
+  android_64bit_target_cpu = false
+} else {
+  assert(false, "Unknown target CPU: $target_cpu")
+}
+
+# Intentionally do not define android_app_secondary_abi_cpu and
+# android_app_secondary_abi for 32-bit target_cpu, since they are not used.
+if (target_cpu == "arm64") {
+  android_secondary_abi_cpu = "arm"
+  android_app_secondary_abi = "armeabi-v7a"
+} else if (target_cpu == "x64") {
+  android_secondary_abi_cpu = "x86"
+  android_app_secondary_abi = "x86"
+} else if (target_cpu == "mips64el") {
+  android_secondary_abi_cpu = "mipsel"
+  android_app_secondary_abi = "mips"
+}
+
+if (defined(android_secondary_abi_cpu)) {
+  android_secondary_abi_toolchain =
+      "//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
+}
diff --git a/src/build/config/android/android_nocompile.gni b/src/build/config/android/android_nocompile.gni
new file mode 100644
index 0000000..a99bad3
--- /dev/null
+++ b/src/build/config/android/android_nocompile.gni
@@ -0,0 +1,96 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+declare_args() {
+  # Used by tests to enable generating build files for GN targets which should
+  # not compile.
+  enable_android_nocompile_tests = false
+}
+
+# Defines a test suite which checks that the 'test targets' fail to compile. The
+# test suite runs 'gn gen' with a custom output directory and attempts to compile
+# each test target.
+#
+# All of the tests should be defined in the same dedicated BUILD.gn file in order
+# to minimize the number of targets that are processed by 'gn gen'.
+#
+# Variables
+#   tests: List of test configurations. A test configuration has the following
+#     keys:
+#     'target': The GN target which should not compile when
+#       enable_android_nocompile_tests=true. The target should compile when
+#       enable_android_nocompile_tests=false.
+#     'expected_compile_output_regex': Error message regex to search for when
+#       the compile fails.
+#     'nocompile_sources': Source files which do not compile. This ensures that
+#       the test suite is re-run when one of these files changes (as the test
+#       targets might not depend on the files when
+#       enable_android_nocompile_tests=false).
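+#
+# Example (hypothetical targets and paths, a minimal sketch):
+#   android_nocompile_test_suite("foo_nocompile_tests") {
+#     tests = [
+#       {
+#         target = "//example:invalid_cast_java"
+#         expected_compile_output_regex = "error: incompatible types"
+#         nocompile_sources = [ "//example/java/src/InvalidCast.java" ]
+#       },
+#     ]
+#   }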
+template("android_nocompile_test_suite") {
+  assert(!enable_android_nocompile_tests)
+
+  action(target_name) {
+    testonly = true
+    script = "//build/android/gyp/nocompile_test.py"
+
+    _tests = invoker.tests
+    _test0 = _tests[0]
+    _test0_dir = get_label_info(_test0["target"], "dir")
+    foreach(_test_config, _tests) {
+      assert(
+          _test0_dir == get_label_info(_test_config["target"], "dir"),
+          "To avoid running 'gn gen' for each test, all tests in an android_nocompile_test_suite() should be declared in same BUILD.gn file")
+    }
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    inputs = []
+    if (defined(invoker.pydeps)) {
+      foreach(_pydeps_file, invoker.pydeps) {
+        _pydeps_file_lines = read_file(_pydeps_file, "list lines")
+        _pydeps_entries = filter_exclude(_pydeps_file_lines, [ "#*" ])
+        _pydeps_file_dir = get_path_info(_pydeps_file, "dir")
+        inputs += rebase_path(_pydeps_entries, ".", _pydeps_file_dir)
+      }
+    }
+
+    sources = []
+    _json_test_configs = []
+    foreach(_test_config, _tests) {
+      _test = _test_config["target"]
+      deps += [ _test ]
+      sources += _test_config["nocompile_sources"]
+      _dep_dir = get_label_info(_test, "dir")
+      _dep_name = get_label_info(_test, "name")
+      _json_test_configs += [
+        {
+          target = "${_dep_dir}:${_dep_name}"
+          expect_regex = _test_config["expected_compile_output_regex"]
+        },
+      ]
+    }
+
+    _config_path = "$target_gen_dir/${target_name}.nocompile_config"
+    write_file(_config_path, _json_test_configs, "json")
+
+    _stamp_path = "${target_gen_dir}/${target_name}.stamp"
+    args = [
+      "--gn-args-path",
+      "args.gn",
+      "--out-dir",
+      rebase_path("${target_out_dir}/${target_name}/nocompile_out",
+                  root_build_dir),
+      "--test-configs-path",
+      rebase_path(_config_path, root_build_dir),
+      "--stamp",
+      rebase_path(_stamp_path, root_build_dir),
+    ]
+    inputs += [ _config_path ]
+    outputs = [ _stamp_path ]
+  }
+}
diff --git a/src/build/config/android/build_vars.gni b/src/build/config/android/build_vars.gni
new file mode 100644
index 0000000..a47607d
--- /dev/null
+++ b/src/build/config/android/build_vars.gni
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+
+# Contains useful GN variables that may be used by scripts that take
+# --output-directory as an arg.
+build_vars_file = "$root_build_dir/build_vars.json"
+
+android_build_vars_json = {
+  if (enable_java_templates) {
+    android_ndk_root = rebase_path(android_ndk_root, root_build_dir)
+    android_sdk_build_tools =
+        rebase_path(android_sdk_build_tools, root_build_dir)
+    android_sdk_build_tools_version = android_sdk_build_tools_version
+    android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
+    android_sdk_version = android_sdk_version
+    android_tool_prefix = rebase_path(android_tool_prefix, root_build_dir)
+    final_android_sdk = final_android_sdk
+
+    if (defined(android_secondary_abi_cpu)) {
+      android_secondary_abi_toolchain =
+          rebase_path(get_label_info(":foo($android_secondary_abi_toolchain)",
+                                     "root_out_dir"),
+                      root_build_dir)
+    }
+  }
+}
diff --git a/src/build/config/android/channel.gni b/src/build/config/android/channel.gni
new file mode 100644
index 0000000..6348bb9
--- /dev/null
+++ b/src/build/config/android/channel.gni
@@ -0,0 +1,14 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # The channel to build on Android: stable, beta, dev, canary, or default.
+  # "default" should be used on non-official builds.
+  android_channel = "default"
+}
+
+assert(android_channel == "default" || android_channel == "canary" ||
+           android_channel == "dev" || android_channel == "beta" ||
+           android_channel == "stable",
+       "Invalid channel: " + android_channel)
diff --git a/src/build/config/android/config.gni b/src/build/config/android/config.gni
new file mode 100644
index 0000000..652ea74
--- /dev/null
+++ b/src/build/config/android/config.gni
@@ -0,0 +1,353 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+# NOTE: Because Chrome OS builds may depend on targets built with the Android
+# toolchain, this GNI file may be read and processed from within Chrome OS
+# toolchains. Checking |is_android| here would therefore be too restrictive.
+if (is_android || is_chromeos) {
+  import("//build/config/chromecast_build.gni")
+  import("//build/config/dcheck_always_on.gni")
+  import("//build_overrides/build.gni")
+  import("abi.gni")
+
+  if (build_with_chromium) {
+    # Some non-chromium projects (e.g. WebRTC) use our build configs
+    # heavily but don't write gclient args files.
+
+    import("//build/config/gclient_args.gni")
+    if (defined(checkout_android_native_support)) {
+      n = "$0x0A"  # Newline
+      assert(checkout_android_native_support,
+             "Missing native Android toolchain support. |target_os| in your " +
+                 ".gclient configuration file (in the parent directory of " +
+                 "src) must include \"android\" and/or \"chromeos\". For " +
+                 "example:${n}${n}solutions = [${n}...${n}]${n}" +
+                 "target_os=[\"chromeos\"]${n}${n}" +
+                 "After adding |target_os| please re-run \"gclient sync\".${n}")
+    }
+  }
+
+  has_chrome_android_internal =
+      exec_script("//build/dir_exists.py",
+                  [ rebase_path("//clank", root_build_dir) ],
+                  "string") == "True"
+
+  # We use a separate declare_args block for only this argument so that we
+  # can decide early whether we have to pull in definitions from the internal
+  # config.
+  declare_args() {
+    # Enables using the internal Chrome for Android repository. The default
+    # value depends on whether the repository is available, and if it's not but
+    # this argument is manually set to True, the generation will fail.
+    # The main purpose of this argument is to avoid having to maintain 2
+    # repositories to support both public only and internal builds.
+    enable_chrome_android_internal = has_chrome_android_internal
+
+    # The default to use for android:minSdkVersion for targets that do
+    # not explicitly set it.
+    default_min_sdk_version = 21
+
+    # [WIP] Allows devs to achieve much faster edit-build-install cycles.
+    # Currently only works for ChromeModern apks due to incremental install.
+    # This needs to be in a separate declare_args as it determines some of the
+    # args in the main declare_args block below.
+    android_fast_local_dev = false
+  }
+
+  declare_args() {
+    # Android API level for 32-bit platforms.
+    android32_ndk_api_level = default_min_sdk_version
+
+    # Android API level for 64-bit platforms.
+    if (default_min_sdk_version < 21) {
+      android64_ndk_api_level = 21
+    } else {
+      android64_ndk_api_level = default_min_sdk_version
+    }
+  }
+
+  if (enable_chrome_android_internal) {
+    import("//clank/config.gni")
+  } else {
+    import("//build/config/android/sdk.gni")
+    declare_args() {
+      # Which Android SDK to use.
+      android_sdk_release = default_android_sdk_release
+    }
+  }
+
+  if (!defined(default_android_ndk_root)) {
+    default_android_ndk_root = "//third_party/android_ndk"
+    default_android_ndk_version = "r22"
+    default_android_ndk_major_version = 22
+  } else {
+    assert(defined(default_android_ndk_version))
+    assert(defined(default_android_ndk_major_version))
+  }
+
+  public_android_sdk_root = "//third_party/android_sdk/public"
+  if (android_sdk_release == "r") {
+    default_android_sdk_root = public_android_sdk_root
+    default_android_sdk_version = "30"
+    default_android_sdk_build_tools_version = "30.0.1"
+    public_android_sdk = true
+  }
+
+  # For use downstream when we are building with a preview Android SDK.
+  if (!defined(final_android_sdk)) {
+    final_android_sdk = public_android_sdk
+  }
+
+  if (!defined(default_lint_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing lint version.
+    default_lint_android_sdk_root = public_android_sdk_root
+    default_lint_android_sdk_version = 30
+  }
+
+  if (!defined(default_extras_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing where we load the SDK extras
+    # from. (Google Play services, etc.)
+    default_extras_android_sdk_root = public_android_sdk_root
+  }
+
+  if (!defined(default_android_keystore_path)) {
+    default_android_keystore_path = "//build/android/chromium-debug.keystore"
+    default_android_keystore_name = "chromiumdebugkey"
+    default_android_keystore_password = "chromium"
+  }
+
+  # google_play_services_package contains the path where individual client
+  # targets (e.g. google_play_services_base_java) are located.
+  if (!defined(google_play_services_package)) {
+    if (is_chromecast && chromecast_branding != "public") {
+      google_play_services_package = "//chromecast/internal/android/prebuilt/google-play-services-first-party"
+    } else {
+      google_play_services_package = "//third_party/android_deps"
+    }
+  }
+
+  if (!defined(dagger_java_target)) {
+    dagger_java_target =
+        "//third_party/android_deps:com_google_dagger_dagger_java"
+  }
+
+  if (!defined(dagger_annotation_processor_target)) {
+    dagger_annotation_processor_target =
+        "//third_party/android_deps:com_google_dagger_dagger_compiler_java"
+  }
+
+  if (!defined(guava_android_target)) {
+    guava_android_target =
+        "//third_party/android_deps:com_google_guava_guava_android_java"
+  }
+
+  if (!defined(material_design_target)) {
+    material_design_target =
+        "//third_party/android_deps:com_google_android_material_material_java"
+  }
+
+  if (!defined(android_protoc_bin)) {
+    android_protoc_bin = "//third_party/android_protoc/protoc"
+    android_proto_runtime =
+        "//third_party/android_deps:com_google_protobuf_protobuf_javalite_java"
+  }
+
+  webview_public_framework_dep =
+      "//third_party/android_sdk:public_framework_system_java"
+  if (!defined(webview_framework_dep)) {
+    webview_framework_dep = webview_public_framework_dep
+  }
+
+  assert(defined(default_android_sdk_root),
+         "SDK release " + android_sdk_release + " not recognized.")
+
+  declare_args() {
+    android_ndk_root = default_android_ndk_root
+    android_ndk_version = default_android_ndk_version
+    android_ndk_major_version = default_android_ndk_major_version
+
+    android_sdk_root = default_android_sdk_root
+    android_sdk_version = default_android_sdk_version
+    android_sdk_build_tools_version = default_android_sdk_build_tools_version
+
+    lint_android_sdk_root = default_lint_android_sdk_root
+    lint_android_sdk_version = default_lint_android_sdk_version
+
+    # Libc++ library directory. Override to use a custom libc++ binary.
+    android_libcpp_lib_dir = ""
+
+    # Android versionCode for android_apk()s that don't explicitly set one.
+    android_default_version_code = "1"
+
+    # Android versionName for android_apk()s that don't explicitly set one.
+    android_default_version_name = "Developer Build"
+
+    # Forced Android versionCode
+    android_override_version_code = ""
+
+    # Forced Android versionName
+    android_override_version_name = ""
+
+    # The path to the keystore to use for signing builds.
+    android_keystore_path = default_android_keystore_path
+
+    # The name of the keystore to use for signing builds.
+    android_keystore_name = default_android_keystore_name
+
+    # The password for the keystore to use for signing builds.
+    android_keystore_password = default_android_keystore_password
+
+    # Java debug on Android. Having this on enables multidexing, and turning it
+    # off will enable proguard.
+    is_java_debug = is_debug
+
+    # Mark APKs as android:debuggable="true".
+    debuggable_apks = !is_official_build
+
+    # Set to false to disable the Errorprone compiler.
+    # Defaults to false for official builds to reduce build times.
+    # Static analysis failures should have been already caught by normal bots.
+    # Disabled when fast_local_dev is turned on.
+    use_errorprone_java_compiler = !is_official_build && !android_fast_local_dev
+
+    # Build incremental targets whenever possible.
+    # See //build/android/incremental_install/README.md for more details.
+    incremental_install = android_fast_local_dev
+
+    # When true, updates all android_aar_prebuilt() .info files during gn gen.
+    # Refer to android_aar_prebuilt() for more details.
+    update_android_aar_prebuilts = false
+
+    # Turns off android lint. Useful for prototyping or for faster local builds.
+    # Defaults to true for official builds to reduce build times.
+    # Static analysis failures should have been already caught by normal bots.
+    # Disabled when fast_local_dev is turned on.
+    disable_android_lint = is_official_build || android_fast_local_dev
+
+    # Location of aapt2 used for app bundles. For now, a more recent version
+    # than the one distributed with the Android SDK is required.
+    android_sdk_tools_bundle_aapt2_dir =
+        "//third_party/android_build_tools/aapt2"
+
+    # Causes expectation failures to break the build; otherwise they just
+    # warn on stderr and write a failure file to $expectations_failure_dir.
+    fail_on_android_expectations = false
+
+    # Controls whether proguard obfuscation is enabled for targets
+    # configured to use it.
+    enable_proguard_obfuscation = true
+
+    # Controls whether |short_resource_paths| and |strip_resource_names| are
+    # respected. Useful when trying to analyze APKs using tools that do not
+    # support mapping these names.
+    enable_arsc_obfuscation = true
+
+    # The target to use as the system WebView implementation.
+    system_webview_apk_target = "//android_webview:system_webview_apk"
+
+    # Where to write failed expectations for bots to read.
+    expectations_failure_dir = "$root_build_dir/failed_expectations"
+  }
+
+  # We need a second declare_args block to make sure we are using the overridden
+  # value of the arguments set above.
+  declare_args() {
+    if (defined(default_android_sdk_platform_version)) {
+      android_sdk_platform_version = default_android_sdk_platform_version
+    } else {
+      android_sdk_platform_version = android_sdk_version
+    }
+
+    # Whether java assertions and Preconditions checks are enabled.
+    enable_java_asserts = is_java_debug || dcheck_always_on
+
+    # Reduce build time by using d8 incremental build.
+    enable_incremental_d8 = true
+
+    # Use hashed symbol names to reduce JNI symbol overhead.
+    use_hashed_jni_names = !is_java_debug
+
+    # Desugar lambdas and interface methods using Desugar.jar rather than
+    # D8/R8. D8/R8 will still be used for backported method desugaring.
+    enable_bazel_desugar = true
+
+    # Enables Java library desugaring.
+    # This will cause an extra classes.dex file to appear in every apk.
+    enable_jdk_library_desugaring = true
+  }
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different than the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host operating
+  # system, which is different than the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the dir strings here for each output architecture
+  # (rather than just the current one) since these are needed by the Android
+  # toolchain file to define toolchains for all possible targets in one pass.
+
+  android_sdk =
+      "${android_sdk_root}/platforms/android-${android_sdk_platform_version}"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (current_cpu == "x86") {
+    android_prebuilt_arch = "android-x86"
+    _binary_prefix = "i686-linux-android"
+  } else if (current_cpu == "arm") {
+    android_prebuilt_arch = "android-arm"
+    _binary_prefix = "arm-linux-androideabi"
+  } else if (current_cpu == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+    _binary_prefix = "mipsel-linux-android"
+  } else if (current_cpu == "x64") {
+    android_prebuilt_arch = "android-x86_64"
+    _binary_prefix = "x86_64-linux-android"
+  } else if (current_cpu == "arm64") {
+    android_prebuilt_arch = "android-arm64"
+    _binary_prefix = "aarch64-linux-android"
+  } else if (current_cpu == "mips64el") {
+    android_prebuilt_arch = "android-mips64"
+    _binary_prefix = "mips64el-linux-android"
+  } else {
+    assert(false, "Need android libgcc support for your target arch.")
+  }
+
+  android_toolchain_root = "$android_ndk_root/toolchains/llvm/prebuilt/${android_host_os}-${android_host_arch}"
+  android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
+  android_readelf = "${android_tool_prefix}readelf"
+  android_objcopy = "${android_tool_prefix}objcopy"
+  android_gdbserver =
+      "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+  android_sdk_tools_bundle_aapt2 = "${android_sdk_tools_bundle_aapt2_dir}/aapt2"
+}
diff --git a/src/build/config/android/copy_ex.gni b/src/build/config/android/copy_ex.gni
new file mode 100644
index 0000000..d3705dd
--- /dev/null
+++ b/src/build/config/android/copy_ex.gni
@@ -0,0 +1,72 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies a list of files into a destination directory, potentially renaming
+# files as they are copied. This also ensures that symlinks are followed
+# during the copy (i.e. the symlinks are never copied, only their content).
+#
+# Variables:
+#  dest: Destination directory path.
+#  sources: List of source files or directories to copy to dest.
+#  renaming_sources: Optional list of source file paths that will be renamed
+#    during the copy operation. If provided, renaming_destinations is required.
+#  renaming_destinations: Optional list of destination file paths, required
+#    when renaming_sources is provided. Both lists should have the same size
+#    and matching entries.
+#  args: Optional. Additional arguments to the copy_ex.py script.
+#
+#  The following variables have the usual GN meaning: data, deps, inputs,
+#  outputs, testonly, visibility.
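+#
+# Example (hypothetical paths, a minimal sketch):
+#   copy_ex("copy_icu_data") {
+#     dest = "$root_build_dir/icu"
+#     sources = [ "//third_party/icu/common/icudtl.dat" ]
+#     outputs = [ "$root_build_dir/icu/icudtl.dat" ]
+#   }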
+
+import("//build/config/python.gni")
+
+template("copy_ex") {
+  action_with_pydeps(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    sources = []
+    if (defined(invoker.sources)) {
+      sources += invoker.sources
+    }
+    outputs = []
+    if (defined(invoker.outputs)) {
+      outputs += invoker.outputs
+    }
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    }
+
+    script = "//build/android/gyp/copy_ex.py"
+
+    args = [
+      "--dest",
+      rebase_path(invoker.dest, root_build_dir),
+    ]
+    rebased_sources = rebase_path(sources, root_build_dir)
+    args += [ "--files=$rebased_sources" ]
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    if (defined(invoker.renaming_sources) &&
+        defined(invoker.renaming_destinations)) {
+      sources += invoker.renaming_sources
+      renaming_destinations = invoker.renaming_destinations
+      outputs +=
+          get_path_info(rebase_path(renaming_destinations, ".", invoker.dest),
+                        "abspath")
+      rebased_renaming_sources =
+          rebase_path(invoker.renaming_sources, root_build_dir)
+      args += [ "--renaming-sources=$rebased_renaming_sources" ]
+      args += [ "--renaming-destinations=$renaming_destinations" ]
+    }
+  }
+}
diff --git a/src/build/config/android/extract_unwind_tables.gni b/src/build/config/android/extract_unwind_tables.gni
new file mode 100644
index 0000000..5444c5b
--- /dev/null
+++ b/src/build/config/android/extract_unwind_tables.gni
@@ -0,0 +1,44 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+template("unwind_table_asset") {
+  # Note: This file name is used in multiple monochrome build scripts.
+  _asset_path = "${target_gen_dir}/${target_name}/unwind_cfi_32"
+  _unwind_action = "${target_name}__extract"
+
+  action(_unwind_action) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _root_dir = "$root_out_dir"
+    if (defined(android_secondary_abi_cpu)) {
+      _root_dir = get_label_info(":foo($android_secondary_abi_toolchain)",
+                                 "root_out_dir")
+    }
+
+    script = "//build/android/gyp/extract_unwind_tables.py"
+    outputs = [ _asset_path ]
+    inputs = [ "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension" ]
+
+    args = [
+      "--input_path",
+      rebase_path(
+          "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension",
+          root_build_dir),
+      "--output_path",
+      rebase_path(_asset_path, root_build_dir),
+      "--dump_syms_path",
+      rebase_path("$root_out_dir/dump_syms", root_build_dir),
+    ]
+    deps = invoker.deps
+    deps += [ "//third_party/breakpad:dump_syms" ]
+  }
+  android_assets(target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    sources = [ _asset_path ]
+    disable_compression = true
+    deps = [ ":$_unwind_action" ]
+  }
+}
diff --git a/src/build/config/android/internal_rules.gni b/src/build/config/android/internal_rules.gni
new file mode 100644
index 0000000..06c0702
--- /dev/null
+++ b/src/build/config/android/internal_rules.gni
@@ -0,0 +1,4367 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build/config/android/config.gni")
+import("//build/config/android/copy_ex.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/compute_inputs_for_analyze.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/python.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/kythe.gni")
+import("//build/util/generate_wrapper.gni")
+import("//build_overrides/build.gni")
+if (current_toolchain == default_toolchain) {
+  import("//build/toolchain/concurrent_links.gni")
+}
+assert(is_android)
+
+# The following _java_*_types variables capture all the existing target types.
+# If a new type is introduced, please add it to one of these categories,
+# preferring the more specific resource/library types.
+_java_resource_types = [
+  "android_assets",
+  "android_resources",
+]
+
+_java_library_types = [
+  "java_library",
+  "system_java_library",
+  "android_app_bundle_module",
+]
+
+# These are leaf java target types. They cannot be passed as deps to other
+# targets. Thus their naming schemes are not enforced.
+_java_leaf_types = [
+  "android_apk",
+  "android_app_bundle",
+  "dist_aar",
+  "dist_jar",
+  "java_annotation_processor",
+  "java_binary",
+  "junit_binary",
+]
+
+# All _java_resource_types targets must conform to these patterns.
+_java_resource_patterns = [
+  "*:*_assets",
+  "*android*:assets",
+  "*:*_apk_*resources",
+  "*android*:resources",
+  "*:*_resources",
+  "*:*_grd",
+  "*:*locale_paks",
+  "*:*_java_strings",
+  "*:*strings_java",
+]
+
+# All _java_library_types targets must conform to these patterns. This includes
+# all non-leaf targets that use java_library_impl.
+_java_library_patterns = [
+  "*:*_java",
+  "*:*_javalib",
+  "*:*_java_*",  # e.g. chrome_java_test_support
+  "*:java",
+  "*:junit",
+  "*:junit_*",
+  "*:*_junit_*",
+  "*:*javatests",
+  "*:*_bundle_module",
+
+  # TODO(agrieve): Rename targets below to match above patterns.
+  "//android_webview/glue:glue",
+]
+
+# These identify all non-leaf targets that have .build_config files. This is the
+# set of patterns that other targets can use to filter out java targets.
+java_target_patterns = _java_library_patterns + _java_resource_patterns
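+
+# For example, a rule that wants only its java deps could use (illustrative):
+#   _java_deps = filter_include(invoker.deps, java_target_patterns)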
+
+_r8_path = "//third_party/r8/lib/r8.jar"
+_custom_d8_path = "//third_party/r8/custom_d8.jar"
+_desugar_jdk_libs_json = "//third_party/r8/desugar_jdk_libs.json"
+_desugar_jdk_libs_jar = "//third_party/android_deps/libs/com_android_tools_desugar_jdk_libs/desugar_jdk_libs-1.1.1.jar"
+_desugar_jdk_libs_configuration_jar = "//third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration/desugar_jdk_libs_configuration-1.1.1.jar"
+_desugar_runtime_jar = "$root_build_dir/obj/third_party/bazel/desugar/Desugar_runtime.processed.jar"
+
+_dexdump_path = "$android_sdk_build_tools/dexdump"
+_dexlayout_path = "//third_party/android_build_tools/art/dexlayout"
+_profman_path = "//third_party/android_build_tools/art/profman"
+_art_lib_file_names = [
+  "libartbase.so",
+  "libart-compiler.so",
+  "libart-dexlayout.so",
+  "libart-disassembler.so",
+  "libart-gtest.so",
+  "libart.so",
+  "libbacktrace.so",
+  "libbase.so",
+  "libcrypto-host.so",
+  "libc++.so",
+  "libcutils.so",
+  "libdexfile.so",
+  "libexpat-host.so",
+  "libicui18n-host.so",
+  "libicuuc-host.so",
+  "libjavacore.so",
+  "libjavacrypto.so",
+  "liblog.so",
+  "liblz4.so",
+  "liblzma.so",
+  "libnativebridge.so",
+  "libnativehelper.so",
+  "libnativeloader.so",
+  "libopenjdkjvm.so",
+  "libopenjdkjvmti.so",
+  "libopenjdk.so",
+  "libprofile.so",
+  "libsigchain.so",
+  "libssl-host.so",
+  "libunwindstack.so",
+  "libvixl-arm64.so",
+  "libvixl-arm.so",
+  "libvixld-arm64.so",
+  "libvixld-arm.so",
+  "libz-host.so",
+  "libziparchive.so",
+  "slicer.so",
+]
+_default_art_libs = []
+foreach(lib, _art_lib_file_names) {
+  _default_art_libs += [ "//third_party/android_build_tools/art/lib/$lib" ]
+}
+
+# Put the bug number in the target name so that false-positives have a hint in
+# the error message about why non-existent dependencies are there.
+build_config_target_suffix = "__build_config_crbug_908819"
+
+# Write the target's .build_config file. This is a json file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax to add a value in that dictionary to
+# an action/action_foreachs args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
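+#
+# A hypothetical sketch (the key names are illustrative only):
+#   args = [ "--dex-files=@FileArg($_rebased_build_config:deps_info:dex_files)" ]
+# At action runtime this arg is expanded to the value stored under
+# deps_info -> dex_files in the written .build_config file.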
+template("write_build_config") {
+  _type = invoker.type
+  _parent_invoker = invoker.invoker
+  _target_label =
+      get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
+
+  # Ensure targets match naming patterns so that __assetres, __header, __impl
+  # targets work properly. Those generated targets allow for effective deps
+  # filtering.
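+  #
+  # For example, an android_resources() target named "foo_java" would trip the
+  # resource-pattern check below, while "foo_resources" passes (see
+  # _java_resource_patterns above).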
+  if (filter_exclude([ _type ], _java_resource_types) == []) {
+    if (filter_exclude([ _target_label ], _java_resource_patterns) != []) {
+      assert(false, "Invalid java resource target name: $_target_label")
+    }
+  } else if (filter_exclude([ _type ], _java_library_types) == []) {
+    if (filter_exclude([ _target_label ], _java_library_patterns) != [] ||
+        filter_exclude([ _target_label ], _java_resource_patterns) == []) {
+      assert(false, "Invalid java library target name: $_target_label")
+    }
+  } else if (_type == "group") {
+    if (filter_exclude([ _target_label ], java_target_patterns) != []) {
+      assert(false, "Invalid java target name: $_target_label")
+    }
+  } else if (filter_exclude([ _type ], _java_leaf_types) != []) {
+    assert(false, "This java type needs a category: $_type")
+  }
+
+  if (defined(invoker.public_target_label)) {
+    _target_label = invoker.public_target_label
+  }
+
+  action_with_pydeps(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (defined(invoker.android_manifest_dep)) {
+      deps += [ invoker.android_manifest_dep ]
+    }
+
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+    outputs = [ invoker.build_config ]
+
+    _deps_configs = []
+    _public_deps_configs = []
+    if (defined(invoker.possible_config_deps)) {
+      foreach(_possible_dep, invoker.possible_config_deps) {
+        _dep_label = get_label_info(_possible_dep, "label_no_toolchain")
+        if (filter_exclude([ _dep_label ], java_target_patterns) == []) {
+          # Put the bug number in the target name so that false-positives
+          # have a hint in the error message about non-existent dependencies.
+          deps += [ "$_dep_label$build_config_target_suffix" ]
+          _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir")
+          _dep_name = get_label_info(_possible_dep, "name")
+          _dep_config = "$_dep_gen_dir/$_dep_name.build_config"
+
+          _deps_configs += [ _dep_config ]
+          if (defined(invoker.possible_config_public_deps)) {
+            if (filter_include([ _possible_dep ],
+                               invoker.possible_config_public_deps) != []) {
+              _public_deps_configs += [ _dep_config ]
+            }
+          }
+        }
+      }
+    }
+    _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
+    _rebased_public_deps_configs =
+        rebase_path(_public_deps_configs, root_build_dir)
+
+    args = [
+      "--type=$_type",
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--deps-configs=$_rebased_deps_configs",
+      "--public-deps-configs=$_rebased_public_deps_configs",
+      "--build-config",
+      rebase_path(invoker.build_config, root_build_dir),
+      "--gn-target",
+      _target_label,
+    ]
+
+    if (defined(invoker.ignore_dependency_public_deps) &&
+        invoker.ignore_dependency_public_deps) {
+      args += [ "--ignore-dependency-public-deps" ]
+    }
+
+    if (defined(invoker.aar_path)) {
+      args += [
+        "--aar-path",
+        rebase_path(invoker.aar_path, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.chromium_code) && !invoker.chromium_code) {
+      # Default to chromium code if invoker did not pass anything.
+      args += [ "--non-chromium-code" ]
+    }
+
+    if (defined(invoker.device_jar_path)) {
+      args += [
+        "--device-jar-path",
+        rebase_path(invoker.device_jar_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.host_jar_path)) {
+      args += [
+        "--host-jar-path",
+        rebase_path(invoker.host_jar_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.unprocessed_jar_path)) {
+      args += [
+        "--unprocessed-jar-path",
+        rebase_path(invoker.unprocessed_jar_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.ijar_path)) {
+      args += [
+        "--interface-jar-path",
+        rebase_path(invoker.ijar_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.java_resources_jar)) {
+      args += [
+        "--java-resources-jar-path",
+        rebase_path(invoker.java_resources_jar, root_build_dir),
+      ]
+    }
+    if (defined(invoker.annotation_processor_deps) &&
+        invoker.annotation_processor_deps != []) {
+      _processor_configs = []
+      foreach(_processor_dep, invoker.annotation_processor_deps) {
+        _dep_label = get_label_info(_processor_dep, "label_no_toolchain")
+        _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir")
+        _dep_name = get_label_info(_processor_dep, "name")
+        deps += [ "$_dep_label$build_config_target_suffix" ]
+        _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+      }
+      _rebased_processor_configs =
+          rebase_path(_processor_configs, root_build_dir)
+      args += [ "--annotation-processor-configs=$_rebased_processor_configs" ]
+    }
+
+    # Dex path for library targets, or the intermediate library for apks.
+    if (defined(invoker.dex_path)) {
+      args += [
+        "--dex-path",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+
+    # Dex path for the final apk.
+    if (defined(invoker.final_dex_path)) {
+      args += [
+        "--final-dex-path",
+        rebase_path(invoker.final_dex_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.supports_android) && invoker.supports_android) {
+      args += [ "--supports-android" ]
+    }
+    if (defined(invoker.requires_android) && invoker.requires_android) {
+      args += [ "--requires-android" ]
+    }
+    if (defined(invoker.is_prebuilt) && invoker.is_prebuilt) {
+      args += [ "--is-prebuilt" ]
+    }
+    if (defined(invoker.bypass_platform_checks) &&
+        invoker.bypass_platform_checks) {
+      args += [ "--bypass-platform-checks" ]
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ]
+      apk_under_test_gen_dir =
+          get_label_info(invoker.apk_under_test, "target_gen_dir")
+      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+      apk_under_test_config =
+          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+      args += [
+        "--tested-apk-config",
+        rebase_path(apk_under_test_config, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.asset_sources)) {
+      _rebased_asset_sources =
+          rebase_path(invoker.asset_sources, root_build_dir)
+      args += [ "--asset-sources=$_rebased_asset_sources" ]
+    }
+    if (defined(invoker.asset_renaming_sources)) {
+      _rebased_asset_renaming_sources =
+          rebase_path(invoker.asset_renaming_sources, root_build_dir)
+      args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ]
+
+      # These are zip paths, so no need to rebase.
+      args += [
+        "--asset-renaming-destinations=${invoker.asset_renaming_destinations}",
+      ]
+    }
+    if (defined(invoker.disable_compression) && invoker.disable_compression) {
+      args += [ "--disable-asset-compression" ]
+    }
+    if (defined(invoker.treat_as_locale_paks) && invoker.treat_as_locale_paks) {
+      args += [ "--treat-as-locale-paks" ]
+    }
+
+    if (defined(invoker.android_manifest)) {
+      inputs += [ invoker.android_manifest ]
+      args += [
+        "--android-manifest",
+        rebase_path(invoker.android_manifest, root_build_dir),
+      ]
+    }
+    if (defined(invoker.resources_zip)) {
+      args += [
+        "--resources-zip",
+        rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.resource_overlay) && invoker.resource_overlay) {
+      args += [ "--resource-overlay" ]
+    }
+
+    if (defined(invoker.custom_package)) {
+      args += [
+        "--package-name",
+        invoker.custom_package,
+      ]
+    }
+    if (defined(invoker.r_text)) {
+      args += [
+        "--r-text-path",
+        rebase_path(invoker.r_text, root_build_dir),
+      ]
+    }
+    if (defined(invoker.res_size_info_path)) {
+      args += [
+        "--res-size-info",
+        rebase_path(invoker.res_size_info_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.resource_dirs)) {
+      resource_dirs = rebase_path(invoker.resource_dirs, root_build_dir)
+      args += [ "--resource-dirs=$resource_dirs" ]
+    }
+    if (defined(invoker.res_sources_path)) {
+      _res_sources_path = rebase_path(invoker.res_sources_path, root_build_dir)
+      args += [ "--res-sources-path=$_res_sources_path" ]
+    }
+    if (defined(invoker.proto_resources_path)) {
+      _rebased_proto_resources =
+          rebase_path(invoker.proto_resources_path, root_build_dir)
+      args += [ "--apk-proto-resources=$_rebased_proto_resources" ]
+    }
+    if (defined(invoker.r_text_path)) {
+      _rebased_rtxt_path = rebase_path(invoker.r_text_path, root_build_dir)
+      args += [ "--r-text-path=$_rebased_rtxt_path" ]
+    }
+    if (defined(invoker.module_pathmap_path)) {
+      _rebased_pathmap_path =
+          rebase_path(invoker.module_pathmap_path, root_build_dir)
+      args += [ "--module-pathmap-path=$_rebased_pathmap_path" ]
+    }
+
+    if (defined(invoker.shared_libraries_runtime_deps_file)) {
+      # Don't list shared_libraries_runtime_deps_file as an input in order to
+      # avoid having to depend on the runtime_deps target. See comment in
+      # rules.gni for why we do this.
+      args += [
+        "--shared-libraries-runtime-deps",
+        rebase_path(invoker.shared_libraries_runtime_deps_file, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.base_allowlist_rtxt_path)) {
+      args += [
+        "--base-allowlist-rtxt-path",
+        rebase_path(invoker.base_allowlist_rtxt_path, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.is_base_module) && invoker.is_base_module) {
+      args += [ "--is-base-module" ]
+    }
+
+    if (defined(invoker.loadable_modules)) {
+      _rebased_loadable_modules =
+          rebase_path(invoker.loadable_modules, root_build_dir)
+      args += [ "--loadable-modules=$_rebased_loadable_modules" ]
+    }
+
+    if (defined(invoker.secondary_abi_shared_libraries_runtime_deps_file)) {
+      # Don't list secondary_abi_shared_libraries_runtime_deps_file as an
+      # input in order to avoid having to depend on the runtime_deps target.
+      # See comment in rules.gni for why we do this.
+      args += [
+        "--secondary-abi-shared-libraries-runtime-deps",
+        rebase_path(invoker.secondary_abi_shared_libraries_runtime_deps_file,
+                    root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.secondary_abi_loadable_modules) &&
+        invoker.secondary_abi_loadable_modules != []) {
+      _rebased_secondary_abi_loadable_modules =
+          rebase_path(invoker.secondary_abi_loadable_modules, root_build_dir)
+      args += [ "--secondary-abi-loadable-modules=$_rebased_secondary_abi_loadable_modules" ]
+    }
+
+    if (defined(invoker.native_lib_placeholders) &&
+        invoker.native_lib_placeholders != []) {
+      args += [ "--native-lib-placeholders=${invoker.native_lib_placeholders}" ]
+    }
+
+    if (defined(invoker.secondary_native_lib_placeholders) &&
+        invoker.secondary_native_lib_placeholders != []) {
+      args += [ "--secondary-native-lib-placeholders=${invoker.secondary_native_lib_placeholders}" ]
+    }
+
+    if (defined(invoker.uncompress_shared_libraries) &&
+        invoker.uncompress_shared_libraries) {
+      args += [ "--uncompress-shared-libraries" ]
+    }
+
+    if (defined(invoker.library_always_compress)) {
+      args += [ "--library-always-compress=${invoker.library_always_compress}" ]
+    }
+
+    if (defined(invoker.library_renames)) {
+      args += [ "--library-renames=${invoker.library_renames}" ]
+    }
+
+    if (defined(invoker.apk_path)) {
+      # TODO(tiborg): Remove APK path from build config and use
+      # install_artifacts from metadata instead.
+      _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir)
+      args += [ "--apk-path=$_rebased_apk_path" ]
+      if (defined(invoker.incremental_apk_path)) {
+        _rebased_incremental_apk_path =
+            rebase_path(invoker.incremental_apk_path, root_build_dir)
+        _rebased_incremental_install_json_path =
+            rebase_path(invoker.incremental_install_json_path, root_build_dir)
+        args += [
+          "--incremental-install-json-path=$_rebased_incremental_install_json_path",
+          "--incremental-apk-path=$_rebased_incremental_apk_path",
+        ]
+      }
+    }
+
+    if (defined(invoker.java_sources_file)) {
+      args += [
+        "--java-sources-file",
+        rebase_path(invoker.java_sources_file, root_build_dir),
+      ]
+    }
+    if (defined(invoker.srcjar)) {
+      args += [
+        "--srcjar",
+        rebase_path(invoker.srcjar, root_build_dir),
+      ]
+    }
+    if (defined(invoker.bundled_srcjars)) {
+      _rebased_bundled_srcjars =
+          rebase_path(invoker.bundled_srcjars, root_build_dir)
+      args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ]
+    }
+    if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+      args += [ "--proguard-enabled" ]
+    }
+    if (defined(invoker.proguard_mapping_path)) {
+      _rebased_proguard_mapping_path =
+          rebase_path(invoker.proguard_mapping_path, root_build_dir)
+      args += [ "--proguard-mapping-path=$_rebased_proguard_mapping_path" ]
+    }
+    if (defined(invoker.input_jars_paths)) {
+      _rebased_input_jars_paths =
+          rebase_path(invoker.input_jars_paths, root_build_dir)
+      args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ]
+    }
+    if (defined(invoker.low_classpath_priority) &&
+        invoker.low_classpath_priority) {
+      args += [ "--low-classpath-priority" ]
+    }
+    if (defined(invoker.mergeable_android_manifests)) {
+      _rebased_mergeable_android_manifests =
+          rebase_path(invoker.mergeable_android_manifests, root_build_dir)
+      args += [
+        "--mergeable-android-manifests=$_rebased_mergeable_android_manifests",
+      ]
+    }
+    if (defined(invoker.proguard_configs)) {
+      _rebased_proguard_configs =
+          rebase_path(invoker.proguard_configs, root_build_dir)
+      args += [ "--proguard-configs=$_rebased_proguard_configs" ]
+    }
+    if (defined(invoker.static_library_dependent_targets)) {
+      _dependent_configs = []
+      foreach(_dep, invoker.static_library_dependent_targets) {
+        _dep_name = _dep.name
+        _dep_label = get_label_info(_dep_name, "label_no_toolchain")
+        deps += [ "$_dep_label$build_config_target_suffix" ]
+        _dep_gen_dir = get_label_info(_dep_name, "target_gen_dir")
+        _dep_name = get_label_info(_dep_name, "name")
+        _config =
+            rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir)
+        _dependent_configs += [ _config ]
+      }
+      args += [ "--static-library-dependent-configs=$_dependent_configs" ]
+    }
+    if (defined(invoker.gradle_treat_as_prebuilt) &&
+        invoker.gradle_treat_as_prebuilt) {
+      args += [ "--gradle-treat-as-prebuilt" ]
+    }
+    if (defined(invoker.main_class)) {
+      args += [
+        "--main-class",
+        invoker.main_class,
+      ]
+    }
+    if (defined(invoker.base_module_target)) {
+      _base_label =
+          get_label_info(invoker.base_module_target, "label_no_toolchain")
+      _dep_gen_dir = get_label_info(_base_label, "target_gen_dir")
+      _dep_name = get_label_info(_base_label, "name")
+      deps += [ "$_base_label$build_config_target_suffix" ]
+      _base_module_build_config = "$_dep_gen_dir/$_dep_name.build_config"
+      inputs += [ _base_module_build_config ]
+      args += [
+        "--base-module-build-config",
+        rebase_path(_base_module_build_config, root_build_dir),
+      ]
+    }
+    if (defined(invoker.module_build_configs)) {
+      inputs += invoker.module_build_configs
+      _rebased_configs =
+          rebase_path(invoker.module_build_configs, root_build_dir)
+      args += [ "--module-build-configs=$_rebased_configs" ]
+    }
+    if (defined(invoker.version_name)) {
+      args += [
+        "--version-name",
+        invoker.version_name,
+      ]
+    }
+    if (defined(invoker.version_code)) {
+      args += [
+        "--version-code",
+        invoker.version_code,
+      ]
+    }
+    if (defined(invoker.recursive_resource_deps) &&
+        invoker.recursive_resource_deps) {
+      args += [ "--recursive-resource-deps" ]
+    }
+    if (current_toolchain != default_toolchain) {
+      # This has to be a build-time error rather than a GN assert because many
+      # packages have a mix of java and non-java targets. For example, the
+      # following would fail even though nothing depends on :bar(//baz):
+      #
+      # shared_library("foo") {
+      # }
+      #
+      # android_library("bar") {
+      #   deps = [ ":foo(//baz)" ]
+      #   assert(current_toolchain == default_toolchain)
+      # }
+      _msg = [
+        "Tried to build an Android target in a non-default toolchain.",
+        "target: $_target_label",
+        "current_toolchain: $current_toolchain",
+        "default_toolchain: $default_toolchain",
+      ]
+      args += [ "--fail=$_msg" ]
+    }
+  }
+}
+
+template("generate_android_wrapper") {
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    generator_script = "//build/android/gyp/generate_android_wrapper.py"
+    sources = [
+      "//build/android/gyp/util/build_utils.py",
+      "//build/gn_helpers.py",
+      "//build/util/generate_wrapper.py",
+    ]
+  }
+}
+
+template("generate_r_java") {
+  action_with_pydeps(target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (defined(invoker.possible_resource_deps)) {
+      foreach(_dep, invoker.possible_resource_deps) {
+        _target_label = get_label_info(_dep, "label_no_toolchain")
+        if (filter_exclude([ _target_label ], _java_library_patterns) == [] &&
+            filter_exclude([ _target_label ], _java_resource_patterns) != []) {
+          # Depend on the java libraries' transitive __assetres target instead.
+          # This is required to ensure depending on java_groups works.
+          deps += [ "${_target_label}__assetres" ]
+        } else {
+          deps += [ _dep ]
+        }
+      }
+    }
+    depfile = "$target_gen_dir/${invoker.target_name}.d"
+    inputs = [ invoker.build_config ]
+    outputs = [ invoker.srcjar_path ]
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    script = "//build/android/gyp/create_r_java.py"
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--srcjar-out",
+      rebase_path(invoker.srcjar_path, root_build_dir),
+      "--deps-rtxts=@FileArg($_rebased_build_config:deps_info:dependency_r_txt_files)",
+      "--r-package=${invoker.package}",
+    ]
+  }
+}
+
+# Generates a script in the build bin directory which runs the test
+# target using the test runner script in build/android/test_runner.py.
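+#
+# Hypothetical usage (a minimal sketch):
+#   test_runner_script("base_unittests__test_runner_script") {
+#     test_name = "base_unittests"
+#     test_type = "gtest"
+#     test_suite = "base_unittests"
+#     executable_dist_dir = "$root_out_dir/base_unittests__dist"
+#   }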
+template("test_runner_script") {
+  testonly = true
+  _test_name = invoker.test_name
+  _test_type = invoker.test_type
+  _incremental_apk = defined(invoker.incremental_apk) && invoker.incremental_apk
+
+  _runtime_deps =
+      !defined(invoker.ignore_all_data_deps) || !invoker.ignore_all_data_deps
+
+  if (_runtime_deps) {
+    # This runtime_deps file is used at runtime and thus cannot go in
+    # target_gen_dir.
+    _target_dir_name = get_label_info(":$target_name", "dir")
+    _runtime_deps_file =
+        "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.runtime_deps"
+    _runtime_deps_target = "${target_name}__write_deps"
+    group(_runtime_deps_target) {
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "deps",
+                               "public_deps",
+                             ])
+      data_deps = []
+      if (defined(invoker.data_deps)) {
+        data_deps += invoker.data_deps
+      }
+      if (defined(invoker.additional_apks)) {
+        data_deps += invoker.additional_apks
+      }
+      write_runtime_deps = _runtime_deps_file
+    }
+  }
+
+  if (defined(invoker.apk_under_test)) {
+    _install_artifacts_json =
+        "${target_gen_dir}/${target_name}.install_artifacts"
+    generated_file("${target_name}__install_artifacts") {
+      deps = [ invoker.apk_under_test ]
+      output_conversion = "json"
+      outputs = [ _install_artifacts_json ]
+      data_keys = [ "install_artifacts" ]
+      walk_keys = [ "install_artifacts_barrier" ]
+      rebase = root_build_dir
+    }
+  }
+
+  generate_android_wrapper(target_name) {
+    wrapper_script = "$root_build_dir/bin/run_${_test_name}"
+
+    executable = "//testing/test_env.py"
+
+    if (defined(android_test_runner_script)) {
+      _runner_script = android_test_runner_script
+    } else {
+      _runner_script = "//build/android/test_runner.py"
+    }
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    data_deps = [
+      "//build/android:test_runner_py",
+      "//testing:test_scripts_shared",
+    ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+    data = []
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+
+    executable_args = [
+      "@WrappedPath(" + rebase_path(_runner_script, root_build_dir) + ")",
+      _test_type,
+      "--output-directory",
+      "@WrappedPath(.)",
+    ]
+
+    if (_runtime_deps) {
+      deps += [ ":$_runtime_deps_target" ]
+      data += [ _runtime_deps_file ]
+      _rebased_runtime_deps_file =
+          rebase_path(_runtime_deps_file, root_build_dir)
+      executable_args += [
+        "--runtime-deps-path",
+        "@WrappedPath(${_rebased_runtime_deps_file})",
+      ]
+    }
+
+    # apk_target is not used for native executable tests
+    # (e.g. breakpad_unittests).
+    if (defined(invoker.apk_target)) {
+      assert(!defined(invoker.executable_dist_dir))
+      deps += [ "${invoker.apk_target}$build_config_target_suffix" ]
+      _apk_build_config =
+          get_label_info(invoker.apk_target, "target_gen_dir") + "/" +
+          get_label_info(invoker.apk_target, "name") + ".build_config"
+      _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
+      not_needed([ "_rebased_apk_build_config" ])
+    } else if (_test_type == "gtest") {
+      assert(
+          defined(invoker.executable_dist_dir),
+          "Must define either apk_target or executable_dist_dir for test_runner_script()")
+      _rebased_executable_dist_dir =
+          rebase_path(invoker.executable_dist_dir, root_build_dir)
+      executable_args += [
+        "--executable-dist-dir",
+        "@WrappedPath(${_rebased_executable_dist_dir})",
+      ]
+    }
+
+    _device_test = true
+    if (_test_type == "gtest") {
+      assert(defined(invoker.test_suite))
+      executable_args += [
+        "--suite",
+        invoker.test_suite,
+      ]
+      if (use_clang_coverage) {
+        # Set a default coverage output directory (can be overridden by user
+        # passing the same flag).
+        _rebased_coverage_dir =
+            rebase_path("$root_out_dir/coverage", root_build_dir)
+        executable_args += [
+          "--coverage-dir",
+          "@WrappedPath(${_rebased_coverage_dir})",
+        ]
+      }
+    } else if (_test_type == "instrumentation") {
+      _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))"
+      if (_incremental_apk) {
+        _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path))"
+      }
+      _rebased_test_jar = rebase_path(invoker.test_jar, root_build_dir)
+      executable_args += [
+        "--test-apk",
+        _test_apk,
+        "--test-jar",
+        "@WrappedPath(${_rebased_test_jar})",
+      ]
+      if (defined(invoker.apk_under_test)) {
+        if (_incremental_apk) {
+          deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ]
+          _apk_under_test_build_config =
+              get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" +
+              get_label_info(invoker.apk_under_test, "name") + ".build_config"
+          _rebased_apk_under_test_build_config =
+              rebase_path(_apk_under_test_build_config, root_build_dir)
+          _apk_under_test = "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path))"
+        } else {
+          deps += [ ":${target_name}__install_artifacts" ]
+          _rebased_install_artifacts_json =
+              rebase_path(_install_artifacts_json, root_build_dir)
+          _apk_under_test =
+              "@WrappedPath(@FileArg($_rebased_install_artifacts_json[]))"
+        }
+        executable_args += [
+          "--apk-under-test",
+          _apk_under_test,
+        ]
+      }
+      if (defined(invoker.use_webview_provider)) {
+        deps += [ "${invoker.use_webview_provider}$build_config_target_suffix" ]
+        _build_config =
+            get_label_info(invoker.use_webview_provider, "target_gen_dir") +
+            "/" + get_label_info(invoker.use_webview_provider, "name") +
+            ".build_config"
+        _rebased_build_config = rebase_path(_build_config, root_build_dir)
+        executable_args += [
+          "--use-webview-provider",
+          "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))",
+        ]
+      }
+      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled &&
+          !_incremental_apk) {
+        executable_args += [ "--enable-java-deobfuscation" ]
+      }
+      if (use_jacoco_coverage) {
+        # Set a default coverage output directory (can be overridden by user
+        # passing the same flag).
+        _rebased_coverage_dir =
+            rebase_path("$root_out_dir/coverage", root_build_dir)
+        executable_args += [
+          "--coverage-dir",
+          "@WrappedPath(${_rebased_coverage_dir})",
+        ]
+      }
+    } else if (_test_type == "junit") {
+      assert(defined(invoker.test_suite))
+      _device_test = false
+      executable_args += [
+        "--test-suite",
+        invoker.test_suite,
+      ]
+
+      deps += [ ":${invoker.test_suite}$build_config_target_suffix" ]
+      _junit_binary_build_config =
+          "${target_gen_dir}/${invoker.test_suite}.build_config"
+
+      _rebased_robolectric_runtime_deps_dir =
+          rebase_path("$root_build_dir/lib.java/third_party/robolectric",
+                      root_build_dir)
+      _rebased_resource_apk = rebase_path(invoker.resource_apk, root_build_dir)
+      executable_args += [
+        "--resource-apk",
+        "@WrappedPath(${_rebased_resource_apk})",
+        "--robolectric-runtime-deps-dir",
+        "@WrappedPath(${_rebased_robolectric_runtime_deps_dir})",
+      ]
+      if (use_jacoco_coverage) {
+        # Set a default coverage output directory (can be overridden by user
+        # passing the same flag).
+        _rebased_coverage_dir =
+            rebase_path("$root_out_dir/coverage", root_build_dir)
+        executable_args += [
+          "--coverage-dir",
+          "@WrappedPath(${_rebased_coverage_dir})",
+        ]
+      }
+    } else if (_test_type == "linker") {
+      executable_args += [
+        "--test-apk",
+        "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))",
+      ]
+    } else {
+      assert(false, "Invalid test type: $_test_type.")
+    }
+
+    if (defined(invoker.additional_apks)) {
+      foreach(additional_apk, invoker.additional_apks) {
+        deps += [ "$additional_apk$build_config_target_suffix" ]
+        _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" +
+                        get_label_info(additional_apk, "name") + ".build_config"
+        _rebased_build_config = rebase_path(_build_config, root_build_dir)
+        executable_args += [
+          "--additional-apk",
+          "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))",
+        ]
+      }
+    }
+    if (defined(invoker.shard_timeout)) {
+      executable_args += [ "--shard-timeout=${invoker.shard_timeout}" ]
+    }
+    if (_incremental_apk) {
+      executable_args += [
+        "--test-apk-incremental-install-json",
+        "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path))",
+      ]
+      if (defined(invoker.apk_under_test)) {
+        executable_args += [
+          "--apk-under-test-incremental-install-json",
+          "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_json_path))",
+        ]
+      }
+      executable_args += [ "--fast-local-dev" ]
+    }
+    if (_device_test && is_asan) {
+      executable_args += [ "--tool=asan" ]
+    }
+
+    if (defined(invoker.modules)) {
+      foreach(module, invoker.modules) {
+        executable_args += [
+          "--module",
+          module,
+        ]
+      }
+    }
+
+    if (defined(invoker.fake_modules)) {
+      foreach(fake_module, invoker.fake_modules) {
+        executable_args += [
+          "--fake-module",
+          fake_module,
+        ]
+      }
+    }
+
+    if (defined(invoker.additional_locales)) {
+      foreach(locale, invoker.additional_locales) {
+        executable_args += [
+          "--additional-locale",
+          locale,
+        ]
+      }
+    }
+
+    if (defined(invoker.extra_args)) {
+      executable_args += invoker.extra_args
+    }
+  }
+}
+
+if (enable_java_templates) {
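+  # Runs Android lint over a target's sources and resources by invoking
+  # //build/android/gyp/lint.py, writing a stamp file on success. A minimal
+  # usage sketch (target and path names are illustrative):
+  #
+  #   android_lint("foo_java__lint") {
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     build_config_dep = ":foo_java__build_config"
+  #     deps = [ ":foo_java" ]
+  #   }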
+  template("android_lint") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      if (!defined(deps)) {
+        deps = []
+      }
+
+      # https://crbug.com/1098752 Fix for bot OOM (https://crbug.com/1098333).
+      if (defined(java_cmd_pool_size)) {
+        pool = "//build/config/android:java_cmd_pool($default_toolchain)"
+      } else {
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+      }
+
+      # Lint requires generated sources and generated resources from the build.
+      # Turbine __header targets depend on all generated sources, and the
+      # __assetres targets depend on all generated resources.
+      if (defined(invoker.deps)) {
+        foreach(_dep, invoker.deps) {
+          _target_label = get_label_info(_dep, "label_no_toolchain")
+          if (filter_exclude([ _target_label ], _java_library_patterns) == [] &&
+              filter_exclude([ _target_label ], _java_resource_patterns) !=
+              []) {
+            deps += [
+              "${_target_label}__assetres",
+              "${_target_label}__header",
+            ]
+          } else {
+            # Keep non-java deps as they may generate files used only by lint.
+            # e.g. generated suppressions.xml files.
+            deps += [ _dep ]
+          }
+        }
+      }
+
+      if (defined(invoker.min_sdk_version)) {
+        _min_sdk_version = invoker.min_sdk_version
+      } else {
+        _min_sdk_version = default_min_sdk_version
+      }
+
+      _lint_binary_path = "$lint_android_sdk_root/cmdline-tools/latest/bin/lint"
+      _cache_dir = "$root_build_dir/android_lint_cache"
+
+      # Save generated xml files in a consistent location for debugging.
+      _lint_gen_dir = "$target_gen_dir/$target_name"
+      _backported_methods = "//third_party/r8/backported_methods.txt"
+
+      script = "//build/android/gyp/lint.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _lint_binary_path,
+        _backported_methods,
+      ]
+
+      args = [
+        "--target-name",
+        get_label_info(":$target_name", "label_no_toolchain"),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--lint-binary-path",
+        rebase_path(_lint_binary_path, root_build_dir),
+        "--cache-dir",
+        rebase_path(_cache_dir, root_build_dir),
+        "--lint-gen-dir",
+        rebase_path(_lint_gen_dir, root_build_dir),
+        "--android-sdk-version=${lint_android_sdk_version}",
+        "--min-sdk-version=$_min_sdk_version",
+        "--android-sdk-root",
+        rebase_path(lint_android_sdk_root, root_build_dir),
+        "--backported-methods",
+        rebase_path(_backported_methods, root_build_dir),
+      ]
+
+      if (defined(invoker.skip_build_server) && invoker.skip_build_server) {
+        # Nocompile tests need lint to fail through ninja.
+        args += [ "--skip-build-server" ]
+      }
+
+      if (defined(invoker.lint_suppressions_file)) {
+        inputs += [ invoker.lint_suppressions_file ]
+
+        args += [
+          "--config-path",
+          rebase_path(invoker.lint_suppressions_file, root_build_dir),
+        ]
+      }
+
+      if (defined(testonly) && testonly) {
+        # Allows us to ignore unnecessary checks when linting test targets.
+        args += [ "--testonly" ]
+      }
+
+      if (defined(invoker.manifest_package)) {
+        args += [ "--manifest-package=${invoker.manifest_package}" ]
+      }
+
+      if (treat_warnings_as_errors) {
+        args += [ "--warnings-as-errors" ]
+      }
+
+      if (defined(invoker.lint_baseline_file)) {
+        if (compute_inputs_for_analyze) {
+          # The baseline file is included in lint.py as a depfile dep. Since
+          # removing it regenerates the file, it is useful to not have this as
+          # a gn input during local development. Add it only for bots' analyze.
+          inputs += [ invoker.lint_baseline_file ]
+        }
+        args += [
+          # Baseline allows us to turn on lint warnings without fixing all the
+          # pre-existing issues. This stops the flood of new issues while the
+          # existing ones are being fixed.
+          "--baseline",
+          rebase_path(invoker.lint_baseline_file, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.create_cache) && invoker.create_cache) {
+        # Putting the stamp file in the cache dir allows us to depend on ninja
+        # to create the cache dir for us.
+        _stamp_path = "$_cache_dir/build.lint.stamp"
+        args += [ "--create-cache" ]
+      } else {
+        _stamp_path = "$target_out_dir/$target_name/build.lint.stamp"
+        deps += [
+          "//build/android:prepare_android_lint_cache",
+          invoker.build_config_dep,
+        ]
+        inputs += [ invoker.build_config ]
+        _rebased_build_config =
+            rebase_path(invoker.build_config, root_build_dir)
+
+        args += [
+          "--manifest-path=@FileArg($_rebased_build_config:deps_info:lint_android_manifest)",
+          "--extra-manifest-paths=@FileArg($_rebased_build_config:deps_info:lint_extra_android_manifests)",
+
+          # Lint requires all source and all resource files to be passed in the
+          # same invocation for checks like UnusedResources.
+          "--java-sources=@FileArg($_rebased_build_config:deps_info:lint_java_sources)",
+          "--aars=@FileArg($_rebased_build_config:deps_info:lint_aars)",
+          "--srcjars=@FileArg($_rebased_build_config:deps_info:lint_srcjars)",
+          "--resource-sources=@FileArg($_rebased_build_config:deps_info:lint_resource_sources)",
+          "--resource-zips=@FileArg($_rebased_build_config:deps_info:lint_resource_zips)",
+
+          # The full classpath is required for annotation checks like @IntDef.
+          "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
+        ]
+      }
+
+      outputs = [ _stamp_path ]
+      args += [
+        "--stamp",
+        rebase_path(_stamp_path, root_build_dir),
+      ]
+    }
+  }
+
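+  # Runs R8 via //build/android/gyp/proguard.py over the classpath described
+  # by a .build_config file, producing an optimized output and its .mapping
+  # file. A minimal usage sketch (target and path names are illustrative):
+  #
+  #   proguard("foo_apk__r8") {
+  #     deps = [ ":foo_apk__build_config" ]
+  #     build_config = "$target_gen_dir/foo_apk.build_config"
+  #     output_path = "$target_out_dir/foo_apk.r8dex.jar"
+  #   }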
+  template("proguard") {
+    forward_variables_from(invoker,
+                           TESTONLY_AND_VISIBILITY + [
+                                 "data",
+                                 "data_deps",
+                                 "public_deps",
+                               ])
+    _script = "//build/android/gyp/proguard.py"
+    _deps = invoker.deps
+
+    _inputs = [
+      invoker.build_config,
+      _r8_path,
+    ]
+    if (defined(invoker.inputs)) {
+      _inputs += invoker.inputs
+    }
+    if (defined(invoker.proguard_mapping_path)) {
+      _mapping_path = invoker.proguard_mapping_path
+    } else {
+      _mapping_path = "${invoker.output_path}.mapping"
+    }
+
+    _enable_jdk_library_desugaring = enable_jdk_library_desugaring
+    if (defined(invoker.supports_jdk_library_desugaring) &&
+        !invoker.supports_jdk_library_desugaring) {
+      _enable_jdk_library_desugaring = false
+    }
+
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    _args = [
+      "--mapping-output",
+      rebase_path(_mapping_path, root_build_dir),
+      "--classpath",
+      "@FileArg($_rebased_build_config:deps_info:proguard_classpath_jars)",
+      "--classpath",
+      "@FileArg($_rebased_build_config:android:sdk_jars)",
+      "--r8-path",
+      rebase_path(_r8_path, root_build_dir),
+    ]
+    if (treat_warnings_as_errors) {
+      _args += [ "--warnings-as-errors" ]
+    }
+    if (defined(invoker.desugar_jars_paths)) {
+      _rebased_desugar_jars_paths =
+          rebase_path(invoker.desugar_jars_paths, root_build_dir)
+      _args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
+    }
+
+    if ((!defined(invoker.proguard_enable_obfuscation) ||
+         invoker.proguard_enable_obfuscation) && enable_proguard_obfuscation) {
+      _proguard_sourcefile_suffix = ""
+      if (defined(invoker.proguard_sourcefile_suffix)) {
+        _proguard_sourcefile_suffix = "-${invoker.proguard_sourcefile_suffix}"
+      }
+
+      # This is generally the apk name, and serves to identify the mapping
+      # file that would be required to deobfuscate a stacktrace.
+      _mapping_id = get_path_info(_mapping_path, "name")
+      _args += [
+        "--enable-obfuscation",
+        "--sourcefile",
+        "chromium-${_mapping_id}${_proguard_sourcefile_suffix}",
+      ]
+    } else if (defined(invoker.proguard_sourcefile_suffix)) {
+      not_needed(invoker, [ "proguard_sourcefile_suffix" ])
+    }
+
+    if (defined(invoker.modules)) {
+      foreach(_feature_module, invoker.modules) {
+        _rebased_module_build_config =
+            rebase_path(_feature_module.build_config, root_build_dir)
+        _args += [
+          "--feature-name=${_feature_module.name}",
+          "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)",
+          "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)",
+        ]
+        if (defined(_feature_module.uses_split)) {
+          _args += [ "--uses-split=${_feature_module.name}:${_feature_module.uses_split}" ]
+        }
+        _deps += [ _feature_module.build_config_target ]
+      }
+      _stamp = "${target_gen_dir}/${target_name}.r8.stamp"
+      _outputs = [ _stamp ]
+      _output_arg = [
+        "--stamp",
+        rebase_path(_stamp, root_build_dir),
+      ]
+    } else {
+      # We don't directly set the output arg on the _args variable since it is
+      # shared with the expectation target that uses its own stamp file and
+      # does not take an --output-path.
+      _output_arg = [
+        "--output-path",
+        rebase_path(invoker.output_path, root_build_dir),
+      ]
+      _outputs = [ invoker.output_path ]
+    }
+    _outputs += [ _mapping_path ]
+
+    if (defined(invoker.disable_r8_outlining) && invoker.disable_r8_outlining) {
+      _args += [ "--disable-outlining" ]
+    }
+
+    if (defined(invoker.enable_proguard_checks) &&
+        !invoker.enable_proguard_checks) {
+      _args += [ "--disable-checks" ]
+    }
+
+    if (defined(invoker.is_static_library) && invoker.is_static_library) {
+      _args += [
+        "--extra-mapping-output-paths",
+        "@FileArg($_rebased_build_config:deps_info:static_library_proguard_mapping_output_paths)",
+      ]
+    }
+
+    if (_enable_jdk_library_desugaring) {
+      _args += [
+        "--desugar-jdk-libs-json",
+        rebase_path(_desugar_jdk_libs_json, root_build_dir),
+      ]
+      _inputs += [ _desugar_jdk_libs_json ]
+
+      _args += [
+        "--desugar-jdk-libs-jar",
+        rebase_path(_desugar_jdk_libs_jar, root_build_dir),
+        "--desugar-jdk-libs-configuration-jar",
+        rebase_path(_desugar_jdk_libs_configuration_jar, root_build_dir),
+      ]
+      _inputs += [
+        _desugar_jdk_libs_jar,
+        _desugar_jdk_libs_configuration_jar,
+      ]
+
+      _desugared_library_keep_rule_output_path =
+          "$target_gen_dir/$target_name.desugared_library_keep_rules.flags"
+      _args += [
+        "--desugared-library-keep-rule-output",
+        rebase_path(_desugared_library_keep_rule_output_path, root_build_dir),
+      ]
+    }
+    _ignore_desugar_missing_deps =
+        defined(invoker.ignore_desugar_missing_deps) &&
+        invoker.ignore_desugar_missing_deps
+    if (!_ignore_desugar_missing_deps && !enable_bazel_desugar) {
+      _args += [ "--show-desugar-default-interface-warnings" ]
+    }
+
+    if (enable_java_asserts) {
+      # The default for generating dex file format is
+      # --force-disable-assertions.
+      _args += [ "--force-enable-assertions" ]
+    }
+
+    if (defined(invoker.args)) {
+      _args += invoker.args
+    }
+
+    if (defined(invoker.expected_proguard_config)) {
+      _expectations_target =
+          "${invoker.top_target_name}_validate_proguard_config"
+      action_with_pydeps(_expectations_target) {
+        script = _script
+
+        # Need to depend on all deps so that proguard.txt within .aar files get
+        # extracted.
+        deps = _deps
+        depfile = "${target_gen_dir}/${target_name}.d"
+        inputs = [
+          invoker.build_config,
+          invoker.expected_proguard_config,
+        ]
+        _actual_file = "$target_gen_dir/$target_name.proguard_configs"
+        _failure_file =
+            "$expectations_failure_dir/" +
+            string_replace(invoker.expected_proguard_config, "/", "_")
+        outputs = [
+          _actual_file,
+          _failure_file,
+        ]
+        args = _args + [
+                 "--depfile",
+                 rebase_path(depfile, root_build_dir),
+                 "--failure-file",
+                 rebase_path(_failure_file, root_build_dir),
+                 "--expected-file",
+                 rebase_path(invoker.expected_proguard_config, root_build_dir),
+                 "--actual-file",
+                 rebase_path(_actual_file, root_build_dir),
+                 "--only-verify-expectations",
+               ]
+        if (defined(invoker.expected_proguard_config_base)) {
+          inputs += [ invoker.expected_proguard_config_base ]
+          args += [
+            "--expected-file-base",
+            rebase_path(invoker.expected_proguard_config_base, root_build_dir),
+          ]
+        }
+        if (fail_on_android_expectations) {
+          args += [ "--fail-on-expectations" ]
+        }
+      }
+      _deps += [ ":$_expectations_target" ]
+    }
+    action_with_pydeps(target_name) {
+      script = _script
+      deps = _deps
+      inputs = _inputs
+      outputs = _outputs
+      depfile = "${target_gen_dir}/${target_name}.d"
+      args = _args + _output_arg + [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+             ]
+
+      # http://crbug.com/725224. Fix for bots running out of memory.
+      if (defined(java_cmd_pool_size)) {
+        pool = "//build/config/android:java_cmd_pool($default_toolchain)"
+      } else {
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+      }
+    }
+  }
+
+  # Generates a script in the build bin directory to run a java binary.
+  #
+  # Variables
+  #   main_class: The class containing the program entry point.
+  #   build_config: Path to .build_config for the jar (contains classpath).
+  #   script_name: Name of the script to generate.
+  #   wrapper_script_args: List of extra arguments to pass to the executable.
+  #   tiered_stop_at_level_one: Whether to pass --tiered-stop-at-level-one
+  #
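+  # A minimal usage sketch (names are illustrative):
+  #
+  #   java_binary_script("foo_script") {
+  #     main_class = "org.example.Main"
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     script_name = "foo"
+  #     deps = [ ":foo_java__build_config" ]
+  #   }
+  #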
+  template("java_binary_script") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+
+      _main_class = invoker.main_class
+      _build_config = invoker.build_config
+      _script_name = invoker.script_name
+
+      script = "//build/android/gyp/create_java_binary_script.py"
+      inputs = [ _build_config ]
+      _java_script = "$root_build_dir/bin/$_script_name"
+      outputs = [ _java_script ]
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      args = [
+        "--output",
+        rebase_path(_java_script, root_build_dir),
+        "--main-class",
+        _main_class,
+      ]
+      args += [
+        "--classpath=@FileArg($_rebased_build_config:deps_info:host_classpath)",
+      ]
+
+      if (use_jacoco_coverage) {
+        args += [
+          "--classpath",
+          rebase_path("//third_party/jacoco/lib/jacocoagent.jar",
+                      root_build_dir),
+        ]
+      }
+      if (use_jacoco_coverage || !treat_warnings_as_errors) {
+        args += [ "--noverify" ]
+      }
+      if (defined(invoker.tiered_stop_at_level_one) &&
+          invoker.tiered_stop_at_level_one) {
+        args += [ "--tiered-stop-at-level-one" ]
+      }
+      if (defined(invoker.wrapper_script_args)) {
+        args += [ "--" ] + invoker.wrapper_script_args
+      }
+    }
+  }
+
+  # Variables
+  #   apply_mapping: The path to the ProGuard mapping file to apply.
+  #   disable_incremental: Disable incremental dexing.
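+  #
+  # A minimal usage sketch of a plain (non-ProGuard) invocation (names are
+  # illustrative):
+  #
+  #   dex("foo_java__dex") {
+  #     deps = [ ":foo_java" ]
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     input_class_jars = [ "$target_out_dir/foo_java.jar" ]
+  #     output = "$target_out_dir/foo_java.dex.jar"
+  #   }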
+  template("dex") {
+    _min_sdk_version = default_min_sdk_version
+    if (defined(invoker.min_sdk_version)) {
+      _min_sdk_version = invoker.min_sdk_version
+    }
+
+    _proguard_enabled =
+        defined(invoker.proguard_enabled) && invoker.proguard_enabled
+    _is_dex_merging = defined(invoker.input_dex_filearg)
+    _enable_multidex =
+        !defined(invoker.enable_multidex) || invoker.enable_multidex
+    _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21
+    _enable_desugar = !defined(invoker.enable_desugar) || invoker.enable_desugar
+    _desugar_needs_classpath = _enable_desugar && !enable_bazel_desugar
+
+    # It's not safe to dex merge with libraries dex'ed at higher api versions.
+    assert(!_is_dex_merging || _min_sdk_version >= default_min_sdk_version)
+
+    # For D8's backported method desugaring to work properly, the dex merge step
+    # must not be set to a higher minSdkVersion than it was for the libraries.
+    if (_enable_desugar && _is_dex_merging) {
+      _min_sdk_version = default_min_sdk_version
+    }
+
+    assert(defined(invoker.output) ||
+           (_proguard_enabled && defined(invoker.modules)))
+    assert(!_proguard_enabled || !(defined(invoker.input_dex_filearg) ||
+                                       defined(invoker.input_classes_filearg) ||
+                                       defined(invoker.input_class_jars)),
+           "Cannot explicitly set inputs when proguarding a dex.")
+
+    # Dex merging should not also be dexing.
+    assert(!(_is_dex_merging && defined(invoker.input_classes_filearg)))
+    assert(!(_is_dex_merging && defined(invoker.input_class_jars)))
+
+    assert(!(defined(invoker.apply_mapping) && !_proguard_enabled),
+           "apply_mapping can only be specified if proguard is enabled.")
+
+    if (_enable_main_dex_list) {
+      _main_dex_rules = "//build/android/main_dex_classes.flags"
+    }
+
+    if (_desugar_needs_classpath || _proguard_enabled) {
+      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    }
+
+    if (_proguard_enabled) {
+      _proguard_target_name = target_name
+
+      proguard(_proguard_target_name) {
+        forward_variables_from(invoker,
+                               TESTONLY_AND_VISIBILITY + [
+                                     "build_config",
+                                     "data",
+                                     "data_deps",
+                                     "deps",
+                                     "desugar_jars_paths",
+                                     "disable_r8_outlining",
+                                     "enable_proguard_checks",
+                                     "expected_proguard_config",
+                                     "expected_proguard_config_base",
+                                     "ignore_desugar_missing_deps",
+                                     "is_static_library",
+                                     "modules",
+                                     "proguard_enable_obfuscation",
+                                     "proguard_mapping_path",
+                                     "proguard_sourcefile_suffix",
+                                     "supports_jdk_library_desugaring",
+                                     "top_target_name",
+                                   ])
+        inputs = []
+        if (defined(invoker.inputs)) {
+          inputs += invoker.inputs
+        }
+        if (defined(invoker.proguard_configs)) {
+          inputs += invoker.proguard_configs
+        }
+
+        args = [
+          "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
+          "--min-api=$_min_sdk_version",
+        ]
+        if (defined(invoker.has_apk_under_test) && invoker.has_apk_under_test) {
+          args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath_extended)" ]
+        } else {
+          args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath)" ]
+        }
+        if (enable_bazel_desugar) {
+          deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
+          inputs += [ _desugar_runtime_jar ]
+          args += [
+            "--input-paths",
+            rebase_path(_desugar_runtime_jar, root_build_dir),
+          ]
+        }
+        if (defined(invoker.proguard_args)) {
+          args += invoker.proguard_args
+        }
+
+        if (defined(invoker.apply_mapping)) {
+          _rebased_apply_mapping_path =
+              rebase_path(invoker.apply_mapping, root_build_dir)
+          args += [ "--apply-mapping=$_rebased_apply_mapping_path" ]
+        }
+
+        if (_enable_main_dex_list) {
+          if (defined(invoker.extra_main_dex_proguard_config)) {
+            args += [
+              "--main-dex-rules-path",
+              rebase_path(invoker.extra_main_dex_proguard_config,
+                          root_build_dir),
+            ]
+            inputs += [ invoker.extra_main_dex_proguard_config ]
+          }
+          args += [
+            "--main-dex-rules-path",
+            rebase_path(_main_dex_rules, root_build_dir),
+          ]
+          inputs += [ _main_dex_rules ]
+        }
+
+        if (defined(invoker.output)) {
+          output_path = invoker.output
+        } else if (!defined(proguard_mapping_path)) {
+          proguard_mapping_path = "$target_out_dir/$target_name.mapping"
+        }
+      }
+    } else {  # !_proguard_enabled
+      _is_library = defined(invoker.is_library) && invoker.is_library
+      _input_class_jars = []
+      if (defined(invoker.input_class_jars)) {
+        _input_class_jars = invoker.input_class_jars
+      }
+      _deps = invoker.deps
+
+      if (!_is_library && enable_bazel_desugar) {
+        # It would be more efficient to use the pre-dex'ed copy of the runtime,
+        # but it's easier to add it in this way.
+        _deps += [ "//third_party/bazel/desugar:desugar_runtime_java" ]
+        _input_class_jars += [ _desugar_runtime_jar ]
+      }
+      if (_input_class_jars != []) {
+        _rebased_input_class_jars =
+            rebase_path(_input_class_jars, root_build_dir)
+      }
+
+      action_with_pydeps(target_name) {
+        forward_variables_from(invoker,
+                               TESTONLY_AND_VISIBILITY + [
+                                     "data",
+                                     "data_deps",
+                                   ])
+        script = "//build/android/gyp/dex.py"
+        deps = _deps
+        depfile = "$target_gen_dir/$target_name.d"
+        outputs = [ invoker.output ]
+        inputs = [
+          _r8_path,
+          _custom_d8_path,
+        ]
+
+        if (!_is_library) {
+          # http://crbug.com/725224. Fix for bots running out of memory.
+          if (defined(java_cmd_pool_size)) {
+            pool = "//build/config/android:java_cmd_pool($default_toolchain)"
+          } else {
+            pool = "//build/toolchain:link_pool($default_toolchain)"
+          }
+        }
+
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--output",
+          rebase_path(outputs[0], root_build_dir),
+          "--min-api=$_min_sdk_version",
+          "--r8-jar-path",
+          rebase_path(_r8_path, root_build_dir),
+          "--custom-d8-jar-path",
+          rebase_path(_custom_d8_path, root_build_dir),
+
+          # Uncomment when rebuilding custom_d8.jar.
+          #"--skip-custom-d8",
+        ]
+        if (treat_warnings_as_errors) {
+          args += [ "--warnings-as-errors" ]
+        }
+
+        if (enable_incremental_d8 && !(defined(invoker.disable_incremental) &&
+                                       invoker.disable_incremental)) {
+          # Don't use incremental dexing for ProGuarded inputs as a precaution.
+          args += [
+            "--incremental-dir",
+            rebase_path("$target_out_dir/$target_name", root_build_dir),
+          ]
+        }
+
+        if (_enable_multidex) {
+          args += [ "--multi-dex" ]
+          if (_enable_main_dex_list) {
+            if (defined(invoker.extra_main_dex_proguard_config)) {
+              args += [
+                "--main-dex-rules-path",
+                rebase_path(invoker.extra_main_dex_proguard_config,
+                            root_build_dir),
+              ]
+              inputs += [ invoker.extra_main_dex_proguard_config ]
+            }
+            args += [
+              "--main-dex-rules-path",
+              rebase_path(_main_dex_rules, root_build_dir),
+            ]
+            inputs += [ _main_dex_rules ]
+          }
+        }
+        if (_is_library) {
+          args += [ "--library" ]
+        }
+        if (defined(invoker.input_dex_filearg)) {
+          inputs += [ invoker.build_config ]
+          args += [ "--dex-inputs-filearg=${invoker.input_dex_filearg}" ]
+        }
+        if (defined(invoker.input_classes_filearg)) {
+          inputs += [ invoker.build_config ]
+          args += [ "--class-inputs-filearg=${invoker.input_classes_filearg}" ]
+        }
+        if (_input_class_jars != []) {
+          inputs += _input_class_jars
+          args += [ "--class-inputs=${_rebased_input_class_jars}" ]
+        }
+
+        if (defined(invoker.dexlayout_profile)) {
+          args += [
+            "--dexlayout-profile",
+            rebase_path(invoker.dexlayout_profile, root_build_dir),
+            "--dexlayout-path",
+            rebase_path(_dexlayout_path, root_build_dir),
+            "--profman-path",
+            rebase_path(_profman_path, root_build_dir),
+            "--dexdump-path",
+            rebase_path(_dexdump_path, root_build_dir),
+          ]
+          inputs += [
+            _dexlayout_path,
+            _profman_path,
+            _dexdump_path,
+            invoker.dexlayout_profile,
+          ]
+          inputs += _default_art_libs
+        }
+
+        # Never compile intermediates with --release in order to:
+        # 1) not require recompiles when toggling is_java_debug,
+        # 2) allow incremental_install=1 to still have local variable
+        #    information even when is_java_debug=false.
+        if (!is_java_debug && !_is_library) {
+          args += [ "--release" ]
+        }
+
+        if (_enable_desugar) {
+          args += [ "--desugar" ]
+
+          # Passing --desugar-jdk-libs-json when dex merging causes invalid
+          # dex files to be created.
+          if (enable_jdk_library_desugaring && !_is_dex_merging) {
+            inputs += [ _desugar_jdk_libs_json ]
+            args += [
+              "--desugar-jdk-libs-json",
+              rebase_path(_desugar_jdk_libs_json, root_build_dir),
+            ]
+          }
+          _ignore_desugar_missing_deps =
+              defined(invoker.ignore_desugar_missing_deps) &&
+              invoker.ignore_desugar_missing_deps
+          if (!_ignore_desugar_missing_deps && !enable_bazel_desugar) {
+            args += [ "--show-desugar-default-interface-warnings" ]
+          }
+        }
+        if (_desugar_needs_classpath) {
+          _desugar_dependencies_path =
+              "$target_gen_dir/$target_name.desugardeps"
+          args += [
+            "--desugar-dependencies",
+            rebase_path(_desugar_dependencies_path, root_build_dir),
+            "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_jars)",
+
+            # Pass the full classpath to find new dependencies that are not in
+            # the .desugardeps file.
+            "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
+          ]
+          if (defined(invoker.desugar_jars_paths)) {
+            _rebased_desugar_jars_paths =
+                rebase_path(invoker.desugar_jars_paths, root_build_dir)
+            args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
+          }
+          if (defined(invoker.final_ijar_path)) {
+            # Need to include the input .interface.jar on the classpath in order to make
+            # jar_excluded_patterns classes visible to desugar.
+            args += [
+              "--classpath",
+              rebase_path(invoker.final_ijar_path, root_build_dir),
+            ]
+            inputs += [ invoker.final_ijar_path ]
+          }
+        } else {
+          not_needed(invoker, [ "desugar_jars_paths" ])
+        }
+
+        if (enable_java_asserts) {
+          # The default for generating dex file format is
+          # --force-disable-assertions.
+          args += [ "--force-enable-assertions" ]
+        }
+      }
+    }
+  }
+
+  # Variables
+  #   output: Path to output ".l8.dex".
+  #   min_sdk_version: The minimum Android SDK version this target supports.
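+  #
+  # A minimal usage sketch (names are illustrative):
+  #
+  #   dex_jdk_libs("foo_bundle__jdk_libs_dex") {
+  #     output = "$target_out_dir/foo_bundle.l8.dex"
+  #     min_sdk_version = 21
+  #   }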
+  template("dex_jdk_libs") {
+    action_with_pydeps(target_name) {
+      script = "//build/android/gyp/dex_jdk_libs.py"
+      inputs = [
+        _r8_path,
+        _desugar_jdk_libs_json,
+        _desugar_jdk_libs_jar,
+        _desugar_jdk_libs_configuration_jar,
+      ]
+      outputs = [ invoker.output ]
+      args = [
+        "--r8-path",
+        rebase_path(_r8_path, root_build_dir),
+        "--desugar-jdk-libs-json",
+        rebase_path(_desugar_jdk_libs_json, root_build_dir),
+        "--desugar-jdk-libs-jar",
+        rebase_path(_desugar_jdk_libs_jar, root_build_dir),
+        "--desugar-jdk-libs-configuration-jar",
+        rebase_path(_desugar_jdk_libs_configuration_jar, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+        "--min-api=${invoker.min_sdk_version}",
+      ]
+      if (treat_warnings_as_errors) {
+        args += [ "--warnings-as-errors" ]
+      }
+    }
+  }
+
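+  # Rewrites a .jar with JaCoCo offline instrumentation for Java code
+  # coverage via //build/android/gyp/jacoco_instr.py. A minimal usage sketch
+  # (names are illustrative):
+  #
+  #   jacoco_instr("foo_java__jacoco") {
+  #     deps = [ ":foo_java__compile_java" ]
+  #     java_files = [ "java/src/org/example/Foo.java" ]
+  #     java_sources_file = "$target_gen_dir/foo_java.sources"
+  #     input_jar_path = "$target_out_dir/foo_java.javac.jar"
+  #     output_jar_path = "$target_out_dir/foo_java.instr.jar"
+  #   }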
+  template("jacoco_instr") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "deps",
+                                   "public_deps",
+                                 ])
+
+      # The name needs to match the SOURCES_JSON_FILES_SUFFIX in
+      # generate_coverage_metadata_for_java.py.
+      _sources_json_file = "$target_out_dir/${target_name}__jacoco_sources.json"
+      _jacococli_jar = "//third_party/jacoco/lib/jacococli.jar"
+
+      script = "//build/android/gyp/jacoco_instr.py"
+      inputs = invoker.java_files + [
+                 _jacococli_jar,
+                 invoker.input_jar_path,
+               ]
+      outputs = [
+        _sources_json_file,
+        invoker.output_jar_path,
+      ]
+      args = [
+        "--input-path",
+        rebase_path(invoker.input_jar_path, root_build_dir),
+        "--output-path",
+        rebase_path(invoker.output_jar_path, root_build_dir),
+        "--sources-json-file",
+        rebase_path(_sources_json_file, root_build_dir),
+        "--java-sources-file",
+        rebase_path(invoker.java_sources_file, root_build_dir),
+        "--jacococli-jar",
+        rebase_path(_jacococli_jar, root_build_dir),
+      ]
+      if (coverage_instrumentation_input_file != "") {
+        args += [
+          "--files-to-instrument",
+          rebase_path(coverage_instrumentation_input_file, root_build_dir),
+        ]
+      }
+    }
+  }
+
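+  # Copies an input .jar to an output .jar via
+  # //build/android/gyp/filter_zip.py, optionally dropping entries that
+  # match exclusion globs. A minimal usage sketch (names are illustrative):
+  #
+  #   filter_jar("foo_java__filter") {
+  #     deps = [ ":foo_java__compile_java" ]
+  #     input_jar = "$target_out_dir/foo_java.javac.jar"
+  #     output_jar = "$target_out_dir/foo_java.filtered.jar"
+  #     jar_excluded_patterns = [ "*/R.class" ]
+  #   }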
+  template("filter_jar") {
+    action_with_pydeps(target_name) {
+      script = "//build/android/gyp/filter_zip.py"
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      inputs = [ invoker.input_jar ]
+      if (defined(invoker.inputs)) {
+        inputs += invoker.inputs
+      }
+      outputs = [ invoker.output_jar ]
+
+      _jar_excluded_patterns = []
+      if (defined(invoker.jar_excluded_patterns)) {
+        _jar_excluded_patterns = invoker.jar_excluded_patterns
+      }
+      _jar_included_patterns = []
+      if (defined(invoker.jar_included_patterns)) {
+        _jar_included_patterns = invoker.jar_included_patterns
+      }
+      _strip_resource_classes = defined(invoker.strip_resource_classes) &&
+                                invoker.strip_resource_classes
+      args = [
+        "--input",
+        rebase_path(invoker.input_jar, root_build_dir),
+        "--output",
+        rebase_path(invoker.output_jar, root_build_dir),
+        "--exclude-globs=${_jar_excluded_patterns}",
+        "--include-globs=${_jar_included_patterns}",
+      ]
+      if (_strip_resource_classes) {
+        inputs += [ invoker.build_config ]
+        _rebased_build_config =
+            rebase_path(invoker.build_config, root_build_dir)
+        args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ]
+      }
+    }
+  }
+
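+  # Drives the post-compile steps for a .jar: optional desugaring, filtering
+  # and JaCoCo instrumentation, producing a host copy in lib.java and/or a
+  # device copy. A minimal usage sketch (names are illustrative):
+  #
+  #   process_java_prebuilt("foo_java__process") {
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     build_config_dep = ":foo_java__build_config"
+  #     jar_deps = [ ":foo_java__compile_java" ]
+  #     input_jar_path = "$target_out_dir/foo_java.javac.jar"
+  #     host_jar_path = "$root_build_dir/lib.java/foo_java.jar"
+  #   }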
+  template("process_java_prebuilt") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    not_needed([ "_rebased_build_config" ])
+    not_needed(invoker, [ "build_config_dep" ])
+
+    _deps = invoker.jar_deps
+    _previous_output_jar = invoker.input_jar_path
+
+    # Create the .jar in lib.java for use by java_binary.
+    if (defined(invoker.host_jar_path)) {
+      if (defined(invoker.jacoco_instrument) && invoker.jacoco_instrument) {
+        _filter_jar_target_name = "${target_name}_host__filter_jar"
+        _filter_jar_output_jar = "$target_out_dir/$target_name.host_filter.jar"
+      } else {
+        _filter_jar_target_name = "${target_name}_host"
+        _filter_jar_output_jar = invoker.host_jar_path
+      }
+      filter_jar(_filter_jar_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "jar_excluded_patterns",
+                                 "jar_included_patterns",
+                                 "strip_resource_classes",
+                               ])
+        deps = _deps
+        input_jar = _previous_output_jar
+        output_jar = _filter_jar_output_jar
+        inputs = []
+        if (defined(strip_resource_classes) && strip_resource_classes) {
+          # filter_jar() reads invoker.build_config, adds it to its inputs
+          # and passes --strip-resource-classes-for itself; this invocation
+          # only needs to supply build_config and the dep that generates it.
+          deps += [ invoker.build_config_dep ]
+          build_config = invoker.build_config
+        }
+        if (defined(invoker.inputs)) {
+          inputs += invoker.inputs
+          deps += invoker.input_deps
+        }
+      }
+
+      if (defined(invoker.jacoco_instrument) && invoker.jacoco_instrument) {
+        # Jacoco must run after desugar (or else desugar sometimes fails).
+        # It must run after filtering to avoid the same (filtered) class mapping
+        # to multiple .jar files.
+        # We run offline code coverage processing here rather than with a
+        # javaagent as the desired coverage data was not being generated.
+        # See crbug.com/1097815.
+        jacoco_instr("${target_name}_host") {
+          deps = [ ":$_filter_jar_target_name" ] + invoker.jar_deps
+          forward_variables_from(invoker,
+                                 [
+                                   "java_files",
+                                   "java_sources_file",
+                                 ])
+
+          input_jar_path = _filter_jar_output_jar
+          output_jar_path = invoker.host_jar_path
+        }
+      }
+    }
+
+    if (defined(invoker.device_jar_path)) {
+      if (invoker.enable_desugar) {
+        _desugar_target = "${target_name}_device__desugar"
+        _desugar_output_jar = "$target_out_dir/$target_name.desugar.jar"
+
+        action_with_pydeps(_desugar_target) {
+          script = "//build/android/gyp/desugar.py"
+          deps = _deps + invoker.classpath_deps
+          depfile = "$target_gen_dir/$target_name.d"
+          _desugar_jar = "//third_party/bazel/desugar/Desugar.jar"
+
+          inputs = [
+            invoker.build_config,
+            _previous_output_jar,
+            _desugar_jar,
+          ]
+          outputs = [ _desugar_output_jar ]
+          args = [
+            "--desugar-jar",
+            rebase_path(_desugar_jar, root_build_dir),
+            "--input-jar",
+            rebase_path(_previous_output_jar, root_build_dir),
+            "--output-jar",
+            rebase_path(_desugar_output_jar, root_build_dir),
+
+            # Temporarily using javac_full_interface_classpath until
+            # classpath validation of targets is implemented, see
+            # http://crbug.com/885273
+            "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
+            "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)",
+            "--depfile",
+            rebase_path(depfile, root_build_dir),
+          ]
+          if (defined(invoker.desugar_jars_paths)) {
+            _rebased_desugar_jars_paths =
+                rebase_path(invoker.desugar_jars_paths, root_build_dir)
+            args += [ "--classpath=${_rebased_desugar_jars_paths}" ]
+          }
+          if (treat_warnings_as_errors) {
+            args += [ "--warnings-as-errors" ]
+          }
+        }
+
+        _deps = []
+        _deps = [ ":$_desugar_target" ]
+        _previous_output_jar = _desugar_output_jar
+      }
+
+      if (invoker.jacoco_instrument) {
+        _filter_jar_target_name = "${target_name}_device__filter_jar"
+        _filter_jar_output_jar =
+            "$target_out_dir/$target_name.device_filter.jar"
+      } else {
+        _filter_jar_target_name = "${target_name}_device"
+        _filter_jar_output_jar = invoker.device_jar_path
+      }
+      filter_jar(_filter_jar_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "jar_excluded_patterns",
+                                 "jar_included_patterns",
+                                 "strip_resource_classes",
+                               ])
+        deps = _deps
+        input_jar = _previous_output_jar
+        output_jar = _filter_jar_output_jar
+        inputs = []
+        if (defined(strip_resource_classes) && strip_resource_classes) {
+          # filter_jar() reads invoker.build_config, adds it to its inputs
+          # and passes --strip-resource-classes-for itself; this invocation
+          # only needs to supply build_config and the dep that generates it.
+          deps += [ invoker.build_config_dep ]
+          build_config = invoker.build_config
+        }
+        if (!defined(invoker.host_jar_path) && defined(invoker.inputs)) {
+          inputs += invoker.inputs
+          deps += invoker.input_deps
+        }
+      }
+
+      if (invoker.jacoco_instrument) {
+        # Jacoco must run after desugar (or else desugar sometimes fails).
+        # It must run after filtering to avoid the same (filtered) class mapping
+        # to multiple .jar files.
+        jacoco_instr("${target_name}_device") {
+          deps = [ ":$_filter_jar_target_name" ] + invoker.jar_deps
+          forward_variables_from(invoker,
+                                 [
+                                   "java_files",
+                                   "java_sources_file",
+                                 ])
+
+          input_jar_path = _filter_jar_output_jar
+          output_jar_path = invoker.device_jar_path
+        }
+      }
+    }
+  }
+
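+  # Checks that a .jar only references classes available on its declared GN
+  # classpath, using the bin/helper/bytecode_processor wrapper script. A
+  # minimal usage sketch (names are illustrative):
+  #
+  #   bytecode_processor("foo_java__validate_classpath") {
+  #     deps = [ ":foo_java__build_config" ]
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     input_jar = "$target_out_dir/foo_java.javac.jar"
+  #     target_label = "//foo:foo_java"
+  #     requires_android = true
+  #     is_prebuilt = false
+  #   }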
+  template("bytecode_processor") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      _bytecode_checker_script = "$root_build_dir/bin/helper/bytecode_processor"
+      script = "//build/android/gyp/bytecode_processor.py"
+      inputs = [
+        invoker.build_config,
+        invoker.input_jar,
+        _bytecode_checker_script,
+      ]
+      outputs = [ "$target_out_dir/$target_name.bytecode.stamp" ]
+      deps =
+          invoker.deps +
+          [ "//build/android/bytecode:bytecode_processor($default_toolchain)" ]
+      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+      args = [
+        "--target-name",
+        get_label_info(":$target_name", "label_no_toolchain"),
+        "--script",
+        rebase_path(_bytecode_checker_script, root_build_dir),
+        "--gn-target=${invoker.target_label}",
+        "--input-jar",
+        rebase_path(invoker.input_jar, root_build_dir),
+        "--stamp",
+        rebase_path(outputs[0], root_build_dir),
+        "--direct-classpath-jars=@FileArg($_rebased_build_config:javac:classpath)",
+        "--full-classpath-jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+        "--full-classpath-gn-targets=@FileArg($_rebased_build_config:deps_info:javac_full_classpath_targets)",
+      ]
+      if (invoker.requires_android) {
+        args += [ "--sdk-classpath-jars=@FileArg($_rebased_build_config:android:sdk_jars)" ]
+      }
+      if (invoker.is_prebuilt) {
+        args += [ "--is-prebuilt" ]
+      }
+      if (treat_warnings_as_errors) {
+        args += [ "--warnings-as-errors" ]
+      }
+      if (defined(invoker.missing_classes_allowlist)) {
+        args += [
+          "--missing-classes-allowlist=${invoker.missing_classes_allowlist}",
+        ]
+      }
+    }
+  }
+
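+  # Merges a root AndroidManifest.xml with the extra manifests listed in the
+  # .build_config via //build/android/gyp/merge_manifest.py. A minimal usage
+  # sketch (names are illustrative):
+  #
+  #   merge_manifests("foo_apk__merge_manifests") {
+  #     deps = [ ":foo_apk__build_config" ]
+  #     build_config = "$target_gen_dir/foo_apk.build_config"
+  #     input_manifest = "AndroidManifest.xml"
+  #     output_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
+  #     min_sdk_version = 21
+  #     target_sdk_version = 30
+  #   }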
+  template("merge_manifests") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      script = "//build/android/gyp/merge_manifest.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      inputs = [
+        invoker.build_config,
+        invoker.input_manifest,
+      ]
+
+      outputs = [ invoker.output_manifest ]
+      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--android-sdk-cmdline-tools",
+        rebase_path("${public_android_sdk_root}/cmdline-tools/latest",
+                    root_build_dir),
+        "--root-manifest",
+        rebase_path(invoker.input_manifest, root_build_dir),
+        "--output",
+        rebase_path(invoker.output_manifest, root_build_dir),
+        "--extras",
+        "@FileArg($_rebased_build_config:extra_android_manifests)",
+        "--min-sdk-version=${invoker.min_sdk_version}",
+        "--target-sdk-version=${invoker.target_sdk_version}",
+      ]
+
+      if (defined(invoker.manifest_package)) {
+        args += [ "--manifest-package=${invoker.manifest_package}" ]
+      }
+
+      if (defined(invoker.max_sdk_version)) {
+        args += [ "--max-sdk-version=${invoker.max_sdk_version}" ]
+      }
+
+      if (treat_warnings_as_errors) {
+        args += [ "--warnings-as-errors" ]
+      }
+    }
+  }
+
+  # This template is used to parse a set of resource directories and
+  # create the R.txt, .srcjar and .resources.zip for it.
+  #
+  # Input variables:
+  #   deps: Specifies the input dependencies for this target.
+  #
+  #   build_config: Path to the .build_config file corresponding to the target.
+  #
+  #   resource_dirs (Deprecated):
+  #     ** This is deprecated, please specify files using |sources| parameter **
+  #     List of directories containing Android resources, layout should be
+  #     similar to what aapt -S <dir> expects.
+  #
+  #   sources:
+  #     List of input resource files.
+  #
+  #   custom_package: (optional)
+  #     Package name for the generated R.java source file. Optional if
+  #     android_manifest is not provided.
+  #
+  #   android_manifest: (optional)
+  #     If custom_package is not provided, path to an AndroidManifest.xml file
+  #     that is only used to extract a package name out of it.
+  #
+  #   r_text_in_path: (optional)
+  #     Path to an input R.txt file to use to generate the R.java file.
+  #     The default is to use 'aapt' to generate the file from the content
+  #     of the resource directories.
+  #
+  # Output variables:
+  #   resources_zip:
+  #     Path to a .resources.zip that will simply contain all the
+  #     input resources, collected in a single archive.
+  #
+  #   r_text_out_path: Path for the generated R.txt file.
+  #
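+  # A minimal usage sketch (names are illustrative):
+  #
+  #   prepare_resources("foo__resources") {
+  #     deps = [ ":foo_resources_dep" ]
+  #     res_sources_path = "$target_gen_dir/foo.res.sources"
+  #     resources_zip = "$target_gen_dir/foo.resources.zip"
+  #     r_text_out_path = "$target_gen_dir/foo_R.txt"
+  #   }
+  #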
+  template("prepare_resources") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "deps",
+                                   "sources",
+                                 ])
+      script = "//build/android/gyp/prepare_resources.py"
+
+      depfile = "$target_gen_dir/${invoker.target_name}.d"
+      outputs = [
+        invoker.resources_zip,
+        invoker.resources_zip + ".info",
+        invoker.r_text_out_path,
+      ]
+
+      inputs = [ invoker.res_sources_path ]
+
+      _rebased_res_sources_path =
+          rebase_path(invoker.res_sources_path, root_build_dir)
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--res-sources-path=$_rebased_res_sources_path",
+        "--resource-zip-out",
+        rebase_path(invoker.resources_zip, root_build_dir),
+        "--r-text-out",
+        rebase_path(invoker.r_text_out_path, root_build_dir),
+      ]
+
+      if (defined(invoker.r_text_in_path)) {
+        _r_text_in_path = invoker.r_text_in_path
+        inputs += [ _r_text_in_path ]
+        args += [
+          "--r-text-in",
+          rebase_path(_r_text_in_path, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.strip_drawables) && invoker.strip_drawables) {
+        args += [ "--strip-drawables" ]
+      }
+    }
+  }
+
+  # A template that is used to compile all resources needed by a binary
+  # (e.g. an android_apk or a junit_binary) into an intermediate .ap_
+  # archive. It can also generate an associated .srcjar that contains the
+  # final R.java sources for all resource packages the binary depends on.
+  #
+  # Input variables:
+  #   android_sdk_dep: The sdk dep that these resources should compile against.
+  #
+  #   deps: Specifies the input dependencies for this target.
+  #
+  #   build_config: Path to the .build_config file corresponding to the target.
+  #
+  #   build_config_dep: Dep target to generate the .build_config file.
+  #
+  #   android_manifest: Path to root manifest for the binary.
+  #
+  #   version_code: (optional)
+  #
+  #   version_name: (optional)
+  #
+  #   shared_resources: (optional)
+  #     If true, make all variables in each generated R.java file non-final,
+  #     and provide an onResourcesLoaded() method that can be used to reset
+  #     their package index at load time. Useful when the APK corresponds to
+  #     a library that is loaded at runtime, like system_webview_apk or
+  #     monochrome_apk.
+  #
+  #   app_as_shared_lib: (optional)
+  #     If true, same effect as shared_resources, but also ensures that the
+  #     resources can be used by the APK when it is loaded as a regular
+  #     application as well. Useful for the monochrome_public_apk target
+  #     which is both an application and a shared runtime library that
+  #     implements the system webview feature.
+  #
+  #   shared_resources_allowlist: (optional)
+  #     Path to an R.txt file. If provided, acts similar to shared_resources
+  #     except that it restricts the list of non-final resource variables
+  #     to the list from the input R.txt file. Overrides shared_resources
+  #     when both are specified.
+  #
+  #   shared_resources_allowlist_locales: (optional)
+  #     If shared_resources_allowlist is used, provide an optional list of
+  #     Chromium locale names to determine which localized shared string
+  #     resources to put in the final output, even if aapt_locale_allowlist
+  #     is defined to a smaller subset.
+  #
+  #   support_zh_hk: (optional)
+  #     If true, support zh-HK in Chrome on Android by using the resources
+  #     from zh-TW. See https://crbug.com/780847.
+  #
+  #   aapt_locale_allowlist: (optional)
+  #     Restrict compiled locale-dependent resources to a specific allowlist.
+  #     NOTE: This is a list of Chromium locale names, not Android ones.
+  #
+  #   r_java_root_package_name: (optional)
+  #     Short package name for this target's root R java file (e.g. an input
+  #     of "base" becomes "gen.base_module" as the root R java package name).
+  #     Optional; defaults to "base".
+  #
+  #   resource_exclusion_regex: (optional)
+  #
+  #   resource_exclusion_exceptions: (optional)
+  #
+  #   resource_values_filter_rules: (optional)
+  #
+  #   no_xml_namespaces: (optional)
+  #
+  #   png_to_webp: (optional)
+  #     If true, convert all PNG resources (except 9-patch files) to WebP.
+  #
+  #   post_process_script: (optional)
+  #
+  #   package_name: (optional)
+  #     Name of the package for the purpose of creating R class.
+  #
+  #   package_id: (optional)
+  #     Use a custom package ID in resource IDs.
+  #
+  #   arsc_package_name: (optional)
+  #     Use this package name in the arsc file rather than the package name
+  #     found in the AndroidManifest.xml. Does not affect the package name
+  #     used in AndroidManifest.xml.
+  #
+  #   resource_ids_provider_dep: (optional)
+  #     Use resource IDs provided by another APK target when compiling resources
+  #     (via "aapt2 link --stable-ids").
+  #
+  #   short_resource_paths: (optional)
+  #     Rename the paths within the apk to be randomly generated short
+  #     strings to reduce binary size.
+  #
+  #   strip_resource_names: (optional)
+  #     Strip resource names from the resources table of the apk.
+  #
+  # Output variables:
+  #   arsc_output: Path to output .ap_ file (optional).
+  #
+  #   proto_output: Path to output .proto.ap_ file (optional).
+  #
+  #   optimized_arsc_output: Path to optimized .ap_ file (optional).
+  #
+  #   optimized_proto_output: Path to optimized .proto.ap_ file (optional).
+  #
+  #   r_text_out_path: (optional)
+  #       Path for the corresponding generated R.txt file.
+  #
+  #   resources_path_map_out_path: (optional)
+  #       Path for the generated map between original resource paths and
+  #       shortened resource paths.
+  #
+  #   proguard_file: (optional)
+  #       Path to proguard configuration file for this apk target.
+  #
+  #   proguard_file_main_dex: (optional)
+  #
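+  # Example (an illustrative sketch only; target and path names below are
+  # hypothetical and several optional variables are omitted):
+  #
+  #   compile_resources("foo__compile_resources") {
+  #     android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
+  #     build_config = "$target_gen_dir/foo.build_config"
+  #     build_config_dep = ":foo__build_config"
+  #     android_manifest = "AndroidManifest.xml"
+  #     min_sdk_version = 21
+  #     target_sdk_version = 30
+  #     deps = [ ":foo_resources" ]
+  #     arsc_output = "$target_out_dir/foo.ap_"
+  #     r_text_out_path = "$target_gen_dir/foo_R.txt"
+  #   }
+  #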
+  template("compile_resources") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _deps = [
+      invoker.android_sdk_dep,
+      invoker.build_config_dep,
+    ]
+    if (defined(invoker.android_manifest_dep)) {
+      _deps += [ invoker.android_manifest_dep ]
+    }
+    foreach(_dep, invoker.deps) {
+      _target_label = get_label_info(_dep, "label_no_toolchain")
+      if (filter_exclude([ _target_label ], _java_library_patterns) == [] &&
+          filter_exclude([ _target_label ], _java_resource_patterns) != []) {
+        # Depend on the java libraries' transitive __assetres target instead.
+        _deps += [ "${_target_label}__assetres" ]
+      } else {
+        _deps += [ _dep ]
+      }
+    }
+
+    if (defined(invoker.arsc_output)) {
+      _arsc_output = invoker.arsc_output
+    }
+    if (defined(invoker.optimized_arsc_output)) {
+      _optimized_arsc_output = invoker.optimized_arsc_output
+    }
+    _final_srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+
+    _script = "//build/android/gyp/compile_resources.py"
+
+    _inputs = [
+      invoker.build_config,
+      android_sdk_tools_bundle_aapt2,
+    ]
+
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+    _args = [
+      "--include-resources=@FileArg($_rebased_build_config:android:sdk_jars)",
+      "--aapt2-path",
+      rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+      "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)",
+      "--extra-res-packages=@FileArg($_rebased_build_config:deps_info:extra_package_names)",
+      "--extra-main-r-text-files=@FileArg($_rebased_build_config:deps_info:extra_main_r_text_files)",
+      "--min-sdk-version=${invoker.min_sdk_version}",
+      "--target-sdk-version=${invoker.target_sdk_version}",
+      "--webp-cache-dir=obj/android-webp-cache",
+    ]
+
+    _inputs += [ invoker.android_manifest ]
+    _outputs = [ _final_srcjar_path ]
+    _args += [
+      "--android-manifest",
+      rebase_path(invoker.android_manifest, root_build_dir),
+      "--srcjar-out",
+      rebase_path(_final_srcjar_path, root_build_dir),
+    ]
+    if (defined(invoker.no_xml_namespaces) && invoker.no_xml_namespaces) {
+      _args += [ "--no-xml-namespaces" ]
+    }
+    if (defined(invoker.version_code)) {
+      _args += [
+        "--version-code",
+        invoker.version_code,
+      ]
+    }
+    if (defined(invoker.version_name)) {
+      _args += [
+        "--version-name",
+        invoker.version_name,
+      ]
+    }
+    if (defined(_arsc_output)) {
+      _outputs += [ _arsc_output ]
+      _args += [
+        "--arsc-path",
+        rebase_path(_arsc_output, root_build_dir),
+      ]
+    }
+    if (defined(invoker.proto_output)) {
+      _outputs += [ invoker.proto_output ]
+      _args += [
+        "--proto-path",
+        rebase_path(invoker.proto_output, root_build_dir),
+      ]
+    }
+    if (defined(invoker.size_info_path)) {
+      _outputs += [ invoker.size_info_path ]
+      _args += [
+        "--info-path",
+        rebase_path(invoker.size_info_path, root_build_dir),
+      ]
+    }
+    if (defined(_optimized_arsc_output)) {
+      _outputs += [ _optimized_arsc_output ]
+      _args += [
+        "--optimized-arsc-path",
+        rebase_path(_optimized_arsc_output, root_build_dir),
+      ]
+    }
+    if (defined(invoker.optimized_proto_output)) {
+      _outputs += [ invoker.optimized_proto_output ]
+      _args += [
+        "--optimized-proto-path",
+        rebase_path(invoker.optimized_proto_output, root_build_dir),
+      ]
+    }
+    if (defined(invoker.resources_config_paths)) {
+      _inputs += invoker.resources_config_paths
+      _rebased_resource_configs =
+          rebase_path(invoker.resources_config_paths, root_build_dir)
+      _args += [ "--resources-config-paths=${_rebased_resource_configs}" ]
+    }
+    if (defined(invoker.short_resource_paths) && invoker.short_resource_paths) {
+      _args += [ "--short-resource-paths" ]
+      if (defined(invoker.resources_path_map_out_path)) {
+        _outputs += [ invoker.resources_path_map_out_path ]
+        _args += [
+          "--resources-path-map-out-path",
+          rebase_path(invoker.resources_path_map_out_path, root_build_dir),
+        ]
+      }
+    }
+
+    if (defined(invoker.r_java_root_package_name)) {
+      _args += [
+        "--r-java-root-package-name",
+        invoker.r_java_root_package_name,
+      ]
+    }
+
+    if (defined(invoker.strip_resource_names) && invoker.strip_resource_names) {
+      _args += [ "--strip-resource-names" ]
+    }
+
+    # Useful to have android:debuggable in the manifest even for Release
+    # builds. Just omit it for official builds.
+    if (debuggable_apks) {
+      _args += [ "--debuggable" ]
+    }
+
+    if (defined(invoker.r_text_out_path)) {
+      _outputs += [ invoker.r_text_out_path ]
+      _args += [
+        "--r-text-out",
+        rebase_path(invoker.r_text_out_path, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.rename_manifest_package)) {
+      _args += [
+        "--rename-manifest-package",
+        invoker.rename_manifest_package,
+      ]
+    }
+
+    # Define the flags related to shared resources.
+    #
+    # Note the small sanity check to ensure that the package ID of the
+    # generated resources table is correct. It should be 0x02 for runtime
+    # shared libraries, and 0x7f otherwise.
+
+    if (defined(invoker.shared_resources) && invoker.shared_resources) {
+      _args += [ "--shared-resources" ]
+    }
+    if (defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib) {
+      _args += [ "--app-as-shared-lib" ]
+    }
+    if (defined(invoker.package_id)) {
+      _args += [ "--package-id=${invoker.package_id}" ]
+    }
+    if (defined(invoker.package_name)) {
+      _args += [
+        "--package-name",
+        invoker.package_name,
+      ]
+    }
+    if (defined(invoker.arsc_package_name)) {
+      _args += [
+        "--arsc-package-name",
+        invoker.arsc_package_name,
+      ]
+    }
+
+    if (defined(invoker.shared_resources_allowlist)) {
+      _inputs += [ invoker.shared_resources_allowlist ]
+      _args += [
+        "--shared-resources-allowlist",
+        rebase_path(invoker.shared_resources_allowlist, root_build_dir),
+      ]
+    }
+    if (defined(invoker.shared_resources_allowlist_locales)) {
+      _args += [ "--shared-resources-allowlist-locales=" +
+                 "${invoker.shared_resources_allowlist_locales}" ]
+    }
+
+    if (!defined(testonly) || !testonly ||
+        (defined(invoker.enforce_resource_overlays_in_tests) &&
+         invoker.enforce_resource_overlays_in_tests)) {
+      _args += [ "--dependencies-res-zip-overlays=@FileArg($_rebased_build_config:deps_info:dependency_zip_overlays)" ]
+    } else {
+      _args += [ "--dependencies-res-zip-overlays=@FileArg($_rebased_build_config:deps_info:dependency_zips)" ]
+    }
+
+    if (defined(invoker.proguard_file)) {
+      _outputs += [ invoker.proguard_file ]
+      _args += [
+        "--proguard-file",
+        rebase_path(invoker.proguard_file, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.proguard_file_main_dex)) {
+      _outputs += [ invoker.proguard_file_main_dex ]
+      _args += [
+        "--proguard-file-main-dex",
+        rebase_path(invoker.proguard_file_main_dex, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.aapt_locale_allowlist)) {
+      _args += [ "--locale-allowlist=${invoker.aapt_locale_allowlist}" ]
+    }
+    if (defined(invoker.png_to_webp) && invoker.png_to_webp) {
+      _webp_target = "//third_party/libwebp:cwebp($host_toolchain)"
+      _webp_binary = get_label_info(_webp_target, "root_out_dir") + "/cwebp"
+      _deps += [ _webp_target ]
+      _inputs += [ _webp_binary ]
+      _args += [
+        "--png-to-webp",
+        "--webp-binary",
+        rebase_path(_webp_binary, root_build_dir),
+      ]
+    }
+    if (defined(invoker.resource_exclusion_regex)) {
+      _args +=
+          [ "--resource-exclusion-regex=${invoker.resource_exclusion_regex}" ]
+      if (defined(invoker.resource_exclusion_exceptions)) {
+        _args += [ "--resource-exclusion-exceptions=${invoker.resource_exclusion_exceptions}" ]
+      }
+    }
+    if (defined(invoker.resource_values_filter_rules)) {
+      _args +=
+          [ "--values-filter-rules=${invoker.resource_values_filter_rules}" ]
+    }
+
+    if (defined(invoker.support_zh_hk) && invoker.support_zh_hk) {
+      _args += [ "--support-zh-hk" ]
+    }
+
+    if (defined(invoker.include_resource)) {
+      _rebased_include_resources =
+          rebase_path(invoker.include_resource, root_build_dir)
+      _args += [ "--include-resources=$_rebased_include_resources" ]
+    }
+
+    if (defined(invoker._args)) {
+      _args += invoker._args
+    }
+
+    if (defined(invoker.emit_ids_out_path)) {
+      _outputs += [ invoker.emit_ids_out_path ]
+      _rebased_emit_ids_path =
+          rebase_path(invoker.emit_ids_out_path, root_out_dir)
+      _args += [ "--emit-ids-out=$_rebased_emit_ids_path" ]
+    }
+
+    if (defined(invoker.resource_ids_provider_dep)) {
+      _compile_res_dep =
+          "${invoker.resource_ids_provider_dep}__compile_resources"
+      _gen_dir = get_label_info(_compile_res_dep, "target_gen_dir")
+      _name = get_label_info(_compile_res_dep, "name")
+      _resource_ids_path = "$_gen_dir/$_name.resource_ids"
+      _inputs += [ _resource_ids_path ]
+      _rebased_ids_path = rebase_path(_resource_ids_path, root_out_dir)
+      _args += [ "--use-resource-ids-path=$_rebased_ids_path" ]
+      _deps += [ _compile_res_dep ]
+    }
+
+    if (defined(invoker.max_sdk_version)) {
+      _max_sdk_version = invoker.max_sdk_version
+      _args += [ "--max-sdk-version=$_max_sdk_version" ]
+    }
+
+    if (defined(invoker.manifest_package)) {
+      _args += [ "--manifest-package=${invoker.manifest_package}" ]
+    }
+
+    if (defined(invoker.is_bundle_module) && invoker.is_bundle_module) {
+      _args += [ "--is-bundle-module" ]
+    }
+
+    if (defined(invoker.uses_split)) {
+      assert(invoker.is_bundle_module)
+      _args += [ "--uses-split=${invoker.uses_split}" ]
+    }
+
+    if (defined(invoker.expected_android_manifest)) {
+      _expectations_target =
+          "${invoker.top_target_name}_validate_android_manifest"
+      action_with_pydeps(_expectations_target) {
+        _actual_file = "${invoker.android_manifest}.normalized"
+        _failure_file =
+            "$expectations_failure_dir/" +
+            string_replace(invoker.expected_android_manifest, "/", "_")
+        inputs = [
+          invoker.android_manifest,
+          invoker.build_config,
+          invoker.expected_android_manifest,
+        ]
+        outputs = [
+          _actual_file,
+          _failure_file,
+        ]
+        deps = [
+          invoker.android_manifest_dep,
+          invoker.build_config_dep,
+        ]
+        script = _script
+        args = _args + [
+                 "--expected-file",
+                 rebase_path(invoker.expected_android_manifest, root_build_dir),
+                 "--actual-file",
+                 rebase_path(_actual_file, root_build_dir),
+                 "--failure-file",
+                 rebase_path(_failure_file, root_build_dir),
+                 "--only-verify-expectations",
+               ]
+        if (defined(invoker.expected_android_manifest_base)) {
+          args += [
+            "--expected-file-base",
+            rebase_path(invoker.expected_android_manifest_base, root_build_dir),
+          ]
+          inputs += [ invoker.expected_android_manifest_base ]
+        }
+        if (fail_on_android_expectations) {
+          args += [ "--fail-on-expectations" ]
+        }
+        if (defined(invoker.extra_verification_manifest)) {
+          inputs += [ invoker.extra_verification_manifest ]
+          args += [
+            "--extra-verification-manifest",
+            rebase_path(invoker.extra_verification_manifest, root_build_dir),
+          ]
+          if (defined(invoker.extra_verification_manifest_dep)) {
+            deps += [ invoker.extra_verification_manifest_dep ]
+          }
+        }
+      }
+      _deps += [ ":$_expectations_target" ]
+    }
+
+    action_with_pydeps(target_name) {
+      script = _script
+      depfile = "$target_gen_dir/${target_name}.d"
+      inputs = _inputs
+      outputs = _outputs
+      deps = _deps
+      args = _args + [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+             ]
+    }
+  }
+
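+  # Runs the resources shrinker to detect unused resources, writing the
+  # resulting config to invoker.output_config. A hedged usage sketch
+  # (names below are hypothetical):
+  #
+  #   unused_resources("foo__unused_resources") {
+  #     build_config = "$target_gen_dir/foo.build_config"
+  #     proguard_mapping_path = "$target_out_dir/foo.mapping"
+  #     output_config = "$target_gen_dir/foo_unused_resources.config"
+  #     deps = [ ":foo__build_config" ]
+  #   }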
+  template("unused_resources") {
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    _shrinker_dep = "//build/android/gyp/resources_shrinker:resources_shrinker"
+    _shrinker_script = "$root_build_dir/bin/helper/resources_shrinker"
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      script = "//build/android/gyp/resources_shrinker/shrinker.py"
+      inputs = [
+        invoker.build_config,
+        invoker.proguard_mapping_path,
+        _shrinker_script,
+      ]
+      outputs = [ invoker.output_config ]
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ _shrinker_dep ]
+      args = [
+        "--script",
+        rebase_path(_shrinker_script, root_build_dir),
+        "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)",
+        "--proguard-mapping",
+        rebase_path(invoker.proguard_mapping_path, root_build_dir),
+        "--r-text=@FileArg($_rebased_build_config:deps_info:r_text_path)",
+        "--dex=@FileArg($_rebased_build_config:final_dex:path)",
+        "--android-manifest=@FileArg($_rebased_build_config:deps_info:android_manifest)",
+        "--output-config",
+        rebase_path(invoker.output_config, root_build_dir),
+      ]
+    }
+  }
+
+  # Creates size-info files (.jar.info, .pak.info, .res.info) by merging
+  # several per-dependency info files into one of each.
+  #
+  # Variables:
+  #   build_config: Path to APK's build config file. Used to extract the
+  #       list of input .jar files from its dependencies.
+  #   name: Name of the apk or app bundle (e.g. "Foo.apk").
+  #   res_size_info_path: Path to input .ap_.info file (for apks).
+  #
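+  # Example (hypothetical names, shown only as a sketch):
+  #
+  #   create_size_info_files("foo_size_info") {
+  #     name = "Foo.apk"
+  #     build_config = "$target_gen_dir/foo.build_config"
+  #     res_size_info_path = "$target_out_dir/foo.ap_.info"
+  #     deps = [ ":foo__build_config" ]
+  #   }
+  #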
+  template("create_size_info_files") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      script = "//build/android/gyp/create_size_info_files.py"
+      _jar_info_path = "$root_build_dir/size-info/${invoker.name}.jar.info"
+      _pak_info_path = "$root_build_dir/size-info/${invoker.name}.pak.info"
+      _res_info_path = "$root_build_dir/size-info/${invoker.name}.res.info"
+      outputs = [
+        _jar_info_path,
+        _pak_info_path,
+        _res_info_path,
+      ]
+      depfile = "$target_gen_dir/$target_name.d"
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--jar-info-path",
+        rebase_path(_jar_info_path, root_build_dir),
+        "--pak-info-path",
+        rebase_path(_pak_info_path, root_build_dir),
+        "--res-info-path",
+        rebase_path(_res_info_path, root_build_dir),
+      ]
+      _is_bundle = defined(invoker.module_build_configs)
+      if (_is_bundle) {
+        inputs = invoker.module_build_configs
+        foreach(_build_config, invoker.module_build_configs) {
+          _rebased_build_config = rebase_path(_build_config, root_build_dir)
+          args += [
+            "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)",
+            "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+            "--in-res-info-path=@FileArg($_rebased_build_config:deps_info:res_size_info)",
+            "--assets=@FileArg($_rebased_build_config:assets)",
+            "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+          ]
+        }
+      } else {
+        inputs = [
+          invoker.build_config,
+          invoker.res_size_info_path,
+        ]
+        _rebased_build_config =
+            rebase_path(invoker.build_config, root_build_dir)
+        args += [
+          "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)",
+          "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+          "--in-res-info-path",
+          rebase_path(invoker.res_size_info_path, root_build_dir),
+          "--assets=@FileArg($_rebased_build_config:assets)",
+          "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+        ]
+      }
+    }
+  }
+
+  # Creates a signed and aligned .apk.
+  #
+  # Variables
+  #   apk_name: (optional) APK name (without .apk suffix). If provided, will
+  #       be used to generate .info files later used by the supersize tool.
+  #   assets_build_config: Path to android_apk .build_config containing merged
+  #       asset information.
+  #   deps: Specifies the dependencies of this target.
+  #   dex_path: Path to classes.dex file to include (optional).
+  #   expected_libs_and_assets: Verify the list of included native libraries
+  #     and assets is consistent with the given expectation file.
+  #   expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+  #     with this file as the base.
+  #   jdk_libs_dex: Path to classes.dex for desugar_jdk_libs.
+  #   packaged_resources_path: Path to .ap_ to use.
+  #   output_apk_path: Output path for the generated .apk.
+  #   min_sdk_version: The minimum Android SDK version this target supports.
+  #   native_lib_placeholders: List of placeholder filenames to add to the apk
+  #     (optional).
+  #   secondary_native_lib_placeholders: List of placeholder filenames to add to
+  #     the apk for the secondary ABI (optional).
+  #   loadable_modules: List of native libraries.
+  #   native_libs_filearg: @FileArg() of additional native libraries.
+  #   secondary_abi_loadable_modules: (optional) List of native libraries for
+  #     secondary ABI.
+  #   secondary_abi_native_libs_filearg: (optional). @FileArg() of additional
+  #     secondary ABI native libs.
+  #   keystore_path: Path to keystore to use for signing.
+  #   keystore_name: Key alias to use.
+  #   keystore_password: Keystore password.
+  #   uncompress_shared_libraries: (optional, default false) Whether to store
+  #     native libraries inside the APK uncompressed and page-aligned.
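+  #
+  # Example (a minimal sketch; all names and key values below are
+  # hypothetical, and many optional variables are omitted):
+  #
+  #   package_apk("foo__package_apk") {
+  #     build_config = "$target_gen_dir/foo.build_config"
+  #     keystore_path = "//build/android/chromium-debug.keystore"
+  #     keystore_name = "chromiumdebugkey"
+  #     keystore_password = "chromium"
+  #     min_sdk_version = 21
+  #     packaged_resources_path = "$target_out_dir/foo.ap_"
+  #     output_apk_path = "$root_build_dir/apks/Foo.apk"
+  #     deps = [ ":foo__compile_resources" ]
+  #   }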
+  template("package_apk") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "public_deps" ])
+    _deps = invoker.deps
+    _native_lib_placeholders = []
+    if (defined(invoker.native_lib_placeholders)) {
+      _native_lib_placeholders = invoker.native_lib_placeholders
+    }
+    _secondary_native_lib_placeholders = []
+    if (defined(invoker.secondary_native_lib_placeholders)) {
+      _secondary_native_lib_placeholders =
+          invoker.secondary_native_lib_placeholders
+    }
+
+    _script = "//build/android/gyp/apkbuilder.py"
+    _apksigner = "$android_sdk_build_tools/lib/apksigner.jar"
+    _zipalign = "$android_sdk_build_tools/zipalign"
+
+    _inputs = [
+      invoker.build_config,
+      invoker.keystore_path,
+      invoker.packaged_resources_path,
+      _apksigner,
+      _zipalign,
+    ]
+
+    _outputs = [ invoker.output_apk_path ]
+    _data = [ invoker.output_apk_path ]
+
+    _rebased_compiled_resources_path =
+        rebase_path(invoker.packaged_resources_path, root_build_dir)
+    _rebased_packaged_apk_path =
+        rebase_path(invoker.output_apk_path, root_build_dir)
+    _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+    _args = [
+      "--resource-apk=$_rebased_compiled_resources_path",
+      "--output-apk=$_rebased_packaged_apk_path",
+      "--assets=@FileArg($_rebased_build_config:assets)",
+      "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+      "--apksigner-jar",
+      rebase_path(_apksigner, root_build_dir),
+      "--zipalign-path",
+      rebase_path(_zipalign, root_build_dir),
+      "--key-path",
+      rebase_path(invoker.keystore_path, root_build_dir),
+      "--key-name",
+      invoker.keystore_name,
+      "--key-passwd",
+      invoker.keystore_password,
+      "--min-sdk-version=${invoker.min_sdk_version}",
+
+      # TODO(mlopatkin) We are relying on the fact that build_config is an APK
+      # build_config.
+      "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)",
+    ]
+    if (is_official_build) {
+      _args += [ "--best-compression" ]
+    }
+    if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) {
+      _args += [ "--uncompress-dex" ]
+    }
+    if (defined(invoker.uncompress_shared_libraries) &&
+        invoker.uncompress_shared_libraries) {
+      _args += [ "--uncompress-shared-libraries=True" ]
+    }
+    if (defined(invoker.library_always_compress)) {
+      _args +=
+          [ "--library-always-compress=${invoker.library_always_compress}" ]
+    }
+    if (defined(invoker.library_renames)) {
+      _args += [ "--library-renames=${invoker.library_renames}" ]
+    }
+    if (defined(invoker.dex_path)) {
+      _inputs += [ invoker.dex_path ]
+      _args += [
+        "--dex-file",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+    if (defined(invoker.jdk_libs_dex)) {
+      _inputs += [ invoker.jdk_libs_dex ]
+      _args += [
+        "--jdk-libs-dex-file",
+        rebase_path(invoker.jdk_libs_dex, root_build_dir),
+      ]
+    }
+    if ((defined(invoker.loadable_modules) && invoker.loadable_modules != []) ||
+        defined(invoker.native_libs_filearg) ||
+        _native_lib_placeholders != []) {
+      _args += [ "--android-abi=$android_app_abi" ]
+    }
+    if (defined(android_app_secondary_abi)) {
+      _args += [ "--secondary-android-abi=$android_app_secondary_abi" ]
+    }
+    if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
+      _inputs += invoker.loadable_modules
+      _rebased_loadable_modules =
+          rebase_path(invoker.loadable_modules, root_build_dir)
+      _args += [ "--native-libs=$_rebased_loadable_modules" ]
+    }
+    if (defined(invoker.native_libs_filearg)) {
+      _args += [ "--native-libs=${invoker.native_libs_filearg}" ]
+    }
+    if (_native_lib_placeholders != []) {
+      _args += [ "--native-lib-placeholders=$_native_lib_placeholders" ]
+    }
+
+    if (defined(invoker.secondary_abi_native_libs_filearg)) {
+      _args += [
+        "--secondary-native-libs=${invoker.secondary_abi_native_libs_filearg}",
+      ]
+    }
+    if (defined(invoker.secondary_abi_loadable_modules)) {
+      _rebased_secondary_abi_loadable_modules =
+          rebase_path(invoker.secondary_abi_loadable_modules, root_build_dir)
+      _args +=
+          [ "--secondary-native-libs=$_rebased_secondary_abi_loadable_modules" ]
+    }
+    if (_secondary_native_lib_placeholders != []) {
+      _args += [ "--secondary-native-lib-placeholders=$_secondary_native_lib_placeholders" ]
+    }
+    if (treat_warnings_as_errors) {
+      _args += [ "--warnings-as-errors" ]
+    }
+
+    if (defined(invoker.expected_libs_and_assets)) {
+      _expectations_target =
+          "${invoker.top_target_name}_validate_libs_and_assets"
+      action_with_pydeps(_expectations_target) {
+        _actual_file = "$target_gen_dir/$target_name.libs_and_assets"
+        _failure_file =
+            "$expectations_failure_dir/" +
+            string_replace(invoker.expected_libs_and_assets, "/", "_")
+        inputs = [
+          invoker.build_config,
+          invoker.expected_libs_and_assets,
+        ]
+        deps = [ invoker.build_config_dep ]
+        outputs = [
+          _actual_file,
+          _failure_file,
+        ]
+        script = _script
+        args = _args + [
+                 "--expected-file",
+                 rebase_path(invoker.expected_libs_and_assets, root_build_dir),
+                 "--actual-file",
+                 rebase_path(_actual_file, root_build_dir),
+                 "--failure-file",
+                 rebase_path(_failure_file, root_build_dir),
+                 "--only-verify-expectations",
+               ]
+        if (defined(invoker.expected_libs_and_assets_base)) {
+          inputs += [ invoker.expected_libs_and_assets_base ]
+          args += [
+            "--expected-file-base",
+            rebase_path(invoker.expected_libs_and_assets_base, root_build_dir),
+          ]
+        }
+        if (fail_on_android_expectations) {
+          args += [ "--fail-on-expectations" ]
+        }
+      }
+      _deps += [ ":$_expectations_target" ]
+    }
+    action_with_pydeps(target_name) {
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = _inputs
+      deps = _deps
+      data = _data
+      outputs = _outputs
+      script = _script
+      args = _args + [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+             ]
+    }
+  }
+
+  # Compile Java source files into a .jar file, potentially using an
+  # annotation processor and/or the errorprone compiler.
+  #
+  # Note that the only way to specify custom annotation processors is
+  # by using build_config to point to a file that corresponds to a java-related
+  # target that includes javac:processor_classes entries (i.e. there is no
+  # variable here that can be used for this purpose).
+  #
+  # Note also the peculiar use of java_files / java_sources_file. The content
+  # of the java_files list and the java_sources_file file must match exactly.
+  # This rule uses java_files only to list the inputs to the action that
+  # calls compile_java.py, but will pass the list of Java source files
+  # with the '@${java_sources_file}' command-line syntax. Not a problem in
+  # practice since this is only called from java_library_impl() that sets up
+  # the variables properly.
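+  # (For illustration: java_library_impl() further below keeps the two in
+  # sync with, in essence,
+  #   write_file(java_sources_file, rebase_path(java_files, root_build_dir))
+  # as seen in its definition.)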
+  #
+  # Variables:
+  #  main_target_name: Used when extracting srcjars for codesearch.
+  #  java_files: Optional list of Java source file paths.
+  #  srcjar_deps: Optional list of .srcjar dependencies (not file paths).
+  #    The corresponding source files they contain will be compiled too.
+  #  java_sources_file: Optional path to file containing list of Java source
+  #    file paths. This must always be provided if java_files is not empty
+  #    and must match it exactly.
+  #  build_config: Path to the .build_config file of the corresponding
+  #    java_library_impl() target. The following entries will be used by this
+  #    template: javac:srcjars, deps_info:javac_full_classpath,
+  #    deps_info:javac_full_interface_classpath, javac:processor_classpath,
+  #    javac:processor_classes
+  #  output_jar_path: Path to the final output .jar file.
+  #  javac_args: Optional list of extra arguments to pass to javac.
+  #  chromium_code: Whether this corresponds to Chromium-specific sources.
+  #  requires_android: True if these sources can only run on Android.
+  #  additional_jar_files: Optional list of files to copy into the resulting
+  #    .jar file (by default, only .class files are put there). Each entry
+  #    has the 'srcPath:dstPath' format.
+  #  enable_errorprone: If True, use the errorprone compiler to check for
+  #    error-prone constructs in the language. If not provided, whether this is
+  #    enabled depends on chromium_code and the global
+  #    use_errorprone_java_compiler variable.
+  #  use_turbine: If True, compile headers using turbine.py.
+  #  apk_name: Optional APK name. If provided, will tell compile_java.py to also
+  #    generate a .apk.jar.info file under size-info/${apk_name}.apk.jar.info
+  #  processor_args_javac: List of annotation processor arguments, each one
+  #    will be passed to javac as -A<entry>.
+  #  deps: Dependencies for the corresponding target.
+  #  testonly: Usual meaning (should be True for test-only targets)
+  #
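+  # Example (an illustrative sketch; target and file names below are
+  # hypothetical):
+  #
+  #   compile_java("foo_java__compile_java") {
+  #     main_target_name = "foo_java"
+  #     build_config = "$target_gen_dir/foo_java.build_config"
+  #     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
+  #     java_sources_file = "$target_gen_dir/foo_java.sources"
+  #     output_jar_path = "$target_out_dir/foo_java.javac.jar"
+  #     chromium_code = true
+  #     supports_android = true
+  #     requires_android = true
+  #     use_turbine = false
+  #     enable_errorprone = false
+  #     deps = [ ":foo_java__build_config" ]
+  #   }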
+  template("compile_java") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _build_config = invoker.build_config
+    _chromium_code = invoker.chromium_code
+
+    _processor_args = []
+    if (defined(invoker.processor_args_javac)) {
+      _processor_args = invoker.processor_args_javac
+    }
+
+    _additional_jar_files = []
+    if (defined(invoker.additional_jar_files)) {
+      _additional_jar_files = invoker.additional_jar_files
+    }
+
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps += invoker.srcjar_deps
+    }
+
+    _java_srcjars = []
+    foreach(dep, _srcjar_deps) {
+      _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+      _dep_name = get_label_info(dep, "name")
+      _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+    }
+
+    # generated_jar_path is an output when use_turbine and an input otherwise.
+    if (!invoker.use_turbine && defined(invoker.generated_jar_path)) {
+      _annotation_processing = false
+      _java_srcjars += [ invoker.generated_jar_path ]
+    } else {
+      _annotation_processing = true
+    }
+
+    _javac_args = []
+    if (defined(invoker.javac_args)) {
+      _javac_args = invoker.javac_args
+    }
+
+    action_with_pydeps(target_name) {
+      if (invoker.use_turbine) {
+        script = "//build/android/gyp/turbine.py"
+      } else {
+        script = "//build/android/gyp/compile_java.py"
+      }
+
+      if (target_name == "chrome_java__header") {
+        # Regression test for: https://crbug.com/1154302
+        assert_no_deps = [ "//base:base_java__impl" ]
+      }
+
+      depfile = "$target_gen_dir/$target_name.d"
+      deps = _srcjar_deps
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      outputs = [ invoker.output_jar_path ]
+      if (!invoker.enable_errorprone && !invoker.use_turbine) {
+        outputs += [ invoker.output_jar_path + ".info" ]
+      }
+      inputs = invoker.java_files + _java_srcjars + [ _build_config ]
+      if (invoker.java_files != []) {
+        inputs += [ invoker.java_sources_file ]
+      }
+
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      _rebased_output_jar_path =
+          rebase_path(invoker.output_jar_path, root_build_dir)
+      _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+      _rebased_depfile = rebase_path(depfile, root_build_dir)
+      _rebased_generated_dir = rebase_path(
+              "$target_gen_dir/${invoker.main_target_name}/generated_java",
+              root_build_dir)
+      args = [
+        "--depfile=$_rebased_depfile",
+        "--generated-dir=$_rebased_generated_dir",
+        "--jar-path=$_rebased_output_jar_path",
+        "--java-srcjars=$_rebased_java_srcjars",
+      ]
+
+      if (defined(invoker.header_jar_path)) {
+        inputs += [ invoker.header_jar_path ]
+        args += [
+          "--header-jar",
+          rebase_path(invoker.header_jar_path, root_build_dir),
+        ]
+        _header_jar_classpath =
+            [ rebase_path(invoker.header_jar_path, root_build_dir) ]
+        args += [ "--classpath=$_header_jar_classpath" ]
+      }
+
+      if (invoker.use_turbine) {
+        # Prefer direct deps for turbine as much as possible.
+        args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
+      } else {
+        args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ]
+      }
+
+      if (_annotation_processing) {
+        args += [
+          "--processorpath=@FileArg($_rebased_build_config:javac:processor_classpath)",
+          "--processors=@FileArg($_rebased_build_config:javac:processor_classes)",
+        ]
+      }
+
+      if (invoker.use_turbine) {
+        _turbine_jar_path = "//third_party/turbine/turbine.jar"
+        inputs += [ _turbine_jar_path ]
+        outputs += [ invoker.generated_jar_path ]
+        args += [
+          "--turbine-jar-path",
+          rebase_path(_turbine_jar_path, root_build_dir),
+          "--generated-jar-path",
+          rebase_path(invoker.generated_jar_path, root_build_dir),
+        ]
+      }
+
+      # Currently turbine does not support JDK11.
+      if (invoker.supports_android || invoker.use_turbine) {
+        args += [ "--java-version=1.8" ]
+      }
+      if (use_java_goma) {
+        args += [ "--gomacc-path=$goma_dir/gomacc" ]
+
+        # Override the default action_pool when goma is enabled.
+        pool = "//build/config/android:goma_javac_pool"
+      }
+
+      # Flag enable_kythe_annotations requires
+      # checkout_android_prebuilts_build_tools=True in .gclient.
+      if (enable_kythe_annotations && !invoker.enable_errorprone) {
+        args += [ "--enable-kythe-annotations" ]
+      }
+      if (invoker.requires_android) {
+        args += [ "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ]
+      }
+      if (_chromium_code) {
+        args += [ "--chromium-code=1" ]
+        if (treat_warnings_as_errors) {
+          args += [ "--warnings-as-errors" ]
+        }
+      }
+      if (defined(invoker.jar_excluded_patterns)) {
+        args += [ "--jar-info-exclude-globs=${invoker.jar_excluded_patterns}" ]
+      }
+
+      if (invoker.enable_errorprone) {
+        # Our custom plugin pulls in the main errorprone dep transitively.
+        _errorprone_dep = "//tools/android/errorprone_plugin:errorprone_plugin"
+        deps += [ _errorprone_dep ]
+        _dep_gen_dir = get_label_info(_errorprone_dep, "target_gen_dir")
+        _dep_name = get_label_info(_errorprone_dep, "name")
+        _rebased_errorprone_buildconfig =
+            rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir)
+        args += [
+          "--target-name",
+          get_label_info(":$target_name", "label_no_toolchain"),
+          "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:host_classpath)",
+          "--enable-errorprone",
+        ]
+      }
+      if (defined(invoker.skip_build_server) && invoker.skip_build_server) {
+        # Nocompile tests need lint to fail through ninja.
+        args += [ "--skip-build-server" ]
+      }
+
+      foreach(e, _processor_args) {
+        args += [ "--processor-arg=" + e ]
+      }
+
+      foreach(file_tuple, _additional_jar_files) {
+        # Each element is of length two, [ path_to_file, path_to_put_in_jar ]
+        inputs += [ file_tuple[0] ]
+        args +=
+            [ "--additional-jar-file=" +
+              rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ]
+      }
+      if (invoker.java_files != []) {
+        args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
+      }
+      foreach(e, _javac_args) {
+        args += [ "--javac-arg=" + e ]
+      }
+    }
+  }
+
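+  # Group target that forwards each java library dep to its
+  # "${dep}__${group_name}" sub-target and keeps other deps as-is.
+  # A hedged usage sketch (names below are hypothetical):
+  #
+  #   java_lib_group("foo__assetres") {
+  #     group_name = "assetres"
+  #     deps = [ ":bar_java" ]  # Forwarded as ":bar_java__assetres".
+  #   }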
+  template("java_lib_group") {
+    forward_variables_from(invoker, [ "testonly" ])
+    _group_name = invoker.group_name
+    not_needed([ "_group_name" ])
+    group(target_name) {
+      if (defined(invoker.deps)) {
+        deps = []
+        foreach(_dep, invoker.deps) {
+          _target_label = get_label_info(_dep, "label_no_toolchain")
+          if (filter_exclude([ _target_label ], _java_library_patterns) == [] &&
+              filter_exclude([ _target_label ], _java_resource_patterns) !=
+              []) {
+            # This is a java library dep, so replace it.
+            deps += [ "${_target_label}__${_group_name}" ]
+          } else {
+            # Transitive java group targets should also include direct deps.
+            deps += [ _dep ]
+          }
+        }
+      }
+    }
+  }
+
+  # Create an interface jar from a normal jar.
+  #
+  # Variables
+  #   input_jar: Path to input .jar.
+  #   output_jar: Path to output .ijar.
+  #
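+  # Example (hypothetical paths, shown only as a sketch):
+  #
+  #   generate_interface_jar("foo__header") {
+  #     input_jar = "$target_out_dir/foo.jar"
+  #     output_jar = "$target_out_dir/foo.ijar.jar"
+  #   }
+  #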
+  template("generate_interface_jar") {
+    action_with_pydeps(target_name) {
+      _ijar_target = "//third_party/ijar:ijar($host_toolchain)"
+      _ijar_executable = get_label_info(_ijar_target, "root_out_dir") + "/ijar"
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "data",
+                                   "data_deps",
+                                   "public_deps",
+                                 ])
+      script = "//build/android/gyp/ijar.py"
+      deps = [ _ijar_target ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      inputs = [
+        invoker.input_jar,
+        _ijar_executable,
+      ]
+      if (defined(invoker.inputs)) {
+        inputs += invoker.inputs
+      }
+      outputs = [ invoker.output_jar ]
+      args = [
+        rebase_path(_ijar_executable, root_build_dir),
+        rebase_path(invoker.input_jar, root_build_dir),
+        rebase_path(invoker.output_jar, root_build_dir),
+      ]
+    }
+  }
+
+  # A rule that will handle multiple Java-related targets.
+  #
+  # The caller can provide a list of source files with 'sources'
+  # and 'srcjar_deps', or a prebuilt .jar file through 'jar_path'.
+  #
+  # In the case of a 'java_binary' target type, it can even provide none of
+  # that (and the rule will just generate its wrapper script).
+  #
+  # The template will process the input .jar file (either the prebuilt one,
+  # or the result of compiling the sources), for example to apply Proguard,
+  # as well as other bytecode-level rewriting steps.
+  #
+  # Variables:
+  #  type: type of Java target, valid values: 'java_library', 'java_binary',
+  #    'junit_binary', 'java_annotation_processor', and 'android_apk'
+  #  main_target_name: optional. If provided, overrides target_name when
+  #    creating sub-targets (e.g. "${main_target_name}__dex") and
+  #    some output files (e.g. "${main_target_name}.sources"). Only used
+  #    for 'android_apk' types at the moment, where main_target_name will
+  #    be the name of the main APK target.
+  #  supports_android: Optional. True if target can run on Android.
+  #  requires_android: Optional. True if target can only run on Android.
+  #  sources: Optional list of Java source file paths for this target.
+  #  javac_args: Optional list of extra arguments to pass to javac.
+  #  errorprone_args: Optional list of extra arguments to pass to errorprone.
+  #  srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
+  #    source files they contain will also be compiled for this target.
+  #  java_sources_file: Optional path to a file which will be written with
+  #    the content of sources. If not provided, the file will be written
+  #    under $target_gen_dir/$main_target_name.sources. Ignored if
+  #    sources is empty.
+  #  jar_path: Optional path to a prebuilt .jar file for this target.
+  #    Mutually exclusive with sources and srcjar_deps.
+  #  output_name: Optional output name for the final jar path. Used to
+  #    determine the name of the final jar. Default is to use the same
+  #    name as jar_path, if provided, or main_target_name.
+  #  main_class: Main Java class name for 'java_binary', 'junit_binary' and
+  #    'java_annotation_processor' target types. Should not be set for other
+  #    ones.
+  #  deps: Dependencies for this target.
+  #  public_deps: Dependencies that this target exposes as part of its public API.
+  #    A dep must not appear in both the 'deps' and 'public_deps' lists;
+  #    any overlap triggers an assert below.
+  #  testonly: True iff target should only be used for tests.
+  #  chromium_code: Optional. Whether this is Chromium-specific code. If not
+  #    provided, this is determined automatically, based on the location of
+  #    the source files (i.e. anything under third_party/ is not
+  #    Chromium-specific unless it is in a 'chromium' sub-directory).
+  #  jacoco_never_instrument: Optional. If provided, whether to forbid
+  #    instrumentation with the Jacoco coverage processor. If not provided,
+  #    this is controlled by the global use_jacoco_coverage build arg variable
+  #    and only used for non-test Chromium code.
+  #  include_android_sdk: Optional. Whether or not the android SDK dep
+  #    should be added to deps. Defaults to true for non-system libraries
+  #    that support android.
+  #  alternative_android_sdk_dep: Optional. Alternative Android SDK
+  #    java target to use.
+  #  annotation_processor_deps: Optional list of dependencies corresponding
+  #    to annotation processors used to compile these sources.
+  #  input_jars_paths: Optional list of additional .jar file paths, which will
+  #    be added to the compile-time classpath when building this target (but
+  #    not to the runtime classpath).
+  #  desugar_jars_paths: Optional list of additional .jar file paths, which will
+  #    be added to the desugar classpath when building this target (but not to
+  #    any other classpath). This is only used to break dependency cycles.
+  #  gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this
+  #    library via its built .jar rather than including its .java sources.
+  #  proguard_enabled: Optional. True to enable ProGuard obfuscation.
+  #  proguard_configs: Optional list of additional proguard config file paths.
+  #  bypass_platform_checks: Optional. If True, platform checks will not
+  #    be performed. These checks verify that every target with
+  #    requires_android only depends on targets that at least set
+  #    supports_android. Similarly, a target with !supports_android cannot
+  #    depend on any other target that has requires_android.
+  #  include_java_resources: Optional. If True, include Java (not Android)
+  #    resources into final .jar file.
+  #  jar_excluded_patterns: Optional list of .class file patterns to exclude
+  #    from the final .jar file.
+  #  jar_included_patterns: Optional list of .class file patterns to include
+  #    in the final .jar file. jar_excluded_patterns take precedence over this.
+  #  low_classpath_priority: Indicates that the library should be placed at the
+  #    end of the classpath. The default classpath order has libraries ordered
+  #    before the libraries that they depend on. 'low_classpath_priority' is
+  #    useful when one java_library() overrides another via
+  #    'jar_excluded_patterns' and the overriding library does not depend on the
+  #    overridee.
+  #
+  # For 'android_apk' and 'android_app_bundle_module' targets only:
+  #
+  #  apk_path: Path to the final APK file.
+  #  android_manifest: Path to AndroidManifest.xml file for the APK.
+  #  android_manifest_dep: Optional. Dependency target that generates
+  #    android_manifest.
+  #  apk_under_test: For 'android_apk' targets used to test other APKs,
+  #    this is the target name of APK being tested.
+  #  incremental_apk_path: Path to the incremental APK.
+  #  incremental_install_json_path: Path to the incremental install json.
+  #  native_lib_placeholders: Optional. List of placeholder filenames to add to
+  #    the APK.
+  #  proguard_mapping_path: Path to .mapping file produced from ProGuard step.
+  #  shared_libraries_runtime_deps_file: Optional. Path to a file listing the
+  #    native shared libraries required at runtime by the APK.
+  #  secondary_abi_shared_libraries_runtime_deps_file:
+  #  secondary_native_lib_placeholders: Optional. List of placeholder filenames
+  #    to add to the APK for the secondary ABI.
+  #  loadable_modules: Optional list of extra native libraries to
+  #    be stored in the APK.
+  #  secondary_abi_loadable_modules: Optional list of native libraries for
+  #    secondary ABI.
+  #  uncompress_shared_libraries: Optional. True to store native shared
+  #    libraries uncompressed and page-aligned.
+  #  proto_resources_path: The path of a zip archive containing the APK's
+  #    resources compiled to the protocol buffer format (instead of regular
+  #    binary xml + resources.arsc).
+  #  r_text_path: The path of the R.txt file generated when compiling the
+  #    resources for this target.
+  #  module_pathmap_path: The path of the pathmap file generated when compiling
+  #    the resources for the bundle module, if path shortening is enabled.
+  #  base_allowlist_rtxt_path: The path of the R.txt file containing the
+  #    list of string resources to keep in the base split APK for any bundle
+  #    that uses this target.
+  #
+  # For 'java_binary' and 'junit_binary' targets only. Ignored by others:
+  #
+  #  wrapper_script_name: Optional name for the generated wrapper script.
+  #    Default is main target name.
+  #  wrapper_script_args: Optional list of extra arguments used by the
+  #    generated wrapper script.
+  #
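+  # Example (a minimal sketch; names below are hypothetical and most
+  # optional variables are omitted):
+  #
+  #   java_library_impl("foo_java") {
+  #     type = "java_library"
+  #     chromium_code = true
+  #     supports_android = true
+  #     requires_android = true
+  #     sources = [ "java/src/org/chromium/foo/Foo.java" ]
+  #     deps = [ "//base:base_java" ]
+  #   }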
+  template("java_library_impl") {
+    # TODO(crbug.com/1042017): Remove.
+    not_needed(invoker, [ "no_build_hooks" ])
+
+    forward_variables_from(invoker, [ "testonly" ])
+    _is_prebuilt = defined(invoker.jar_path)
+    _is_annotation_processor = invoker.type == "java_annotation_processor"
+    _is_java_binary =
+        invoker.type == "java_binary" || invoker.type == "junit_binary"
+    _supports_android =
+        defined(invoker.supports_android) && invoker.supports_android
+    _requires_android =
+        defined(invoker.requires_android) && invoker.requires_android
+
+    _invoker_deps = []
+    if (defined(invoker.deps)) {
+      _invoker_deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      foreach(_public_dep, invoker.public_deps) {
+        if (filter_include([ _public_dep ], _invoker_deps) != []) {
+          assert(false, "'public_deps' and 'deps' overlap: $_public_dep")
+        }
+      }
+      _invoker_deps += invoker.public_deps
+    }
+
+    _main_target_name = target_name
+    if (defined(invoker.main_target_name)) {
+      _main_target_name = invoker.main_target_name
+    }
+
+    if (defined(invoker.resources_package)) {
+      _resources_package = invoker.resources_package
+    }
+
+    _java_files = []
+    if (defined(invoker.sources)) {
+      _java_files = invoker.sources
+    }
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps = invoker.srcjar_deps
+    }
+    _has_sources = _java_files != [] || _srcjar_deps != []
+
+    if (_is_prebuilt) {
+      assert(!_has_sources)
+    } else {
+      # Allow java_binary to not specify any sources. This is needed when a prebuilt
+      # is needed as a library as well as a binary.
+      assert(_is_annotation_processor || _is_java_binary || _has_sources)
+    }
+
+    if (_is_java_binary) {
+      assert(defined(invoker.main_class),
+             "${invoker.type}() must set main_class")
+    } else if (_is_annotation_processor) {
+      assert(defined(invoker.main_class),
+             "java_annotation_processor() must set main_class")
+    } else {
+      assert(!defined(invoker.main_class),
+             "main_class cannot be used for target of type ${invoker.type}")
+    }
+
+    if (defined(invoker.chromium_code)) {
+      _chromium_code = invoker.chromium_code
+    } else {
+      # Default based on whether target is in third_party.
+      _chromium_code =
+          filter_exclude([ get_label_info(":$_main_target_name", "dir") ],
+                         [ "*\bthird_party\b*" ]) != []
+      if (!_chromium_code && !_is_prebuilt && _java_files != []) {
+        # Unless third_party code has an org.chromium file in it.
+        _chromium_code =
+            filter_exclude(_java_files, [ "*\bchromium\b*" ]) != _java_files
+      }
+    }
+
+    # Define build_config_deps which will be a list of targets required to
+    # build the _build_config.
+    _build_config = "$target_gen_dir/$_main_target_name.build_config"
+    _build_config_target_name =
+        "${_main_target_name}$build_config_target_suffix"
+
+    # The only target that might have no prebuilt and no sources is a java_binary.
+    _build_host_jar = false
+    _build_device_jar = false
+    if (_is_prebuilt || _has_sources) {
+      if (defined(invoker.output_name)) {
+        _output_name = invoker.output_name
+      } else if (_is_prebuilt) {
+        _output_name = get_path_info(invoker.jar_path, "name")
+      } else {
+        _output_name = _main_target_name
+      }
+
+      _build_host_jar = _is_java_binary || _is_annotation_processor ||
+                        invoker.type == "java_library"
+      _build_device_jar =
+          invoker.type != "system_java_library" && _supports_android
+      if (_build_host_jar) {
+        # Jar files can be needed at runtime (by Robolectric tests or java binaries),
+        # so do not put them under obj/.
+        # TODO(agrieve): I suspect it would be better to use dist_jar for java_binary
+        #     rather than archiving unnecessary .jar files within lib.java.
+        _target_dir_name = get_label_info(":$_main_target_name", "dir")
+        _host_processed_jar_path =
+            "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar"
+      }
+      if (_build_device_jar) {
+        _device_processed_jar_path =
+            "$target_out_dir/$_output_name.processed.jar"
+        _dex_path = "$target_out_dir/$_main_target_name.dex.jar"
+        _enable_desugar =
+            !defined(invoker.enable_desugar) || invoker.enable_desugar
+      }
+
+      # For static libraries, the javac jar output is created at the intermediate
+      # path so that it can be processed by another target and moved to the final
+      # spot that the .build_config knows about. Technically this should be done
+      # for the ijar as well, but this is only used for APK targets where
+      # the ijar path isn't actually used.
+      if (_has_sources) {
+        _final_ijar_path = "$target_out_dir/$_output_name.turbine.jar"
+      } else {
+        _final_ijar_path = "$target_out_dir/$_output_name.ijar.jar"
+      }
+
+      if (_has_sources) {
+        _javac_jar_path = "$target_out_dir/$_main_target_name.javac.jar"
+        _generated_jar_path =
+            "$target_gen_dir/$_main_target_name.generated.srcjar"
+      }
+
+      if (_is_prebuilt) {
+        _unprocessed_jar_path = invoker.jar_path
+      } else {
+        _unprocessed_jar_path = _javac_jar_path
+      }
+    }
+
+    if (_is_prebuilt || _has_sources) {
+      _java_res_deps = []
+      _java_header_deps = []
+      _java_impl_deps = []
+      _non_java_deps = []
+      foreach(_dep, _invoker_deps) {
+        _target_label = get_label_info(_dep, "label_no_toolchain")
+        if (filter_exclude([ _target_label ], _java_resource_patterns) == []) {
+          _java_res_deps += [ _dep ]
+        } else if (filter_exclude([ _target_label ], _java_library_patterns) ==
+                   []) {
+          # This is a java library dep, so it has header and impl targets.
+          _java_header_deps += [ "${_target_label}__header" ]
+          _java_impl_deps += [ "${_target_label}__impl" ]
+        } else {
+          _non_java_deps += [ _dep ]
+        }
+      }
+
+      # Don't need to depend on the apk-under-test to be packaged.
+      if (defined(invoker.apk_under_test)) {
+        _java_header_deps += [ "${invoker.apk_under_test}__java__header" ]
+        _java_impl_deps += [ "${invoker.apk_under_test}__java__impl" ]
+      }
+
+      # These deps cannot be passed via invoker.deps since bundle_module targets
+      # have bundle_module.build_config without the __java suffix, so they are
+      # special and cannot be passed as regular deps to write_build_config.
+      if (defined(invoker.base_module_target)) {
+        _java_header_deps += [ "${invoker.base_module_target}__java__header" ]
+        _java_impl_deps += [ "${invoker.base_module_target}__java__impl" ]
+      }
+
+      _extra_java_deps = []
+      _jacoco_instrument =
+          use_jacoco_coverage && _chromium_code && _java_files != [] &&
+          _build_device_jar && (!defined(invoker.testonly) || !invoker.testonly)
+      if (defined(invoker.jacoco_never_instrument)) {
+        _jacoco_instrument =
+            !invoker.jacoco_never_instrument && _jacoco_instrument
+      }
+      if (_jacoco_instrument) {
+        _extra_java_deps += [ "//third_party/jacoco:jacocoagent_java" ]
+      }
+
+      _include_android_sdk = _build_device_jar
+      if (defined(invoker.include_android_sdk)) {
+        _include_android_sdk = invoker.include_android_sdk
+      }
+      if (_include_android_sdk) {
+        _sdk_java_dep = "//third_party/android_sdk:android_sdk_java"
+        if (defined(invoker.alternative_android_sdk_dep)) {
+          _sdk_java_dep = invoker.alternative_android_sdk_dep
+        }
+
+        # This is an android_system_java_prebuilt target, so no headers.
+        _extra_java_deps += [ _sdk_java_dep ]
+      }
+
+      # Classpath deps is used for header and dex targets, they do not need
+      # resource deps.
+      _classpath_deps = _java_header_deps + _non_java_deps + _extra_java_deps +
+                        [ ":$_build_config_target_name" ]
+
+      _full_classpath_deps =
+          _java_impl_deps + _java_res_deps + _non_java_deps + _extra_java_deps +
+          [ ":$_build_config_target_name" ]
+    }
+
+    # Often needed, but too hard to figure out when ahead of time.
+    not_needed([
+                 "_classpath_deps",
+                 "_full_classpath_deps",
+               ])
+
+    if (_java_files != []) {
+      _java_sources_file = "$target_gen_dir/$_main_target_name.sources"
+      if (defined(invoker.java_sources_file)) {
+        _java_sources_file = invoker.java_sources_file
+      }
+      write_file(_java_sources_file, rebase_path(_java_files, root_build_dir))
+    }
+
+    write_build_config(_build_config_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "aar_path",
+                               "annotation_processor_deps",
+                               "base_allowlist_rtxt_path",
+                               "gradle_treat_as_prebuilt",
+                               "input_jars_paths",
+                               "low_classpath_priority",
+                               "main_class",
+                               "proguard_configs",
+                               "proguard_enabled",
+                               "proguard_mapping_path",
+                               "public_target_label",
+                               "r_text_path",
+                               "type",
+                             ])
+      if (type == "android_apk" || type == "android_app_bundle_module") {
+        forward_variables_from(
+            invoker,
+            [
+              "android_manifest",
+              "android_manifest_dep",
+              "final_dex_path",
+              "loadable_modules",
+              "native_lib_placeholders",
+              "res_size_info_path",
+              "secondary_abi_loadable_modules",
+              "secondary_abi_shared_libraries_runtime_deps_file",
+              "secondary_native_lib_placeholders",
+              "shared_libraries_runtime_deps_file",
+              "static_library_dependent_targets",
+              "uncompress_shared_libraries",
+              "library_always_compress",
+              "library_renames",
+            ])
+      }
+      if (type == "android_apk") {
+        forward_variables_from(invoker,
+                               [
+                                 "apk_path",
+                                 "apk_under_test",
+                                 "incremental_apk_path",
+                                 "incremental_install_json_path",
+                               ])
+      }
+      if (type == "android_app_bundle_module") {
+        forward_variables_from(invoker,
+                               [
+                                 "base_module_target",
+                                 "is_base_module",
+                                 "module_pathmap_path",
+                                 "proto_resources_path",
+                                 "version_name",
+                                 "version_code",
+                               ])
+      }
+      chromium_code = _chromium_code
+      build_config = _build_config
+      is_prebuilt = _is_prebuilt
+
+      # Specifically avoid passing in invoker.base_module_target as one of the
+      # possible_config_deps.
+      possible_config_deps = _invoker_deps
+      if (defined(_extra_java_deps)) {
+        possible_config_deps += _extra_java_deps
+      }
+      if (defined(apk_under_test)) {
+        possible_config_deps += [ apk_under_test ]
+      }
+
+      if (defined(invoker.public_deps)) {
+        possible_config_public_deps = invoker.public_deps
+      }
+
+      supports_android = _supports_android
+      requires_android = _requires_android
+      bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+                               invoker.bypass_platform_checks
+
+      if (defined(_resources_package)) {
+        custom_package = _resources_package
+      }
+      if (_is_prebuilt || _has_sources) {
+        ijar_path = _final_ijar_path
+        unprocessed_jar_path = _unprocessed_jar_path
+      }
+      if (_build_host_jar) {
+        host_jar_path = _host_processed_jar_path
+      }
+      if (_build_device_jar) {
+        device_jar_path = _device_processed_jar_path
+        dex_path = _dex_path
+      }
+      if (_java_files != []) {
+        java_sources_file = _java_sources_file
+      }
+
+      bundled_srcjars = []
+      foreach(d, _srcjar_deps) {
+        _dep_gen_dir = get_label_info(d, "target_gen_dir")
+        _dep_name = get_label_info(d, "name")
+        bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+      }
+      if (defined(invoker.include_java_resources) &&
+          invoker.include_java_resources) {
+        java_resources_jar = _unprocessed_jar_path
+        if (defined(invoker.jar_path)) {
+          # Use original jar_path because _jar_path points to a library without
+          # resources.
+        } else {
+          java_resources_jar = _device_processed_jar_path
+        }
+      }
+    }
+
+    if (_is_prebuilt || _has_sources) {
+      _header_target_name = "${target_name}__header"
+    }
+
+    _public_deps = []
+    _analysis_public_deps = []
+    if (_has_sources) {
+      if (defined(invoker.enable_errorprone)) {
+        _enable_errorprone = invoker.enable_errorprone
+      } else {
+        _enable_errorprone =
+            _java_files != [] && _chromium_code && use_errorprone_java_compiler
+      }
+
+      _type = invoker.type
+
+      _uses_fake_rjava = _type == "java_library" && _requires_android
+
+      if (_uses_fake_rjava && defined(_resources_package)) {
+        # The target name ends in _resources so that it looks like a resources
+        # pattern, since it does act like one (other resources patterns need to
+        # depend on this before they can read its output R.txt).
+        _fake_rjava_target = "${target_name}__rjava_resources"
+        _possible_resource_deps = _invoker_deps
+        generate_r_java(_fake_rjava_target) {
+          deps = [ ":$_build_config_target_name" ]
+          if (defined(_possible_resource_deps)) {
+            possible_resource_deps = _possible_resource_deps
+          }
+          build_config = _build_config
+
+          # Filepath has to be exactly this because compile_java looks for the
+          # srcjar of srcjar_deps at this location $gen_dir/$target_name.srcjar
+          srcjar_path = "$target_gen_dir/$target_name.srcjar"
+          package = _resources_package
+        }
+        _srcjar_deps += [ ":$_fake_rjava_target" ]
+      }
+
+      template("compile_java_helper") {
+        _enable_errorprone =
+            defined(invoker.enable_errorprone) && invoker.enable_errorprone
+        if (_enable_errorprone) {
+          # Rely on the header jar to provide all .class files so that it is
+          # safe to omit generated files entirely for errorprone.
+          _filtered_java_files =
+              filter_exclude(_java_files, [ "$root_gen_dir*" ])
+        }
+        if (_enable_errorprone && _filtered_java_files == []) {
+          # Filtering out generated files resulted in no files left.
+          group(target_name) {
+            not_needed(invoker, "*")
+          }
+        } else {
+          compile_java(target_name) {
+            forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+            output_jar_path = invoker.output_jar_path
+            enable_errorprone = _enable_errorprone
+            use_turbine = defined(invoker.use_turbine) && invoker.use_turbine
+
+            main_target_name = _main_target_name
+            build_config = _build_config
+
+            if (_enable_errorprone) {
+              java_files = _filtered_java_files
+            } else {
+              java_files = _java_files
+              srcjar_deps = _srcjar_deps
+            }
+
+            if (java_files != []) {
+              java_sources_file = _java_sources_file
+            }
+            chromium_code = _chromium_code
+            supports_android = _supports_android
+            requires_android = _requires_android
+            if (!defined(deps)) {
+              deps = []
+            }
+            deps += _classpath_deps
+          }
+        }
+      }
+      _compile_java_forward_variables = [
+        "additional_jar_files",
+        "apk_name",
+        "jar_excluded_patterns",
+        "javac_args",
+        "processor_args_javac",
+        "skip_build_server",
+      ]
+      _annotation_processor_deps = []
+      if (defined(invoker.annotation_processor_deps)) {
+        _annotation_processor_deps = invoker.annotation_processor_deps
+      }
+
+      compile_java_helper(_header_target_name) {
+        forward_variables_from(invoker, _compile_java_forward_variables)
+        use_turbine = true
+        output_jar_path = _final_ijar_path
+        generated_jar_path = _generated_jar_path
+        deps = _annotation_processor_deps
+      }
+      _public_deps += [ ":$_header_target_name" ]
+
+      _compile_java_target = "${_main_target_name}__compile_java"
+      compile_java_helper(_compile_java_target) {
+        forward_variables_from(invoker, _compile_java_forward_variables)
+        output_jar_path = _javac_jar_path
+        deps = [ ":$_header_target_name" ]
+        header_jar_path = _final_ijar_path
+        generated_jar_path = _generated_jar_path
+      }
+      if (_enable_errorprone) {
+        _compile_java_errorprone_target = "${_main_target_name}__errorprone"
+        compile_java_helper(_compile_java_errorprone_target) {
+          forward_variables_from(invoker, _compile_java_forward_variables)
+          enable_errorprone = true
+          if (defined(invoker.errorprone_args)) {
+            if (!defined(javac_args)) {
+              javac_args = []
+            }
+            javac_args += invoker.errorprone_args
+          }
+          deps = [ ":$_header_target_name" ]
+          header_jar_path = _final_ijar_path
+          generated_jar_path = _generated_jar_path
+          output_jar_path = "$target_out_dir/$target_name.errorprone.stamp"
+        }
+        _analysis_public_deps += [ ":$_compile_java_errorprone_target" ]
+      }
+    }  # _has_sources
+
+    if (_is_prebuilt || _build_device_jar || _build_host_jar) {
+      _unprocessed_jar_deps = []
+      if (_has_sources) {
+        _unprocessed_jar_deps += [ ":$_compile_java_target" ]
+      }
+    }
+
+    if (defined(invoker.bytecode_rewriter_target)) {
+      assert(_build_host_jar || _build_device_jar,
+             "A host or device jar must be created to use bytecode rewriting")
+
+      _rewritten_jar = "$target_out_dir/${target_name}_rewritten.jar"
+      _rewritten_jar_target_name = "${target_name}__rewritten"
+      _rewriter_path = root_build_dir + "/bin/helper/" +
+                       get_label_info(invoker.bytecode_rewriter_target, "name")
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      action_with_pydeps(_rewritten_jar_target_name) {
+        script = "//build/android/gyp/bytecode_rewriter.py"
+        inputs = [
+          _rewriter_path,
+          _build_config,
+          _unprocessed_jar_path,
+        ]
+        outputs = [ _rewritten_jar ]
+        depfile = "$target_gen_dir/$target_name.d"
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--script",
+          rebase_path(_rewriter_path, root_build_dir),
+          "--classpath",
+          "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+          "--classpath",
+          "@FileArg($_rebased_build_config:android:sdk_jars)",
+          "--input-jar",
+          rebase_path(_unprocessed_jar_path, root_build_dir),
+          "--output-jar",
+          rebase_path(_rewritten_jar, root_build_dir),
+        ]
+        deps = _unprocessed_jar_deps + _full_classpath_deps +
+               [ invoker.bytecode_rewriter_target ]
+      }
+
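+      # GN does not allow replacing a nonempty list directly, so clear the
+      # list before pointing it at the rewritten jar target.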
+      _unprocessed_jar_deps = []
+      _unprocessed_jar_deps = [ ":$_rewritten_jar_target_name" ]
+      _unprocessed_jar_path = _rewritten_jar
+    }
+
+    if (_is_prebuilt) {
+      generate_interface_jar(_header_target_name) {
+        # Always use the unfiltered .jar to create the interface jar so that
+        # other targets will resolve filtered classes when depending on
+        # BuildConfig, NativeLibraries, etc.
+        input_jar = _unprocessed_jar_path
+        output_jar = _final_ijar_path
+
+        # Normally ijar does not require any deps, but:
+        # 1 - Some jars are bytecode rewritten by _unprocessed_jar_deps.
+        # 2 - Other jars need to be unzipped by _non_java_deps.
+        # 3 - It is expected that depending on a header target implies depending
+        #     on its transitive header target deps via _java_header_deps.
+        deps = _unprocessed_jar_deps + _non_java_deps + _java_header_deps
+      }
+      _public_deps += [ ":$_header_target_name" ]
+    }
+
+    if (_build_host_jar || _build_device_jar) {
+      _process_prebuilt_target_name = "${target_name}__process"
+      process_java_prebuilt(_process_prebuilt_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "jar_excluded_patterns",
+                                 "jar_included_patterns",
+                               ])
+        build_config = _build_config
+        build_config_dep = ":$_build_config_target_name"
+        input_jar_path = _unprocessed_jar_path
+        jar_deps = _unprocessed_jar_deps + _full_classpath_deps
+        if (_build_host_jar) {
+          host_jar_path = _host_processed_jar_path
+        }
+        if (_build_device_jar) {
+          device_jar_path = _device_processed_jar_path
+          jacoco_instrument = _jacoco_instrument
+          if (_jacoco_instrument) {
+            java_files = _java_files
+            java_sources_file = _java_sources_file
+          }
+          enable_desugar = _enable_desugar && enable_bazel_desugar
+          if (enable_desugar) {
+            classpath_deps = _classpath_deps
+            forward_variables_from(invoker, [ "desugar_jars_paths" ])
+          }
+        }
+
+        # proguard_configs listed on java_library targets need to be marked
+        # as inputs to at least one action so that "gn analyze" will know
+        # about them. Although ijar doesn't use them, it's a convenient spot
+        # to list them.
+        # https://crbug.com/827197
+        if (defined(invoker.proguard_configs)) {
+          inputs = invoker.proguard_configs
+          input_deps = _non_java_deps + _srcjar_deps  # For the aapt-generated
+                                                      # proguard rules.
+        }
+      }
+      if (_build_host_jar) {
+        _public_deps += [ ":${_process_prebuilt_target_name}_host" ]
+      }
+      if (_build_device_jar) {
+        _public_deps += [ ":${_process_prebuilt_target_name}_device" ]
+      }
+
+      _enable_bytecode_checks = !defined(invoker.enable_bytecode_checks) ||
+                                invoker.enable_bytecode_checks
+      if (_enable_bytecode_checks) {
+        _bytecode_checks_target = "${target_name}__validate_classpath"
+        bytecode_processor(_bytecode_checks_target) {
+          forward_variables_from(invoker, [ "missing_classes_allowlist" ])
+          deps = _unprocessed_jar_deps + _full_classpath_deps +
+                 [ ":$_build_config_target_name" ]
+          requires_android = _requires_android
+          target_label =
+              get_label_info(":${invoker.target_name}", "label_no_toolchain")
+          input_jar = _unprocessed_jar_path
+          build_config = _build_config
+          is_prebuilt = _is_prebuilt
+        }
+        _analysis_public_deps += [ ":$_bytecode_checks_target" ]
+      }
+    }
+
+    if (_build_device_jar) {
+      dex("${target_name}__dex") {
+        forward_variables_from(invoker,
+                               [
+                                 "desugar_jars_paths",
+                                 "proguard_enable_obfuscation",
+                               ])
+        input_class_jars = [ _device_processed_jar_path ]
+        enable_desugar = _enable_desugar
+        ignore_desugar_missing_deps = !_enable_bytecode_checks
+
+        # There's no value in per-class dexing prebuilts since they never
+        # change just one class at a time.
+        disable_incremental = _is_prebuilt
+        output = _dex_path
+        deps = [ ":${_process_prebuilt_target_name}_device" ]
+
+        if (enable_desugar && !enable_bazel_desugar) {
+          # Desugaring with D8 requires full classpath.
+          build_config = _build_config
+          final_ijar_path = _final_ijar_path
+          deps += _classpath_deps + [ ":$_header_target_name" ]
+        }
+
+        enable_multidex = false
+        is_library = true
+      }
+      _public_deps += [ ":${target_name}__dex" ]
+    }
+
+    if (_is_java_binary) {
+      # Targets might use the generated script while building, so make it a dep
+      # rather than a data_dep.
+      java_binary_script("${target_name}__java_binary_script") {
+        forward_variables_from(invoker,
+                               [
+                                 "tiered_stop_at_level_one",
+                                 "main_class",
+                                 "wrapper_script_args",
+                               ])
+        build_config = _build_config
+        script_name = _main_target_name
+        if (defined(invoker.wrapper_script_name)) {
+          script_name = invoker.wrapper_script_name
+        }
+        deps = [ ":$_build_config_target_name" ]
+      }
+      _public_deps += [ ":${target_name}__java_binary_script" ]
+    }
+
+    # The __impl target contains all non-analysis steps for this template.
+    # Having this separated out from the main target (which contains analysis
+    # steps) allows analysis steps for this target to be run concurrently with
+    # the non-analysis steps of other targets that depend on this one.
+    group("${target_name}__impl") {
+      public_deps = _public_deps
+    }
+
+    java_lib_group("${target_name}__assetres") {
+      deps = _invoker_deps
+      group_name = "assetres"
+
+      if (defined(_fake_rjava_target)) {
+        deps += [ ":$_fake_rjava_target" ]
+      }
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "assert_no_deps",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "public_deps",
+                               "visibility",
+                             ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += [ ":${target_name}__impl" ]
+      if (defined(_analysis_public_deps)) {
+        if (!defined(data_deps)) {
+          data_deps = []
+        }
+        data_deps += _analysis_public_deps
+      }
+    }
+  }
+}
+
+# Create a zip archive corresponding to an application bundle module.
+#
+# Compile all the components of a given android_apk_or_module() target into a
+# zip archive suitable to later create an android_app_bundle() target. This
+# archive's format is very similar to that of an APK, except for a few
+# differences in internal directory layout, and the fact that resources, as
+# well as xml files, are compiled using a protocol-buffer based format (instead
+# of the regular binary xml + resources.arsc).
+#
+# A final application bundle is built from one or more bundle modules, plus
+# a configuration file.
+#
+# Variables:
+#   module_zip_path: Output module path.
+#   build_config: Path to build_config of the android_apk_or_module() target.
+#   dex_path: If module is proguarded separately from the base module, dex_path
+#     is the path to its dex file and is passed directly to the creation script.
+#     Otherwise, dex_path is undefined and we retrieve the module's dex file
+#     using its build_config.
+#   expected_libs_and_assets: Verify the list of included native libraries
+#     and assets is consistent with the given expectation file.
+#   expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+#     with this file as the base.
+#   is_multi_abi: If true, adds a library placeholder for the missing ABI if
+#     either the primary or the secondary ABI has no native libraries set.
+#   module_name: The module's name.
+#   native_libraries_config: Path to file listing native libraries to be
+#     packaged into each module.
+#   proguard_enabled: Optional. True if proguarding is enabled for this
+#     bundle. Default is to enable this only for release builds. Note that
+#     this will always perform synchronized proguarding.
+#   top_target_name: Name of the top-level target, used to name the
+#     expectation-checking helper target when expected_libs_and_assets is set.
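+#
+# Example (a hypothetical sketch, not from a real caller; all names and
+# paths below are assumptions):
+#   create_android_app_bundle_module("foo_base_module") {
+#     module_name = "base"
+#     module_zip_path = "$target_out_dir/foo_base_module.zip"
+#     build_config = "$target_gen_dir/foo.build_config"
+#     native_libraries_config = "$target_gen_dir/foo_native_libs.json"
+#     min_sdk_version = 21
+#     deps = [ ":foo$build_config_target_suffix" ]
+#   }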
+template("create_android_app_bundle_module") {
+  _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+  _rebased_native_libraries_config =
+      rebase_path(invoker.native_libraries_config, root_build_dir)
+  _proguard_enabled =
+      defined(invoker.proguard_enabled) && invoker.proguard_enabled
+
+  forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+  _deps = invoker.deps
+  _script = "//build/android/gyp/apkbuilder.py"
+
+  # NOTE: Compared to the inputs of the "package_apk" template action,
+  #       this list is much smaller, since finalize_apk is never called
+  #       by apkbuilder.py --format=bundle-module. This means apksigner and
+  #       zipalign are not used, nor is the keystore. Other
+  #       dependencies like extra native libraries are all pulled from the
+  #       .build_config through @FileArg() references (see below) and
+  #       will be listed in the generated depfile instead.
+  _inputs = [
+    invoker.build_config,
+    invoker.native_libraries_config,
+  ]
+  _outputs = [ invoker.module_zip_path ]
+  _args = [
+    "--format=bundle-module",
+    "--output-apk",
+    rebase_path(invoker.module_zip_path, root_build_dir),
+    "--resource-apk=@FileArg(" +
+        "$_rebased_build_config:deps_info:proto_resources_path)",
+    "--assets=@FileArg($_rebased_build_config:assets)",
+    "--uncompressed-assets=@FileArg(" +
+        "$_rebased_build_config:uncompressed_assets)",
+    "--native-libs=@FileArg($_rebased_native_libraries_config" +
+        ":${invoker.module_name})",
+    "--native-lib-placeholders=@FileArg($_rebased_build_config" +
+        ":native:native_library_placeholders)",
+    "--secondary-native-lib-placeholders=@FileArg($_rebased_build_config" +
+        ":native:secondary_native_library_placeholders)",
+    "--android-abi=$android_app_abi",
+    "--min-sdk-version=${invoker.min_sdk_version}",
+    "--uncompress-shared-libraries=@FileArg(" +
+        "$_rebased_build_config:native:uncompress_shared_libraries)",
+    "--library-always-compress=@FileArg($_rebased_build_config:native:library_always_compress)",
+    "--library-renames=@FileArg($_rebased_build_config:native:library_renames)",
+  ]
+  if (defined(android_app_secondary_abi)) {
+    _rebased_secondary_abi_native_libraries_config =
+        rebase_path(invoker.secondary_abi_native_libraries_config,
+                    root_build_dir)
+    _args += [
+      "--secondary-native-libs",
+      "@FileArg($_rebased_secondary_abi_native_libraries_config" +
+          ":${invoker.module_name})",
+      "--secondary-android-abi=$android_app_secondary_abi",
+    ]
+  }
+  if (defined(invoker.is_multi_abi) && invoker.is_multi_abi) {
+    _args += [ "--is-multi-abi" ]
+  }
+  if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) {
+    _args += [ "--uncompress-dex" ]
+  }
+
+  # Use either provided dex path or build config path based on type of module.
+  if (defined(invoker.dex_path)) {
+    _inputs += [ invoker.dex_path ]
+    _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir)
+    _args += [ "--dex-file=$_rebased_dex_path" ]
+  } else {
+    _args += [ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)" ]
+  }
+
+  # The library is imported via proguard when proguard is enabled.
+  if (!_proguard_enabled && enable_jdk_library_desugaring &&
+      invoker.module_name == "base") {
+    _all_jdk_libs = "//build/android:all_jdk_libs"
+    _deps += [ _all_jdk_libs ]
+    _jdk_libs_dex =
+        get_label_info(_all_jdk_libs, "target_out_dir") + "/all_jdk_libs.l8.dex"
+    _inputs += [ _jdk_libs_dex ]
+    _args += [
+      "--jdk-libs-dex-file",
+      rebase_path(_jdk_libs_dex, root_build_dir),
+    ]
+  }
+
+  if (treat_warnings_as_errors) {
+    _args += [ "--warnings-as-errors" ]
+  }
+
+  if (defined(invoker.expected_libs_and_assets)) {
+    _expectations_target = "${invoker.top_target_name}_validate_libs_and_assets"
+    action_with_pydeps(_expectations_target) {
+      _actual_file = "$target_gen_dir/$target_name.libs_and_assets"
+      _failure_file = "$expectations_failure_dir/" +
+                      string_replace(invoker.expected_libs_and_assets, "/", "_")
+      inputs = [
+        invoker.expected_libs_and_assets,
+        invoker.build_config,
+        invoker.native_libraries_config,
+      ]
+      deps = [
+        invoker.build_config_target,
+        invoker.native_libraries_config_target,
+      ]
+      if (defined(android_app_secondary_abi)) {
+        inputs += [ invoker.secondary_abi_native_libraries_config ]
+        deps += [ invoker.secondary_abi_native_libraries_config_target ]
+      }
+      outputs = [
+        _actual_file,
+        _failure_file,
+      ]
+      script = _script
+      args = _args + [
+               "--expected-file",
+               rebase_path(invoker.expected_libs_and_assets, root_build_dir),
+               "--actual-file",
+               rebase_path(_actual_file, root_build_dir),
+               "--failure-file",
+               rebase_path(_failure_file, root_build_dir),
+               "--only-verify-expectations",
+             ]
+      if (defined(invoker.expected_libs_and_assets_base)) {
+        inputs += [ invoker.expected_libs_and_assets_base ]
+        args += [
+          "--expected-file-base",
+          rebase_path(invoker.expected_libs_and_assets_base, root_build_dir),
+        ]
+      }
+      if (fail_on_android_expectations) {
+        args += [ "--fail-on-expectations" ]
+      }
+    }
+    _deps += [ ":$_expectations_target" ]
+  }
+
+  action_with_pydeps(target_name) {
+    deps = _deps
+    inputs = _inputs
+    outputs = _outputs
+    script = _script
+    depfile = "$target_gen_dir/$target_name.d"
+    args = _args + [
+             "--depfile",
+             rebase_path(depfile, root_build_dir),
+           ]
+  }
+}
+
+# Splits input dex file(s) based on given feature jars into separate dex files
+# for each feature.
+#
+# Variables:
+#   proguard_mapping: Path to input proguard mapping produced by synchronized
+#     proguarding.
+#   input_dex_zip: Path to zipped dex files to split.
+#   all_modules: List of all modules. Each module must have build_config,
+#     name, and build_config_target properties.
+#   feature_jars_args: Optional list of args to be passed to dexsplitter.py.
+#     If used, it should include the jars owned by each feature (in the same order
+#     as all_modules). Allows invoker to pull the list of jars from a different
+#     .build_config than the module's .build_config.
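+#
+# Example (a hypothetical sketch; the module scope and paths below are
+# assumptions):
+#   dexsplitter("foo_dexsplitter") {
+#     input_dex_zip = "$target_out_dir/foo.r8dex.jar"
+#     proguard_mapping = "$target_out_dir/foo.mapping"
+#     all_modules = [
+#       {
+#         name = "base"
+#         build_config = "$target_gen_dir/base.build_config"
+#         build_config_target = ":base$build_config_target_suffix"
+#       },
+#     ]
+#     deps = [ ":foo__r8" ]
+#   }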
+template("dexsplitter") {
+  action_with_pydeps(target_name) {
+    forward_variables_from(invoker, [ "deps" ])
+    script = "//build/android/gyp/dexsplitter.py"
+    _stamp = "${target_gen_dir}/${target_name}.stamp"
+    outputs = [ _stamp ]
+
+    depfile = "${target_gen_dir}/${target_name}.d"
+    args = [
+      "--stamp",
+      rebase_path(_stamp, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--r8-path",
+      rebase_path(_r8_path, root_build_dir),
+      "--input-dex-zip",
+      rebase_path(invoker.input_dex_zip, root_build_dir),
+      "--proguard-mapping-file",
+      rebase_path(invoker.proguard_mapping, root_build_dir),
+    ]
+
+    foreach(_feature_module, invoker.all_modules) {
+      _rebased_module_build_config =
+          rebase_path(_feature_module.build_config, root_build_dir)
+      args += [
+        "--feature-name",
+        _feature_module.name,
+        "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)",
+      ]
+      if (!defined(invoker.feature_jars_args)) {
+        args += [ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:device_classpath)" ]
+      }
+      deps += [ _feature_module.build_config_target ]
+    }
+    if (defined(invoker.feature_jars_args)) {
+      args += invoker.feature_jars_args
+    }
+  }
+}
+
+# Allots native libraries depended on by feature modules to the module the
+# libraries should be packaged into. The packaging module may be different from
+# the dependent module when a library is depended on by multiple modules. In
+# that case the library is allotted to the modules' closest common ancestor in
+# the module dependency tree (see |parent| below).
+#
+# Variables:
+#   modules: List of scopes with the following format:
+#     name: The module's name.
+#     parent: The module's parent's name.
+#     build_config: Path to the module's build config.
+#     build_config_target: Target creating |build_config|.
+#   native_libraries_filearg_keys: Keys to be used in
+#     @FileArg(|build_config|:<keys>) expressions pointing to a list of native
+#     libraries to consider in |build_config|.
+#   output: Path to native libraries config.
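+#
+# Example (a hypothetical sketch; the module scopes and key name below are
+# assumptions):
+#   allot_native_libraries("foo_allotment") {
+#     output = "$target_gen_dir/foo_native_libraries.json"
+#     native_libraries_filearg_keys = [ "native:libraries" ]
+#     modules = [
+#       {
+#         name = "base"
+#         build_config = "$target_gen_dir/base.build_config"
+#         build_config_target = ":base$build_config_target_suffix"
+#       },
+#       {
+#         name = "feature"
+#         parent = "base"
+#         build_config = "$target_gen_dir/feature.build_config"
+#         build_config_target = ":feature$build_config_target_suffix"
+#       },
+#     ]
+#   }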
+template("allot_native_libraries") {
+  action_with_pydeps(target_name) {
+    script = "//build/android/gyp/allot_native_libraries.py"
+    args = [
+      "--output",
+      rebase_path(invoker.output, root_build_dir),
+    ]
+    outputs = [ invoker.output ]
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    inputs = []
+    foreach(_module, invoker.modules) {
+      deps += [ _module.build_config_target ]
+      inputs += [ _module.build_config ]
+      _rebased_build_config = rebase_path(_module.build_config, root_out_dir)
+      foreach(_key, invoker.native_libraries_filearg_keys) {
+        args += [
+          "--libraries",
+          "${_module.name},@FileArg($_rebased_build_config:$_key)",
+        ]
+      }
+      if (defined(_module.parent)) {
+        args += [
+          "--dep",
+          "${_module.parent}:${_module.name}",
+        ]
+      }
+    }
+  }
+}
diff --git a/src/build/config/android/linker_version_script.gni b/src/build/config/android/linker_version_script.gni
new file mode 100644
index 0000000..96d8b66
--- /dev/null
+++ b/src/build/config/android/linker_version_script.gni
@@ -0,0 +1,43 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+
+# Generate a custom linker version script that can later be used with
+# "-Wl,--version-script=<path>" ldflags.
+#
+# Variables:
+#    export_java_symbols: Optional. If true, also export all Java_* symbols
+#      exported for JNI.
+#    export_feature_registrations: Optional. If true, also export JNI
+#      registration symbols (assumed to be the JNI_OnLoad_* entry points).
+#    export_symbol_allowlist_files: Optional. List of paths to input files
+#      containing lists of symbols to export.
+#    linker_script: Path to output linker version script.
+#
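+# Example (a hypothetical sketch; the target name and paths are assumptions):
+#   generate_linker_version_script("foo_version_script") {
+#     linker_script = "$target_gen_dir/foo.lst"
+#     export_java_symbols = true
+#     export_symbol_allowlist_files = [ "//foo/exported_symbols.lst" ]
+#   }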
+template("generate_linker_version_script") {
+  action_with_pydeps(target_name) {
+    script = "//build/android/gyp/generate_linker_version_script.py"
+    outputs = [ invoker.linker_script ]
+    inputs = []
+    args = [ "--output=" + rebase_path(invoker.linker_script, root_build_dir) ]
+
+    if (defined(invoker.export_java_symbols) && invoker.export_java_symbols) {
+      args += [ "--export-java-symbols" ]
+    }
+
+    if (defined(invoker.export_feature_registrations) &&
+        invoker.export_feature_registrations) {
+      args += [ "--export-feature-registrations" ]
+    }
+
+    if (defined(invoker.export_symbol_allowlist_files)) {
+      foreach(file_, invoker.export_symbol_allowlist_files) {
+        inputs += [ file_ ]
+        args += [
+          "--export-symbol-allowlist-file",
+          rebase_path(file_, root_build_dir),
+        ]
+      }
+    }
+  }
+}
diff --git a/src/build/config/android/rules.gni b/src/build/config/android/rules.gni
new file mode 100644
index 0000000..e52396b
--- /dev/null
+++ b/src/build/config/android/rules.gni
@@ -0,0 +1,5136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+
+import("//build/config/android/channel.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/python.gni")
+import("//build/config/zip.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_android)
+
+declare_args() {
+  enable_jni_tracing = false
+}
+
+if (target_cpu == "arm") {
+  _sanitizer_arch = "arm"
+} else if (target_cpu == "arm64") {
+  _sanitizer_arch = "aarch64"
+} else if (target_cpu == "x86") {
+  _sanitizer_arch = "i686"
+}
+
+_sanitizer_runtimes = []
+if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
+  _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ]
+}
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+#   dist_dir: Directory for the exe and libraries. Everything in this directory
+#     will be deleted before copying in the exe and libraries.
+#   binary: Path to (stripped) executable.
+#   extra_files: List of extra files to copy in (optional).
+#
+# Example
+#   create_native_executable_dist("foo_dist") {
+#     dist_dir = "$root_build_dir/foo_dist"
+#     binary = "$root_build_dir/foo"
+#     deps = [ ":the_thing_that_makes_foo" ]
+#   }
+template("create_native_executable_dist") {
+  forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+  _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list"
+
+  _sanitizer_runtimes_target_name = "${target_name}__sanitizer_runtimes"
+  group(_sanitizer_runtimes_target_name) {
+    metadata = {
+      shared_libraries = _sanitizer_runtimes
+    }
+  }
+
+  generated_file("${target_name}__library_list") {
+    forward_variables_from(invoker, [ "deps" ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":${_sanitizer_runtimes_target_name}" ]
+    output_conversion = "json"
+    outputs = [ _libraries_list ]
+    data_keys = [ "shared_libraries" ]
+    walk_keys = [ "shared_libraries_barrier" ]
+    rebase = root_build_dir
+  }
+
+  copy_ex(target_name) {
+    inputs = [
+      _libraries_list,
+      invoker.binary,
+    ]
+
+    dest = invoker.dist_dir
+    data = [ "${invoker.dist_dir}/" ]
+
+    _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir)
+    _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir)
+    args = [
+      "--clear",
+      "--files=@FileArg($_rebased_libraries_list)",
+      "--files=$_rebased_binaries_list",
+    ]
+    if (defined(invoker.extra_files)) {
+      _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir)
+      args += [ "--files=$_rebased_extra_files" ]
+    }
+
+    _depfile = "$target_gen_dir/$target_name.d"
+    _stamp_file = "$target_gen_dir/$target_name.stamp"
+    outputs = [ _stamp_file ]
+    args += [
+      "--depfile",
+      rebase_path(_depfile, root_build_dir),
+      "--stamp",
+      rebase_path(_stamp_file, root_build_dir),
+    ]
+
+    deps = [ ":${target_name}__library_list" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+}
+
+if (enable_java_templates) {
+  import("//build/config/sanitizers/sanitizers.gni")
+
+  # JNI target implementation. See generate_jni or generate_jar_jni for usage.
+  template("generate_jni_impl") {
+    _jni_output_dir = "${target_gen_dir}/${target_name}"
+    if (defined(invoker.jni_generator_include)) {
+      _jni_generator_include = invoker.jni_generator_include
+      _jni_generator_include_deps = []
+    } else {
+      _jni_generator_include =
+          "//base/android/jni_generator/jni_generator_helper.h"
+      _jni_generator_include_deps = [
+        # Using //base/android/jni_generator/jni_generator_helper.h introduces
+        # a dependency on buildflags targets indirectly through
+        # base/android/jni_android.h, which is part of the //base target.
+        # This can't depend directly on //base without causing a dependency
+        # cycle, though.
+        "//base:debugging_buildflags",
+        "//base:logging_buildflags",
+        "//build:chromeos_buildflags",
+      ]
+    }
+
+    action_with_pydeps(target_name) {
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      script = "//base/android/jni_generator/jni_generator.py"
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "deps",
+                                   "public_deps",
+                                 ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += _jni_generator_include_deps
+      inputs = []
+      args = [
+        "--ptr_type=long",
+        "--includes",
+        rebase_path(_jni_generator_include, _jni_output_dir),
+      ]
+
+      if (defined(invoker.classes)) {
+        if (defined(invoker.jar_file)) {
+          _jar_file = invoker.jar_file
+        } else {
+          _jar_file = android_sdk_jar
+        }
+        inputs += [ _jar_file ]
+        args += [
+          "--jar_file",
+          rebase_path(_jar_file, root_build_dir),
+        ]
+        _input_args = invoker.classes
+        _input_names = invoker.classes
+        if (defined(invoker.always_mangle) && invoker.always_mangle) {
+          args += [ "--always_mangle" ]
+        }
+      } else {
+        assert(defined(invoker.sources))
+        inputs += invoker.sources
+        _input_args = rebase_path(invoker.sources, root_build_dir)
+        _input_names = invoker.sources
+        if (use_hashed_jni_names) {
+          args += [ "--use_proxy_hash" ]
+        }
+        if (defined(invoker.namespace)) {
+          args += [ "-n ${invoker.namespace}" ]
+        }
+      }
+      if (defined(invoker.split_name)) {
+        args += [ "--split_name=${invoker.split_name}" ]
+      }
+
+      outputs = []
+      foreach(_name, _input_names) {
+        _name_part = get_path_info(_name, "name")
+        outputs += [ "${_jni_output_dir}/${_name_part}_jni.h" ]
+      }
+
+      # Avoid passing GN lists because not all webrtc embedders use //build.
+      foreach(_output, outputs) {
+        args += [
+          "--output_file",
+          rebase_path(_output, root_build_dir),
+        ]
+      }
+      foreach(_input, _input_args) {
+        args += [ "--input_file=$_input" ]
+      }
+
+      if (enable_profiling) {
+        args += [ "--enable_profiling" ]
+      }
+      if (enable_jni_tracing) {
+        args += [ "--enable_tracing" ]
+      }
+    }
+  }
+
+  # Declare a jni target
+  #
+  # This target generates the native jni bindings for a set of .java files.
+  #
+  # See base/android/jni_generator/jni_generator.py for more info about the
+  # format of generating JNI bindings.
+  #
+  # Variables
+  #   sources: list of .java files to generate jni for
+  #   namespace: Specify the namespace for the generated header file.
+  #   deps, public_deps: As normal
+  #
+  # Example
+  #   # Target located in base/BUILD.gn.
+  #   generate_jni("foo_jni") {
+  #     # Generates gen/base/foo_jni/Foo_jni.h
+  #     # To use: #include "base/foo_jni/Foo_jni.h"
+  #     sources = [
+  #       "android/java/src/org/chromium/foo/Foo.java",
+  #       ...,
+  #     ]
+  #   }
+  template("generate_jni") {
+    generate_jni_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    }
+  }
+
+  # Declare a jni target for a prebuilt jar
+  #
+  # This target generates the native jni bindings for a set of classes in a .jar.
+  #
+  # See base/android/jni_generator/jni_generator.py for more info about the
+  # format of generating JNI bindings.
+  #
+  # Variables
+  #   classes: list of .class files in the jar to generate jni for. These should
+  #     include the full path to the .class file.
+  #   jar_file: the path to the .jar. If not provided, will default to the sdk's
+  #     android.jar
+  #   always_mangle: Mangle all generated method names. By default, the script
+  #     only mangles methods that cause ambiguity due to method overload.
+  #   deps, public_deps: As normal
+  #
+  # Example
+  #   # Target located in base/BUILD.gn.
+  #   generate_jar_jni("foo_jni") {
+  #     # Generates gen/base/foo_jni/Runnable_jni.h
+  #     # To use: #include "base/foo_jni/Runnable_jni.h"
+  #     classes = [
+  #       "android/view/Foo.class",
+  #     ]
+  #   }
+  template("generate_jar_jni") {
+    generate_jni_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    }
+  }
+
+  # Declare a jni registration target.
+  #
+  # This target generates a srcjar containing a copy of GEN_JNI.java, which has
+  # the native methods of all dependent java files. It can also create a .h file
+  # for use with manual JNI registration.
+  #
+  # The script does not scan any generated sources (those within .srcjars, or
+  # within root_build_dir). This could be fixed by adding deps & logic to scan
+  # .srcjars, but isn't currently needed.
+  #
+  # See base/android/jni_generator/jni_registration_generator.py for more info
+  # about the format of the header file.
+  #
+  # Variables
+  #   targets: List of .build_config supported targets to provide java sources.
+  #   header_output: Path to the generated .h file (optional).
+  #   sources_exclusions: List of .java files that should be skipped. (optional)
+  #   namespace: Registration functions will be wrapped into this. (optional)
+  #   require_native_mocks: Enforce that any native calls using
+  #     org.chromium.base.annotations.NativeMethods must have a mock set
+  #     (optional).
+  #   enable_native_mocks: Allow native calls using
+  #     org.chromium.base.annotations.NativeMethods to be mocked in tests
+  #     (optional).
+  #   no_transitive_deps: Generate registration for only the Java source in the
+  #     specified target(s). This is useful for generating registration for
+  #     feature modules, without including base module dependencies.
+  #
+  # Example
+  #   generate_jni_registration("chrome_jni_registration") {
+  #     targets = [ ":chrome_public_apk" ]
+  #     header_output = "$target_gen_dir/$target_name.h"
+  #     sources_exclusions = [
+  #       "//path/to/Exception.java",
+  #     ]
+  #   }
+  template("generate_jni_registration") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+      script = "//base/android/jni_generator/jni_registration_generator.py"
+      inputs = []
+      deps = []
+      _srcjar_output = "$target_gen_dir/$target_name.srcjar"
+      outputs = [ _srcjar_output ]
+      depfile = "$target_gen_dir/$target_name.d"
+
+      args = [
+        "--srcjar-path",
+        rebase_path(_srcjar_output, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+      foreach(_target, invoker.targets) {
+        deps += [ "${_target}$build_config_target_suffix($default_toolchain)" ]
+        _build_config =
+            get_label_info("${_target}($default_toolchain)", "target_gen_dir") +
+            "/" + get_label_info("${_target}($default_toolchain)", "name") +
+            ".build_config"
+        _rebased_build_config = rebase_path(_build_config, root_build_dir)
+        inputs += [ _build_config ]
+
+        if (defined(invoker.no_transitive_deps) && invoker.no_transitive_deps) {
+          args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:java_sources_file)" ]
+        } else {
+          args += [
+            # This is a list of .sources files.
+            "--sources-files=@FileArg($_rebased_build_config:deps_info:jni:all_source)",
+          ]
+        }
+      }
+
+      if (use_hashed_jni_names) {
+        args += [ "--use_proxy_hash" ]
+      }
+
+      if (defined(invoker.enable_native_mocks) && invoker.enable_native_mocks) {
+        args += [ "--enable_proxy_mocks" ]
+
+        if (defined(invoker.require_native_mocks) &&
+            invoker.require_native_mocks) {
+          args += [ "--require_mocks" ]
+        }
+      }
+
+      if (defined(invoker.header_output)) {
+        outputs += [ invoker.header_output ]
+        args += [
+          "--header-path",
+          rebase_path(invoker.header_output, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.sources_exclusions)) {
+        _rebase_sources_exclusions =
+            rebase_path(invoker.sources_exclusions, root_build_dir)
+        args += [ "--sources-exclusions=$_rebase_sources_exclusions" ]
+      }
+
+      if (defined(invoker.namespace)) {
+        args += [ "--namespace=${invoker.namespace}" ]
+      }
+    }
+  }
+
+  # Declare a target for c-preprocessor-generated java files
+  #
+  # NOTE: For generating Java counterparts to enums, prefer using the
+  #       java_cpp_enum rule instead.
+  #
+  # This target generates java files using the host C pre-processor. Each file in
+  # sources will be compiled using the C pre-processor. If include_path is
+  # specified, it will be passed (with --I) to the pre-processor.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the C pre-processor. For each
+  #     file in sources, there will be one .java file in the final .srcjar. For a
+  #     file named FooBar.template, a java file will be created with name
+  #     FooBar.java.
+  #   inputs: additional compile-time dependencies. Any files
+  #     `#include`-ed in the templates should be listed here.
+  #   defines: List of -D arguments for the preprocessor.
+  #
+  # Example
+  #   java_cpp_template("foo_generated_enum") {
+  #     sources = [
+  #       "android/java/templates/Foo.template",
+  #     ]
+  #     inputs = [
+  #       "android/java/templates/native_foo_header.h",
+  #     ]
+  #   }
+  template("java_cpp_template") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "data_deps",
+                               "deps",
+                               "inputs",
+                               "public_deps",
+                               "sources",
+                               "testonly",
+                               "visibility",
+                             ])
+      script = "//build/android/gyp/gcc_preprocess.py"
+      outputs = [ "$target_gen_dir/$target_name.srcjar" ]
+
+      _include_dirs = [
+        "//",
+        root_gen_dir,
+      ]
+      _rebased_include_dirs = rebase_path(_include_dirs, root_build_dir)
+      args = [
+        "--include-dirs=$_rebased_include_dirs",
+        "--output",
+        rebase_path(outputs[0], root_build_dir),
+      ]
+      if (defined(invoker.defines)) {
+        foreach(_define, invoker.defines) {
+          args += [
+            "--define",
+            _define,
+          ]
+        }
+      }
+      args += rebase_path(sources, root_build_dir)
+    }
+  }
+
+  # Declare a target for generating Java classes from C++ enums.
+  #
+  # This target generates Java files from C++ enums using a script.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the script. For each annotated
+  #     enum contained in the sources files the script will generate a .java
+  #     file with the same name as the name of the enum.
+  #
+  # Example
+  #   java_cpp_enum("foo_generated_enum") {
+  #     sources = [
+  #       "src/native_foo_header.h",
+  #     ]
+  #   }
+  template("java_cpp_enum") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ])
+
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      script = "//build/android/gyp/java_cpp_enum.py"
+
+      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+      _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+
+      args = [ "--srcjar=$_rebased_srcjar_path" ] + _rebased_sources
+      outputs = [ _srcjar_path ]
+    }
+  }
+
+  # Declare a target for generating Java classes with string constants matching
+  # those found in C++ files using a python script.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the script. For each string
+  #            constant in the source files, the script will add a corresponding
+  #            Java string to the specified template file.
+  # Example
+  #   java_cpp_strings("foo_switches") {
+  #     sources = [
+  #       "src/foo_switches.cc",
+  #     ]
+  #     template = "src/templates/FooSwitches.java.tmpl
+  #   }
+  #
+  # foo_switches.cc:
+  #
+  # // A switch.
+  # const char kASwitch = "a-switch";
+  #
+  # FooSwitches.java.tmpl
+  #
+  # // Copyright {YEAR} The Chromium Authors. All rights reserved.
+  # // Use of this source code is governed by a BSD-style license that can be
+  # // found in the LICENSE file.
+  #
+  # // This file is autogenerated by
+  # //     {SCRIPT_NAME}
+  # // From
+  # //     {SOURCE_PATH}, and
+  # //     {TEMPLATE_PATH}
+  #
+  # package my.java.package;
+  #
+  # public abstract class FooSwitches {{
+  #     // ...snip...
+  # {NATIVE_STRINGS}
+  #     // ...snip...
+  # }}
+  #
+  # result:
+  #   A FooSwitches.java file, defining a class named FooSwitches in the package
+  #   my.java.package.
+  template("java_cpp_strings") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ])
+
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      script = "//build/android/gyp/java_cpp_strings.py"
+
+      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+      _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+      _rebased_template = rebase_path(invoker.template, root_build_dir)
+
+      args = [
+        "--srcjar=$_rebased_srcjar_path",
+        "--template=$_rebased_template",
+      ]
+      args += _rebased_sources
+      sources += [ invoker.template ]
+
+      outputs = [ _srcjar_path ]
+    }
+  }
+
+  # Declare a target for generating Java classes with string constants matching
+  # those found in C++ base::Feature declarations, using a python script.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the script. For each
+  #            base::Feature in the source files, the script will add a
+  #            corresponding Java string for that feature's name to the
+  #            specified template file.
+  # Example
+  #   java_cpp_features("foo_features") {
+  #     sources = [
+  #       "src/foo_features.cc",
+  #     ]
+  #     template = "src/templates/FooFeatures.java.tmpl
+  #   }
+  #
+  # foo_features.cc:
+  #
+  # // A feature.
+  # const base::Feature kSomeFeature{"SomeFeature",
+  #                                  base::FEATURE_DISABLED_BY_DEFAULT};
+  #
+  # FooFeatures.java.tmpl
+  #
+  # // Copyright $YEAR The Chromium Authors. All rights reserved.
+  # // Use of this source code is governed by a BSD-style license that can be
+  # // found in the LICENSE file.
+  #
+  # package my.java.package;
+  #
+  # public final class FooFeatures {{
+  #     // ...snip...
+  # {NATIVE_STRINGS}
+  #     // ...snip...
+  #     // Do not instantiate this class.
+  #     private FooFeatures() {{}}
+  # }}
+  #
+  # result:
+  #   A FooFeatures.java file, defining a class named FooFeatures in the package
+  #   my.java.package.
+  template("java_cpp_features") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ])
+
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      script = "//build/android/gyp/java_cpp_features.py"
+
+      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+      _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+      _rebased_template = rebase_path(invoker.template, root_build_dir)
+
+      args = [
+        "--srcjar=$_rebased_srcjar_path",
+        "--template=$_rebased_template",
+      ]
+      args += _rebased_sources
+      sources += [ invoker.template ]
+
+      outputs = [ _srcjar_path ]
+    }
+  }
+
+  # Declare a target for processing a Jinja template.
+  #
+  # Variables
+  #   input: The template file to be processed.
+  #   includes: List of files {% include %}'ed by input.
+  #   output: Where to save the result.
+  #   variables: (Optional) A list of variables to make available to the template
+  #     processing environment, e.g. ["name=foo", "color=red"].
+  #
+  # Example
+  #   jinja_template("chrome_public_manifest") {
+  #     input = "java/AndroidManifest.xml"
+  #     output = "$target_gen_dir/AndroidManifest.xml"
+  #   }
+  template("jinja_template") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      inputs = [ invoker.input ]
+      if (defined(invoker.includes)) {
+        inputs += invoker.includes
+      }
+      script = "//build/android/gyp/jinja_template.py"
+
+      outputs = [ invoker.output ]
+
+      args = [
+        "--loader-base-dir",
+        rebase_path("//", root_build_dir),
+        "--inputs",
+        rebase_path(invoker.input, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+        "--check-includes",
+      ]
+      if (defined(invoker.includes)) {
+        _rebased_includes = rebase_path(invoker.includes, root_build_dir)
+        args += [ "--includes=$_rebased_includes" ]
+      }
+      if (defined(invoker.variables)) {
+        args += [ "--variables=${invoker.variables}" ]
+      }
+    }
+  }
+
+  # Writes native libraries to a NativeLibraries.java file.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables:
+  #   native_libraries_list_file: (Optional) Path to file listing all native
+  #     libraries to write.
+  #   version_number: (Optional) String of expected version of 'main' native
+  #     library.
+  #   enable_chromium_linker: (Optional) Whether to use the Chromium linker.
+  #   load_library_from_apk: (Optional) Whether libraries should be loaded from
+  #     the APK without uncompressing.
+  #   use_final_fields: True to use final fields. When false, all other
+  #       variables must not be set.
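+  #
+  # Example (a hypothetical sketch; the target name and list file path are
+  # assumptions):
+  #   write_native_libraries_java("foo_native_libraries") {
+  #     use_final_fields = true
+  #     native_libraries_list_file = "$target_gen_dir/foo_libraries.list"
+  #     enable_chromium_linker = true
+  #   }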
+  template("write_native_libraries_java") {
+    _native_libraries_file = "$target_gen_dir/$target_name.srcjar"
+    if (current_cpu == "arm" || current_cpu == "arm64") {
+      _cpu_family = "CPU_FAMILY_ARM"
+    } else if (current_cpu == "x86" || current_cpu == "x64") {
+      _cpu_family = "CPU_FAMILY_X86"
+    } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+      _cpu_family = "CPU_FAMILY_MIPS"
+    } else {
+      assert(false, "Unsupported CPU family")
+    }
+
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      script = "//build/android/gyp/write_native_libraries_java.py"
+      outputs = [ _native_libraries_file ]
+      args = [
+        "--output",
+        rebase_path(_native_libraries_file, root_build_dir),
+        "--cpu-family",
+        _cpu_family,
+      ]
+      if (invoker.use_final_fields) {
+        # Write native_libraries_list_file via depfile rather than specifying
+        # it as a dep in order to allow R8 to run in parallel with native
+        # compilation.
+        depfile = "$target_gen_dir/$target_name.d"
+        args += [
+          "--final",
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--native-libraries-list",
+          rebase_path(invoker.native_libraries_list_file, root_build_dir),
+        ]
+        if (defined(invoker.main_component_library)) {
+          args += [
+            "--main-component-library",
+            invoker.main_component_library,
+          ]
+        }
+        if (defined(invoker.enable_chromium_linker) &&
+            invoker.enable_chromium_linker) {
+          args += [ "--enable-chromium-linker" ]
+        }
+        if (defined(invoker.load_library_from_apk) &&
+            invoker.load_library_from_apk) {
+          args += [ "--load-library-from-apk" ]
+        }
+        if (defined(invoker.use_modern_linker) && invoker.use_modern_linker) {
+          args += [ "--use-modern-linker" ]
+        }
+      }
+    }
+  }
+
+  # Declare a target for a set of Android resources generated at build
+  # time and stored in a single zip archive. The content of the archive
+  # should match the layout of a regular Android res/ folder (but the
+  # archive should not include a top-level res/ directory).
+  #
+  # Note that there is no .srcjar, R.txt or package name associated
+  # with this target.
+  #
+  # Variables:
+  #   generated_resources_zip: Generated zip archive path.
+  #   generating_target: Name of the target generating
+  #     generated_resources_zip. This rule will check that the zip is part
+  #     of that target's outputs.
+  #   deps: Specifies the dependencies of this target. Any Android resources
+  #     listed here will also be included *after* this one when compiling
+  #     all resources for a final apk or junit binary. This is useful to
+  #     ensure that the resources of the current target override those of the
+  #     dependency as well (which would not work if you added these deps to
+  #     the generating target's dependencies).
+  #
+  # Example
+  #   _zip_archive = "$target_gen_dir/${target_name}.resources_zip"
+  #
+  #   action("my_resources__create_zip") {
+  #     _depfile = "$target_gen_dir/${target_name}.d"
+  #     script = "//build/path/to/create_my_resources_zip.py"
+  #     args = [
+  #       "--depfile", rebase_path(_depfile, root_build_dir),
+  #       "--output-zip", rebase_path(_zip_archive, root_build_dir),
+  #     ]
+  #     inputs = []
+  #     outputs = [ _zip_archive ]
+  #     depfile = _depfile
+  #   }
+  #
+  #   android_generated_resources("my_resources") {
+  #      generated_resources_zip = _zip_archive
+  #      generating_target = ":my_resources__create_zip"
+  #   }
+  #
+  template("android_generated_resources") {
+    forward_variables_from(invoker, [ "testonly" ])
+    _build_config = "$target_gen_dir/${target_name}.build_config"
+    _rtxt_out_path = "$target_gen_dir/${target_name}.R.txt"
+    write_build_config("$target_name$build_config_target_suffix") {
+      forward_variables_from(invoker, [ "resource_overlay" ])
+
+      build_config = _build_config
+      resources_zip = invoker.generated_resources_zip
+      type = "android_resources"
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+      r_text = _rtxt_out_path
+    }
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_deps = [
+        ":$target_name$build_config_target_suffix",
+        invoker.generating_target,
+      ]
+      inputs = [ invoker.generated_resources_zip ]
+      outputs = [ _rtxt_out_path ]
+      script = "//build/android/gyp/create_r_txt.py"
+      args = [
+        "--resources-zip-path",
+        rebase_path(invoker.generated_resources_zip, root_build_dir),
+        "--rtxt-path",
+        rebase_path(_rtxt_out_path, root_build_dir),
+      ]
+    }
+  }
+
+  # Declare a target for processing Android resources as Jinja templates.
+  #
+  # This takes an Android resource directory where each resource is a Jinja
+  # template, processes each template, then packages the results in a zip file
+  # which can be consumed by an android resources, library, or apk target.
+  #
+  # If this target is included in the deps of an android resources/library/apk,
+  # the resources will be included with that target.
+  #
+  # Variables
+  #   resources: The list of resources files to process.
+  #   res_dir: The resource directory containing the resources.
+  #   variables: (Optional) A list of variables to make available to the template
+  #     processing environment, e.g. ["name=foo", "color=red"].
+  #
+  # Example
+  #   jinja_template_resources("chrome_public_template_resources") {
+  #     res_dir = "res_template"
+  #     resources = ["res_template/xml/syncable.xml"]
+  #     variables = ["color=red"]
+  #   }
+  template("jinja_template_resources") {
+    _resources_zip = "$target_out_dir/${target_name}.resources.zip"
+    _generating_target_name = "${target_name}__template"
+
+    action_with_pydeps(_generating_target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      inputs = invoker.resources
+      script = "//build/android/gyp/jinja_template.py"
+
+      outputs = [ _resources_zip ]
+
+      _rebased_resources = rebase_path(invoker.resources, root_build_dir)
+      args = [
+        "--inputs=${_rebased_resources}",
+        "--inputs-base-dir",
+        rebase_path(invoker.res_dir, root_build_dir),
+        "--outputs-zip",
+        rebase_path(_resources_zip, root_build_dir),
+        "--check-includes",
+      ]
+      if (defined(invoker.variables)) {
+        variables = invoker.variables
+        args += [ "--variables=${variables}" ]
+      }
+    }
+
+    android_generated_resources(target_name) {
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "deps",
+                                   "resource_overlay",
+                                 ])
+      generating_target = ":$_generating_target_name"
+      generated_resources_zip = _resources_zip
+    }
+  }
+
+  # Declare a prebuilt android native library.
+  #
+  # This takes a base directory and library name and then looks for the library
+  # in <base dir>/$android_app_abi/<library name>.
+  #
+  # If you depend on this target, the library is stripped and output to the
+  # same locations non-prebuilt libraries are output.
+  #
+  # Variables
+  #   base_dir: Directory where all ABIs of the library live.
+  #   lib_name: Name of the library .so file.
+  #
+  # Example
+  #   android_native_prebuilt("elements_native") {
+  #     base_dir = "//third_party/elements"
+  #     lib_name = "elements.so"
+  #   }
+  template("android_native_prebuilt") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "testonly",
+                             ])
+      script = "//build/android/gyp/process_native_prebuilt.py"
+      _lib_path = "${invoker.base_dir}/$android_app_abi/${invoker.lib_name}"
+      _stripped_output_path = "$root_out_dir/${invoker.lib_name}"
+      _unstripped_output_path =
+          "$root_out_dir/lib.unstripped/${invoker.lib_name}"
+      inputs = [ _lib_path ]
+      outputs = [
+        _stripped_output_path,
+        _unstripped_output_path,
+      ]
+
+      # Add unstripped output to runtime deps for use by bots during stacktrace
+      # symbolization.
+      data = [ _unstripped_output_path ]
+
+      _rebased_lib_path = rebase_path(_lib_path, root_build_dir)
+      _rebased_stripped_output_path =
+          rebase_path(_stripped_output_path, root_build_dir)
+      _rebased_unstripped_output_path =
+          rebase_path(_unstripped_output_path, root_build_dir)
+      _strip_tool_path =
+          rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip",
+                      root_build_dir)
+
+      args = [
+        "--strip-path=$_strip_tool_path",
+        "--input-path=$_rebased_lib_path",
+        "--stripped-output-path=$_rebased_stripped_output_path",
+        "--unstripped-output-path=$_rebased_unstripped_output_path",
+      ]
+    }
+  }
+
+  # Declare an Android resources target
+  #
+  # This creates a resources zip file that will be used when building an Android
+  # library or apk and included into a final apk.
+  #
+  # To include these resources in a library/apk, this target should be listed in
+  # the library's deps. A library/apk will also include any resources used by its
+  # own dependencies.
+  #
+  # Variables
+  #   sources: List of resource files for this target.
+  #   deps: Specifies the dependencies of this target. Any Android resources
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   alternative_android_sdk_dep: Optional. Alternative Android system
+  #     android java target to use.
+  #   android_manifest: AndroidManifest.xml for this target (optional). Will be
+  #     merged into apks that directly or indirectly depend on this target.
+  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+  #   custom_package: java package for generated .java files.
+  #   shared_resources: If true, make a resource package that can be loaded by
+  #     a different application at runtime to access the package's resources.
+  #   resource_overlay: Whether the resources in 'sources' should override
+  #     resources with the same name. Does not affect the behaviour of any
+  #     android_resources() deps of this target. If a target with
+  #     resource_overlay=true depends on another target with
+  #     resource_overlay=true the target with the dependency overrides the
+  #     other.
+  #   r_text_file: (optional) path to pre-generated R.txt to be used when
+  #     generating R.java instead of resource-based aapt-generated one.
+  #   recursive_resource_deps: (optional) whether deps should be walked
+  #     recursively to find resource deps.
+  #
+  # Example:
+  #   android_resources("foo_resources") {
+  #     deps = [":foo_strings_grd"]
+  #     sources = [
+  #       "res/drawable/foo1.xml",
+  #       "res/drawable/foo2.xml",
+  #     ]
+  #     custom_package = "org.chromium.foo"
+  #   }
+  #
+  #   android_resources("foo_resources_overrides") {
+  #     deps = [":foo_resources"]
+  #     sources = [
+  #       "res_overrides/drawable/foo1.xml",
+  #       "res_overrides/drawable/foo2.xml",
+  #     ]
+  #   }
+  template("android_resources") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _base_path = "$target_gen_dir/$target_name"
+    if (defined(invoker.v14_skip)) {
+      not_needed(invoker, [ "v14_skip" ])
+    }
+
+    assert(!defined(invoker.resource_dirs) || defined(invoker.sources),
+           "resource_dirs in android_resources is deprecated. Please use " +
+               "sources=[] and list resource files instead. Details: " +
+               "https://crbug.com/1026378")
+    _res_sources_path = "$target_gen_dir/${invoker.target_name}.res.sources"
+
+    _resources_zip = "$target_out_dir/$target_name.resources.zip"
+    _r_text_out_path = _base_path + "_R.txt"
+    _build_config = _base_path + ".build_config"
+    _build_config_target_name = "$target_name$build_config_target_suffix"
+
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps += invoker.deps
+    }
+
+    if (defined(invoker.alternative_android_sdk_dep)) {
+      _deps += [ invoker.alternative_android_sdk_dep ]
+    } else {
+      _deps += [ "//third_party/android_sdk:android_sdk_java" ]
+    }
+
+    _resource_files = []
+    if (defined(invoker.sources)) {
+      _resource_files += invoker.sources
+    }
+
+    _rebased_resource_files = rebase_path(_resource_files, root_build_dir)
+    write_file(_res_sources_path, _rebased_resource_files)
+
+    # This is necessary so we only lint chromium resources.
+    if (defined(invoker.chromium_code)) {
+      _chromium_code = invoker.chromium_code
+    } else {
+      # Default based on whether target is in third_party.
+      _chromium_code =
+          filter_exclude([ get_label_info(":$target_name", "dir") ],
+                         [ "*\bthird_party\b*" ]) != []
+    }
+
+    write_build_config(_build_config_target_name) {
+      type = "android_resources"
+      build_config = _build_config
+      resources_zip = _resources_zip
+      res_sources_path = _res_sources_path
+      chromium_code = _chromium_code
+
+      forward_variables_from(invoker,
+                             [
+                               "android_manifest",
+                               "android_manifest_dep",
+                               "custom_package",
+                               "mergeable_android_manifests",
+                               "resource_overlay",
+                               "recursive_resource_deps",
+                             ])
+
+      r_text = _r_text_out_path
+      possible_config_deps = _deps
+
+      # Always merge manifests from resources.
+      # * Might want to change this at some point for consistency and clarity,
+      #   but keeping for backwards-compatibility.
+      if (!defined(mergeable_android_manifests) && defined(android_manifest)) {
+        mergeable_android_manifests = [ android_manifest ]
+      }
+    }
+
+    prepare_resources(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "strip_drawables",
+                               "visibility",
+                             ])
+      deps = _deps
+
+      res_sources_path = _res_sources_path
+      sources = _resource_files
+
+      resources_zip = _resources_zip
+      r_text_out_path = _r_text_out_path
+
+      if (defined(invoker.r_text_file)) {
+        r_text_in_path = invoker.r_text_file
+      }
+    }
+  }
+
+  # Declare an Android assets target.
+  #
+  # Defines a set of files to include as assets in a dependent apk.
+  #
+  # To include these assets in an apk, this target should be listed in
+  # the apk's deps, or in the deps of a library target used by an apk.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Any Android assets
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   sources: List of files to include as assets.
+  #   renaming_sources: List of files to include as assets and be renamed.
+  #   renaming_destinations: List of asset paths for files in renaming_sources.
+  #   disable_compression: Whether to disable compression for files that are
+  #     known to be compressible (default: false).
+  #   treat_as_locale_paks: Causes base's BuildConfig.java to consider these
+  #     assets to be locale paks.
+  #
+  # Example:
+  # android_assets("content_shell_assets") {
+  #   deps = [
+  #     ":generates_foo",
+  #     ":other_assets",
+  #   ]
+  #   sources = [
+  #     "//path/asset1.png",
+  #     "//path/asset2.png",
+  #     "$target_gen_dir/foo.dat",
+  #   ]
+  # }
+  #
+  # android_assets("overriding_content_shell_assets") {
+  #   deps = [ ":content_shell_assets" ]
+  #   # Override foo.dat from content_shell_assets.
+  #   sources = [ "//custom/foo.dat" ]
+  #   renaming_sources = [ "//path/asset2.png" ]
+  #   renaming_destinations = [ "renamed/asset2.png" ]
+  # }
+  template("android_assets") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "$target_name$build_config_target_suffix"
+
+    write_build_config(_build_config_target_name) {
+      type = "android_assets"
+      build_config = _build_config
+
+      forward_variables_from(invoker,
+                             [
+                               "disable_compression",
+                               "treat_as_locale_paks",
+                             ])
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+
+      if (defined(invoker.sources)) {
+        asset_sources = invoker.sources
+      }
+      if (defined(invoker.renaming_sources)) {
+        assert(defined(invoker.renaming_destinations))
+        _source_count = 0
+        foreach(_, invoker.renaming_sources) {
+          _source_count += 1
+        }
+        _dest_count = 0
+        foreach(_, invoker.renaming_destinations) {
+          _dest_count += 1
+        }
+        assert(
+            _source_count == _dest_count,
+            "android_assets() renaming_sources.length != renaming_destinations.length")
+        asset_renaming_sources = invoker.renaming_sources
+        asset_renaming_destinations = invoker.renaming_destinations
+      }
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker, [ "deps" ])
+      public_deps = [ ":$_build_config_target_name" ]
+    }
+  }
+
+  # Declare a group() that supports forwarding java dependency information.
+  #
+  # Example
+  #   java_group("conditional_deps") {
+  #     if (enable_foo) {
+  #       deps = [":foo_java"]
+  #     }
+  #   }
+  template("java_group") {
+    _build_config_vars = [
+      "input_jars_paths",
+      "mergeable_android_manifests",
+      "proguard_configs",
+    ]
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    write_build_config("$target_name$build_config_target_suffix") {
+      forward_variables_from(invoker, _build_config_vars)
+      type = "group"
+      build_config = "$target_gen_dir/${invoker.target_name}.build_config"
+      supports_android = true
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+    }
+    foreach(_group_name,
+            [
+              "header",
+              "impl",
+              "assetres",
+            ]) {
+      java_lib_group("${target_name}__${_group_name}") {
+        forward_variables_from(invoker, [ "deps" ])
+        group_name = _group_name
+      }
+    }
+    group(target_name) {
+      forward_variables_from(invoker,
+                             "*",
+                             _build_config_vars + TESTONLY_AND_VISIBILITY)
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$target_name$build_config_target_suffix" ]
+    }
+  }
+
+  # Declare a Java executable target
+  #
+  # Same as java_library, but also creates a wrapper script within
+  # $root_out_dir/bin.
+  #
+  # Supports all variables of java_library(), plus:
+  #   main_class: When specified, a wrapper script is created within
+  #     $root_build_dir/bin to launch the binary with the given class as the
+  #     entrypoint.
+  #   wrapper_script_name: Filename for the wrapper script (default=target_name)
+  #   wrapper_script_args: List of additional arguments for the wrapper script.
+  #
+  # Example
+  #   java_binary("foo") {
+  #     sources = [ "org/chromium/foo/FooMain.java" ]
+  #     deps = [ ":bar_java" ]
+  #     main_class = "org.chromium.foo.FooMain"
+  #   }
+  #
+  #   java_binary("foo") {
+  #     jar_path = "lib/prebuilt.jar"
+  #     deps = [ ":bar_java" ]
+  #     main_class = "org.chromium.foo.FooMain"
+  #   }
+  template("java_binary") {
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      type = "java_binary"
+      if (!defined(data_deps)) {
+        data_deps = []
+      }
+      data_deps += [ "//third_party/jdk:java_data" ]
+    }
+  }
+
+  # Declare a Java Annotation Processor.
+  #
+  # Supports all variables of java_library(), plus:
+  #   jar_path: Path to a prebuilt jar. Mutually exclusive with sources &
+  #     srcjar_deps.
+  #   main_class: The fully-qualified class name of the processor's entry
+  #       point.
+  #
+  # Example
+  #   java_annotation_processor("foo_processor") {
+  #     sources = [ "org/chromium/foo/FooProcessor.java" ]
+  #     deps = [ ":bar_java" ]
+  #     main_class = "org.chromium.foo.FooProcessor"
+  #   }
+  #
+  #   java_annotation_processor("foo_processor") {
+  #     jar_path = "lib/prebuilt.jar"
+  #     main_class = "org.chromium.foo.FooMain"
+  #   }
+  #
+  #   java_library("...") {
+  #     annotation_processor_deps = [":foo_processor"]
+  #   }
+  #
+  template("java_annotation_processor") {
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      type = "java_annotation_processor"
+    }
+  }
+
+  # Declare a Junit executable target
+  #
+  # This target creates an executable from java code for running as a junit test
+  # suite. The executable will be in the output folder's /bin/ directory.
+  #
+  # Supports all variables of java_binary().
+  #
+  # Example
+  #   junit_binary("foo") {
+  #     sources = [ "org/chromium/foo/FooTest.java" ]
+  #     deps = [ ":bar_java" ]
+  #   }
+  template("junit_binary") {
+    testonly = true
+
+    _java_binary_target_name = "${target_name}__java_binary"
+    _test_runner_target_name = "${target_name}__test_runner_script"
+    _main_class = "org.chromium.testing.local.JunitTestMain"
+
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "$target_name$build_config_target_suffix"
+    _deps = [
+      "//testing/android/junit:junit_test_support",
+      "//third_party/android_deps:robolectric_all_java",
+      "//third_party/junit",
+      "//third_party/mockito:mockito_java",
+    ]
+    if (defined(invoker.deps)) {
+      _deps += invoker.deps
+    }
+    if (defined(invoker.alternative_android_sdk_dep)) {
+      _android_sdk_dep = invoker.alternative_android_sdk_dep
+    } else {
+      _android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
+    }
+
+    # A package name or a manifest is required to have resources. This is
+    # added so that junit tests that do not care about the package name can
+    # still use resources without having to explicitly set one.
+    if (defined(invoker.package_name)) {
+      _package_name = invoker.package_name
+    } else if (!defined(invoker.android_manifest)) {
+      _package_name = "org.chromium.test"
+    }
+
+    _resource_arsc_output = "${target_gen_dir}/${target_name}.ap_"
+    _compile_resources_target = "${target_name}__compile_resources"
+    compile_resources(_compile_resources_target) {
+      forward_variables_from(invoker, [ "android_manifest" ])
+      deps = _deps
+      android_sdk_dep = _android_sdk_dep
+      build_config_dep = ":$_build_config_target_name"
+      build_config = _build_config
+      if (defined(_package_name)) {
+        rename_manifest_package = _package_name
+      }
+      if (!defined(android_manifest)) {
+        android_manifest = "//build/android/AndroidManifest.xml"
+      }
+      arsc_output = _resource_arsc_output
+      min_sdk_version = default_min_sdk_version
+      target_sdk_version = android_sdk_version
+    }
+
+    _jni_srcjar_target = "${target_name}__final_jni"
+    _outer_target_name = target_name
+    generate_jni_registration(_jni_srcjar_target) {
+      enable_native_mocks = true
+      require_native_mocks = true
+      targets = [ ":$_outer_target_name" ]
+    }
+
+    java_library_impl(_java_binary_target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY + [ "deps" ])
+      type = "junit_binary"
+      main_target_name = invoker.target_name
+
+      # Include the android SDK jar(s) for resource processing.
+      include_android_sdk = true
+
+      # Robolectric can handle deps that set !supports_android as well as
+      # those that set requires_android.
+      bypass_platform_checks = true
+      deps = _deps
+      testonly = true
+      main_class = _main_class
+      wrapper_script_name = "helper/$main_target_name"
+
+      # As of April 2021, adding -XX:TieredStopAtLevel=1 does not affect the
+      # wall time of a single robolectric shard, but does reduce the CPU time by
+      # 66%, which makes sharding more effective.
+      tiered_stop_at_level_one = true
+
+      if (!defined(srcjar_deps)) {
+        srcjar_deps = []
+      }
+      srcjar_deps += [
+        ":$_compile_resources_target",
+        ":$_jni_srcjar_target",
+
+        # This dep is required for any targets that depend on //base:base_java.
+        "//build/android:build_config_gen",
+      ]
+    }
+
+    test_runner_script(_test_runner_target_name) {
+      test_name = invoker.target_name
+      test_suite = invoker.target_name
+      test_type = "junit"
+      ignore_all_data_deps = true
+      resource_apk = _resource_arsc_output
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "assert_no_deps",
+                               "visibility",
+                             ])
+      public_deps = [
+        ":$_build_config_target_name",
+        ":$_java_binary_target_name",
+        ":$_test_runner_target_name",
+      ]
+    }
+  }
+
+  # Declare a java library target
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be added to the javac classpath.
+  #   public_deps: Dependencies that this target exposes as part of its
+  #     public API. Targets listed here do not also need to appear in deps.
+  #   annotation_processor_deps: List of java_annotation_processor targets to
+  #     use when compiling.
+  #
+  #   jar_path: Path to a prebuilt jar. Mutually exclusive with sources &
+  #     srcjar_deps.
+  #   sources: List of .java files included in this library.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to sources and be included in this library.
+  #
+  #   input_jars_paths: A list of paths to the jars that should be included
+  #     in the compile-time classpath. These are in addition to library .jars
+  #     that appear in deps.
+  #
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   enable_errorprone: If true, enables the errorprone compiler.
+  #   skip_build_server: If true, avoids sending tasks to the build server.
+  #
+  #   jar_excluded_patterns: List of patterns of .class files to exclude.
+  #   jar_included_patterns: List of patterns of .class files to include.
+  #     When omitted, all classes not matched by jar_excluded_patterns are
+  #     included. When specified, all non-matching .class files are stripped.
+  #
+  #   low_classpath_priority: Indicates that the library should be placed at the
+  #     end of the classpath. The default classpath order has libraries ordered
+  #     before the libraries that they depend on. 'low_classpath_priority' is
+  #     useful when one java_library() overrides another via
+  #     'jar_excluded_patterns' and the overriding library does not depend on
+  #     the overridee.
+  #
+  #   output_name: File name for the output .jar (not including extension).
+  #     Defaults to the input .jar file name.
+  #
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #
+  #   supports_android: If true, Android targets (android_library, android_apk)
+  #     may depend on this target. Note: if true, this target must only use the
+  #     subset of Java available on Android.
+  #   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+  #     dependencies for this target. This will allow depending on an
+  #     android_library target, for example.
+  #   enable_desugar: If false, disables desugaring of lambdas, etc. Use this
+  #     only when you are sure the library does not require desugaring. E.g.
+  #     to hide warnings shown from desugaring.
+  #
+  #   additional_jar_files: Use to package additional files (Java resources)
+  #     into the output jar. Pass a list of length-2 lists with format:
+  #         [ [ path_to_file, path_to_put_in_jar ] ]
+  #
+  #   javac_args: Additional arguments to pass to javac.
+  #   errorprone_args: Additional arguments to pass to errorprone.
+  #
+  #   data_deps, testonly
+  #
+  # Example
+  #   java_library("foo_java") {
+  #     sources = [
+  #       "org/chromium/foo/Foo.java",
+  #       "org/chromium/foo/FooInterface.java",
+  #       "org/chromium/foo/FooService.java",
+  #     ]
+  #     deps = [
+  #       ":bar_java"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     jar_excluded_patterns = [
+  #       "*/FooService.class", "org/chromium/FooService\$*.class"
+  #     ]
+  #   }
+  template("java_library") {
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      type = "java_library"
+    }
+  }
+
+  # Declare a java library target for a prebuilt jar
+  #
+  # Supports all variables of java_library().
+  #
+  # Example
+  #   java_prebuilt("foo_java") {
+  #     jar_path = "foo.jar"
+  #     deps = [
+  #       ":foo_resources",
+  #       ":bar_java"
+  #     ]
+  #   }
+  template("java_prebuilt") {
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      type = "java_library"
+    }
+  }
+
+  # Combines all dependent .jar files into a single .jar file.
+  #
+  # Variables:
+  #   output: Path to the output jar.
+  #   override_build_config: Use a pre-existing .build_config. Must be of type
+  #     "apk".
+  #   use_interface_jars: Use all dependent interface .jars rather than
+  #     implementation .jars.
+  #   use_unprocessed_jars: Use unprocessed / undesugared .jars.
+  #   direct_deps_only: Do not recurse on deps.
+  #   jar_excluded_patterns (optional)
+  #     List of globs for paths to exclude.
+  #
+  # Example
+  #   dist_jar("lib_fatjar") {
+  #     deps = [ ":my_java_lib" ]
+  #     output = "$root_build_dir/MyLibrary.jar"
+  #   }
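+  #
+  #   # A hypothetical variant that zips only the interface .jars of direct
+  #   # deps (see use_interface_jars and direct_deps_only above):
+  #   dist_jar("lib_header_jar") {
+  #     deps = [ ":my_java_lib" ]
+  #     output = "$root_build_dir/MyLibraryHeaders.jar"
+  #     use_interface_jars = true
+  #     direct_deps_only = true
+  #   }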
+  template("dist_jar") {
+    # TODO(crbug.com/1042017): Remove.
+    not_needed(invoker, [ "no_build_hooks" ])
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    _supports_android =
+        !defined(invoker.supports_android) || invoker.supports_android
+    _use_interface_jars =
+        defined(invoker.use_interface_jars) && invoker.use_interface_jars
+    _use_unprocessed_jars =
+        defined(invoker.use_unprocessed_jars) && invoker.use_unprocessed_jars
+    _direct_deps_only =
+        defined(invoker.direct_deps_only) && invoker.direct_deps_only
+    assert(!(_use_unprocessed_jars && _use_interface_jars),
+           "Cannot set both use_interface_jars and use_unprocessed_jars")
+
+    _jar_target_name = target_name
+
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps = invoker.deps
+    }
+    if (_supports_android) {
+      _deps += [ "//third_party/android_sdk:android_sdk_java" ]
+    }
+
+    if (defined(invoker.override_build_config)) {
+      _build_config = invoker.override_build_config
+    } else {
+      _build_config = "$target_gen_dir/$target_name.build_config"
+      _build_config_target_name = "$target_name$build_config_target_suffix"
+
+      write_build_config(_build_config_target_name) {
+        type = "dist_jar"
+        supports_android = _supports_android
+        requires_android =
+            defined(invoker.requires_android) && invoker.requires_android
+        possible_config_deps = _deps
+        ignore_dependency_public_deps = _direct_deps_only
+        build_config = _build_config
+      }
+
+      _deps += [ ":$_build_config_target_name" ]
+    }
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    action_with_pydeps(_jar_target_name) {
+      forward_variables_from(invoker, [ "data" ])
+      script = "//build/android/gyp/zip.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      deps = _deps
+
+      inputs = [ _build_config ]
+
+      outputs = [ invoker.output ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+        "--no-compress",
+      ]
+
+      if (_direct_deps_only) {
+        if (_use_interface_jars) {
+          args += [ "--input-zips=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
+        } else if (_use_unprocessed_jars) {
+          args += [
+            "--input-zips=@FileArg($_rebased_build_config:javac:classpath)",
+          ]
+        } else {
+          assert(
+              false,
+              "direct_deps_only does not work without use_interface_jars or use_unprocessed_jars")
+        }
+      } else {
+        if (_use_interface_jars) {
+          args += [ "--input-zips=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)" ]
+        } else if (_use_unprocessed_jars) {
+          args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
+        } else {
+          args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:device_classpath)" ]
+        }
+      }
+      _excludes = []
+      if (defined(invoker.jar_excluded_patterns)) {
+        _excludes += invoker.jar_excluded_patterns
+      }
+      if (_use_interface_jars) {
+        # Turbine adds files like: META-INF/TRANSITIVE/.../Foo.class
+        # These confuse proguard: https://crbug.com/1081443
+        _excludes += [ "META-INF/*" ]
+      }
+      if (_excludes != []) {
+        args += [ "--input-zips-excluded-globs=$_excludes" ]
+      }
+    }
+  }
+
+  # Combines all dependent .jar files into a single proguarded .dex file.
+  #
+  # Variables:
+  #   output: Path to the output dex.
+  #   proguard_enabled: Whether to enable R8.
+  #   proguard_configs: List of proguard configs.
+  #   proguard_enable_obfuscation: Whether to enable obfuscation (default=true).
+  #
+  # Example
+  #   dist_dex("lib_fatdex") {
+  #     deps = [ ":my_java_lib" ]
+  #     output = "$root_build_dir/MyLibrary.dex"
+  #   }
+  template("dist_dex") {
+    _deps = [ "//third_party/android_sdk:android_sdk_java" ]
+    if (defined(invoker.deps)) {
+      _deps += invoker.deps
+    }
+
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "$target_name$build_config_target_suffix"
+
+    write_build_config(_build_config_target_name) {
+      type = "dist_jar"
+      forward_variables_from(invoker,
+                             [
+                               "proguard_configs",
+                               "proguard_enabled",
+                             ])
+      supports_android = true
+      requires_android = true
+      possible_config_deps = _deps
+      build_config = _build_config
+    }
+
+    _deps += [ ":$_build_config_target_name" ]
+
+    dex(target_name) {
+      forward_variables_from(invoker,
+                             TESTONLY_AND_VISIBILITY + [
+                                   "data",
+                                   "data_deps",
+                                   "proguard_configs",
+                                   "proguard_enabled",
+                                   "proguard_enable_obfuscation",
+                                   "min_sdk_version",
+                                 ])
+      deps = _deps
+      build_config = _build_config
+      enable_multidex = false
+      output = invoker.output
+      if (defined(proguard_enabled) && proguard_enabled) {
+        # The individual dependencies would have caught real missing deps in
+        # their respective dex steps. False positives that were suppressed at
+        # per-target dex steps are emitted here since this is using jar files
+        # rather than dex files.
+        ignore_desugar_missing_deps = true
+
+        # When trying to build a stand-alone .dex, don't add in jdk_libs_dex.
+        supports_jdk_library_desugaring = false
+      } else {
+        _rebased_build_config = rebase_path(_build_config, root_build_dir)
+        input_dex_filearg =
+            "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)"
+      }
+    }
+  }
+
+  # Creates an Android .aar library.
+  #
+  # Currently supports:
+  #   * AndroidManifest.xml
+  #   * classes.jar
+  #   * jni/
+  #   * res/
+  #   * R.txt
+  #   * proguard.txt
+  # Does not yet support:
+  #   * public.txt
+  #   * annotations.zip
+  #   * assets/
+  # See: https://developer.android.com/studio/projects/android-library.html#aar-contents
+  #
+  # Variables:
+  #   output: Path to the output .aar.
+  #   proguard_configs: List of proguard configs (optional).
+  #   android_manifest: Path to AndroidManifest.xml (optional).
+  #   native_libraries: list of native libraries (optional).
+  #   direct_deps_only: Do not recurse on deps (optional, defaults to false).
+  #   jar_excluded_patterns (optional): List of globs for paths to exclude.
+  #   jar_included_patterns (optional): List of globs for paths to include.
+  #
+  # Example
+  #   dist_aar("my_aar") {
+  #     deps = [ ":my_java_lib" ]
+  #     output = "$root_build_dir/MyLibrary.aar"
+  #   }
+  template("dist_aar") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps = invoker.deps
+    }
+
+    _direct_deps_only =
+        defined(invoker.direct_deps_only) && invoker.direct_deps_only
+
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "$target_name$build_config_target_suffix"
+
+    write_build_config(_build_config_target_name) {
+      type = "dist_aar"
+      forward_variables_from(invoker, [ "proguard_configs" ])
+      possible_config_deps = _deps
+      supports_android = true
+      requires_android = true
+      ignore_dependency_public_deps = _direct_deps_only
+      build_config = _build_config
+    }
+
+    _deps += [ ":$_build_config_target_name" ]
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, [ "data" ])
+      depfile = "$target_gen_dir/$target_name.d"
+      deps = _deps
+      script = "//build/android/gyp/dist_aar.py"
+
+      inputs = [ _build_config ]
+
+      # Although these will be listed as deps in the depfile, they must also
+      # appear here so that "gn analyze" knows about them.
+      # https://crbug.com/827197
+      if (defined(invoker.proguard_configs)) {
+        inputs += invoker.proguard_configs
+      }
+
+      outputs = [ invoker.output ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+        "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)",
+        "--r-text-files=@FileArg($_rebased_build_config:deps_info:dependency_r_txt_files)",
+        "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
+      ]
+      if (_direct_deps_only) {
+        args += [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ]
+      } else {
+        args += [ "--jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
+      }
+      if (defined(invoker.android_manifest)) {
+        args += [
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      }
+      if (defined(invoker.native_libraries) && invoker.native_libraries != []) {
+        inputs += invoker.native_libraries
+        _rebased_native_libraries =
+            rebase_path(invoker.native_libraries, root_build_dir)
+
+        args += [
+          "--native-libraries=$_rebased_native_libraries",
+          "--abi=$android_app_abi",
+        ]
+      }
+      if (defined(invoker.jar_excluded_patterns)) {
+        args += [ "--jar-excluded-globs=${invoker.jar_excluded_patterns}" ]
+      }
+      if (defined(invoker.jar_included_patterns)) {
+        args += [ "--jar-included-globs=${invoker.jar_included_patterns}" ]
+      }
+      if (defined(invoker.resource_included_patterns)) {
+        args += [
+          "--resource-included-globs=${invoker.resource_included_patterns}",
+        ]
+      }
+    }
+  }
+
+  # Declare an Android library target
+  #
+  # This target creates an Android library containing java code and Android
+  # resources.
+  #
+  # Supports all variables of java_library(), plus:
+  #   deps: In addition to defining java deps, this can also include
+  #     android_assets() and android_resources() targets.
+  #   alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
+  #     replaces the default android_sdk_ijar.
+  #   alternative_android_sdk_ijar_dep: the target that generates
+  #      alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
+  #      is used.
+  #   alternative_android_sdk_jar: actual jar corresponding to
+  #      alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
+  #      is used.
+  #
+  # Example
+  #   android_library("foo_java") {
+  #     sources = [
+  #       "android/org/chromium/foo/Foo.java",
+  #       "android/org/chromium/foo/FooInterface.java",
+  #       "android/org/chromium/foo/FooService.java",
+  #     ]
+  #     deps = [
+  #       ":bar_java"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     jar_excluded_patterns = [
+  #       "*/FooService.class", "org/chromium/FooService\$*.class"
+  #     ]
+  #   }
+  template("android_library") {
+    java_library(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+      supports_android = true
+      requires_android = true
+
+      if (!defined(jar_excluded_patterns)) {
+        jar_excluded_patterns = []
+      }
+      jar_excluded_patterns += [
+        "*/R.class",
+        "*/R\$*.class",
+        "*/Manifest.class",
+        "*/Manifest\$*.class",
+        "*/GEN_JNI.class",
+      ]
+    }
+  }
+
+  # Declare an Android library target for a prebuilt jar
+  #
+  # This target creates an Android library containing java code and Android
+  # resources.
+  #
+  # Supports all variables of android_library().
+  #
+  # Example
+  #   android_java_prebuilt("foo_java") {
+  #     jar_path = "foo.jar"
+  #     deps = [
+  #       ":foo_resources",
+  #       ":bar_java"
+  #     ]
+  #   }
+  template("android_java_prebuilt") {
+    android_library(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    }
+  }
+
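+  # Declare a Java library target for a prebuilt jar supplied by the Android
+  # system at runtime (build_config type "system_java_library").
+  #
+  # Supports all variables of java_library().
+  #
+  # Example (a sketch; the jar path is hypothetical):
+  #   android_system_java_prebuilt("android_framework_java") {
+  #     jar_path = "$android_sdk/android.jar"
+  #   }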
+  template("android_system_java_prebuilt") {
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      supports_android = true
+      type = "system_java_library"
+    }
+  }
+
+  # Creates org/chromium/build/BuildConfig.java
+  # This doesn't really belong in //build since it generates a file for //base.
+  # However, we don't currently have a better way to include this file in all
+  # apks that depend on //base:base_java.
+  #
+  # Variables:
+  #   use_final_fields: True to use final fields. When false, all other
+  #       variables must not be set.
+  #   enable_multidex: Value for ENABLE_MULTIDEX.
+  #   min_sdk_version: Value for MIN_SDK_VERSION.
+  #   bundles_supported: Whether or not this target can be treated as a bundle.
+  #   resources_version_variable:
+  #   is_incremental_install:
+  #   isolated_splits_enabled: Value for ISOLATED_SPLITS_ENABLED.
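+  #
+  # Example (a minimal sketch; the values are illustrative):
+  #   generate_build_config_srcjar("foo_build_config_srcjar") {
+  #     use_final_fields = true
+  #     enable_multidex = true
+  #     min_sdk_version = 21
+  #   }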
+  template("generate_build_config_srcjar") {
+    java_cpp_template(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      sources = [ "//build/android/java/templates/BuildConfig.template" ]
+      defines = []
+
+      # Set these even when !use_final_fields so that they have correct default
+      # values within junit_binary(), which ignores jar_excluded_patterns.
+      if (enable_java_asserts) {
+        defines += [ "_ENABLE_ASSERTS" ]
+      }
+      if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
+        defines += [ "_IS_UBSAN" ]
+      }
+
+      if (is_chrome_branded) {
+        defines += [ "_IS_CHROME_BRANDED" ]
+      }
+
+      if (is_chromecast && chromecast_branding == "internal") {
+        defines += [ "_IS_CHROMECAST_BRANDING_INTERNAL" ]
+      }
+
+      if (defined(invoker.bundles_supported) && invoker.bundles_supported) {
+        defines += [ "_BUNDLES_SUPPORTED" ]
+      }
+
+      if (defined(invoker.isolated_splits_enabled) &&
+          invoker.isolated_splits_enabled) {
+        defines += [ "_ISOLATED_SPLITS_ENABLED" ]
+      }
+
+      if (defined(invoker.is_incremental_install) &&
+          invoker.is_incremental_install) {
+        defines += [ "_IS_INCREMENTAL_INSTALL" ]
+      }
+
+      if (invoker.use_final_fields) {
+        forward_variables_from(invoker, [ "deps" ])
+        defines += [ "USE_FINAL" ]
+        if (invoker.enable_multidex) {
+          defines += [ "ENABLE_MULTIDEX" ]
+        }
+        if (defined(invoker.min_sdk_version)) {
+          defines += [ "_MIN_SDK_VERSION=${invoker.min_sdk_version}" ]
+        }
+        if (defined(invoker.resources_version_variable)) {
+          defines += [
+            "_RESOURCES_VERSION_VARIABLE=${invoker.resources_version_variable}",
+          ]
+        }
+      }
+    }
+  }
+
+  # Creates ProductConfig.java, a file containing product-specific configuration.
+  #
+  # Currently, this includes the list of locales, both in their compressed and
+  # uncompressed format, as well as library loading flags.
+  #
+  # Variables:
+  #   build_config: Path to build_config used for locale lists.
+  #   is_bundle_module: Whether or not this target is part of a bundle build.
+  #   java_package: Java package for the generated class.
+  #   use_chromium_linker:
+  #   use_modern_linker:
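+  #
+  # Example (a minimal sketch; the package name is illustrative):
+  #   generate_product_config_srcjar("foo_product_config") {
+  #     java_package = "org.chromium.foo"
+  #     use_chromium_linker = false
+  #     use_modern_linker = false
+  #   }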
+  template("generate_product_config_srcjar") {
+    java_cpp_template(target_name) {
+      defines = []
+      _use_final =
+          defined(invoker.build_config) ||
+          defined(invoker.use_chromium_linker) ||
+          defined(invoker.use_modern_linker) ||
+          defined(invoker.is_bundle_module)
+      if (_use_final) {
+        defines += [ "USE_FINAL" ]
+      }
+
+      sources = [ "//build/android/java/templates/ProductConfig.template" ]
+      defines += [ "PACKAGE=${invoker.java_package}" ]
+
+      _use_chromium_linker =
+          defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
+      _use_modern_linker =
+          defined(invoker.use_modern_linker) && invoker.use_modern_linker
+      _is_bundle = defined(invoker.is_bundle_module) && invoker.is_bundle_module
+      defines += [
+        "USE_CHROMIUM_LINKER_VALUE=$_use_chromium_linker",
+        "USE_MODERN_LINKER_VALUE=$_use_modern_linker",
+        "IS_BUNDLE_VALUE=$_is_bundle",
+      ]
+      if (defined(invoker.build_config)) {
+        forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+        _rebased_build_config =
+            rebase_path(invoker.build_config, root_build_dir)
+        defines += [ "LOCALE_LIST=@FileArg($_rebased_build_config:deps_info:locales_java_list)" ]
+      }
+    }
+  }
+
+  # Declare an Android app module target, which is used as the basis for an
+  # Android APK or an Android app bundle module.
+  #
+  # Supports all variables of android_library(), plus:
+  #   android_manifest: Path to AndroidManifest.xml. NOTE: This manifest must
+  #     not contain a <uses-sdk> element. Use [min|target|max]_sdk_version
+  #     instead.
+  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+  #   png_to_webp: If true, pngs (with the exception of 9-patch) are
+  #     converted to webp during resource packaging.
+  #   loadable_modules: List of paths to native libraries to include. Different
+  #     from |shared_libraries| in that:
+  #       * dependencies of this .so are not automatically included
+  #       * ".cr.so" is never added
+  #       * they are not side-loaded when incremental_install=true.
+  #       * load_library_from_apk, use_chromium_linker,
+  #         and enable_relocation_packing do not apply
+  #     Use this instead of shared_libraries when you are going to load the library
+  #     conditionally, and only when shared_libraries doesn't work for you.
+  #   secondary_abi_loadable_modules: This is the loadable_modules analog to
+  #     secondary_abi_shared_libraries.
+  #   shared_libraries: List shared_library targets to bundle. If these
+  #     libraries depend on other shared_library targets, those dependencies will
+  #     also be included in the apk (e.g. for is_component_build).
+  #   secondary_abi_shared_libraries: secondary abi shared_library targets to
+  #     bundle. If these libraries depend on other shared_library targets, those
+  #     dependencies will also be included in the apk (e.g. for is_component_build).
+  #   native_lib_placeholders: List of placeholder filenames to add to the apk
+  #     (optional).
+  #   secondary_native_lib_placeholders: List of placeholder filenames to add to
+  #     the apk for the secondary ABI (optional).
+  #   generate_buildconfig_java: If defined and false, skip generating the
+  #     BuildConfig java class describing the build configuration. The default
+  #     is true when building with Chromium for non-test APKs.
+  #   generate_final_jni: If defined and false, skip generating the
+  #     GEN_JNI srcjar.
+  #   jni_registration_header: If specified, causes the
+  #     ${target_name}__final_jni target to additionally output a
+  #     header file to this path for use with manual JNI registration.
+  #   jni_sources_exclusions: List of source paths to exclude from the
+  #     final_jni step.
+  #   aapt_locale_allowlist: If set, all locales not in this list will be
+  #     stripped from resources.arsc.
+  #   resource_exclusion_regex: Causes all drawable images matching the regex to
+  #     be excluded (mipmaps are still included).
+  #   resource_exclusion_exceptions: A list of globs used when
+  #     resource_exclusion_regex is set. Files that match this list will
+  #     still be included.
+  #   resource_values_filter_rules: List of "source_path:name_regex" used to
+  #     filter out unwanted values/ resources.
+  #   shared_resources: True if this is a runtime shared library APK, like
+  #     the system_webview_apk target. Ensures that its resources can be
+  #     used by the loading application process.
+  #   app_as_shared_lib: True if this is a regular application apk that can
+  #     also serve as a runtime shared library, like the monochrome_public_apk
+  #     target. Ensures that the resources are usable both by the APK running
+  #     as an application, or by another process that loads it at runtime.
+  #   shared_resources_allowlist_target: Optional name of a target specifying
+  #     an input R.txt file that lists the resources that can be exported
+  #     by the APK when shared_resources or app_as_shared_lib is defined.
+  #   uncompress_shared_libraries: True if shared libraries should be stored
+  #     uncompressed in the APK. Must be unset or true if load_library_from_apk
+  #     is set to true.
+  #   uncompress_dex: Store final .dex files uncompressed in the apk.
+  #   strip_resource_names: True if resource names should be stripped from the
+  #     resources.arsc file in the apk or module.
+  #   short_resource_paths: True if resource paths should be shortened in the
+  #     apk or module.
+  #   resources_config_paths: List of paths to the aapt2 optimize config files
+  #     that tags resources with acceptable/non-acceptable optimizations.
+  #   expected_android_manifest: Enables verification of expected merged
+  #     manifest based on a golden file.
+  #   resource_ids_provider_dep: If passed, this target will use the resource
+  #     IDs generated by {resource_ids_provider_dep}__compile_res during
+  #     resource compilation.
+  #   enforce_resource_overlays_in_tests: Enables a check, for testonly
+  #     targets, that resource targets which override another target's
+  #     resources set resource_overlay=true. This check is always on for
+  #     non-test targets and cannot be disabled.
+  #   static_library_dependent_targets: A list of scopes describing targets that
+  #     use this target as a static library. Common Java code from the targets
+  #     listed in static_library_dependent_targets will be moved into this
+  #     target. Scope members are name and is_resource_ids_provider.
+  #   static_library_provider: Specifies a single target that this target will
+  #     use as a static library APK.
+  #   static_library_synchronized_proguard: When proguard is enabled, the
+  #     static_library_provider target will provide the dex file(s) for this
+  #     target.
+  #   min_sdk_version: The minimum Android SDK version this target supports.
+  #     Optional, default $default_min_sdk_version.
+  #   target_sdk_version: The target Android SDK version for this target.
+  #     Optional, default to android_sdk_version.
+  #   max_sdk_version: The maximum Android SDK version this target supports.
+  #     Optional, default not set.
+  #   require_native_mocks: Enforce that any native calls using
+  #     org.chromium.base.annotations.NativeMethods must have a mock set
+  #     (optional).
+  #   enable_native_mocks: Allow native calls using
+  #     org.chromium.base.annotations.NativeMethods to be mocked in tests
+  #     (optional).
+  #   product_config_java_packages: Optional list of java packages. If given, a
+  #     ProductConfig.java file will be generated for each package.
+  #   enable_proguard_checks: Turns on -checkdiscard directives and missing
+  #     symbols check in the proguard step (default=true).
+  #   disable_r8_outlining: Turn off outlining during the proguard step.
+  #   annotation_processor_deps: List of java_annotation_processor targets to
+  #     use when compiling the sources given to this target (optional).
+  #   processor_args_javac: List of args to pass to annotation processors when
+  #     compiling sources given to this target (optional).
+  #   bundles_supported: Enable Java code to treat this target as a bundle
+  #     (by default this is determined by the target type).
+  #   main_component_library: Specifies the name of the base component's library
+  #     in a component build. If given, the system will find dependent native
+  #     libraries at runtime by inspecting this library (optional).
+  #   expected_libs_and_assets: Verify the list of included native libraries
+  #     and assets is consistent with the given expectation file.
+  #   expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+  #     with this file as the base.
+  #   expected_proguard_config: Checks that the merged set of proguard flags
+  #     matches the given config.
+  #   expected_proguard_config_base: Treat expected_proguard_config as a diff
+  #     with this file as the base.
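+  #
+  # Example (a minimal sketch; this internal template is normally reached via
+  # wrapper templates such as android_apk(), so the names and paths below are
+  # illustrative):
+  #   android_apk_or_module("foo_apk") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     final_apk_path = "$root_build_dir/apks/Foo.apk"
+  #     shared_libraries = [ ":libfoo" ]
+  #   }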
+  template("android_apk_or_module") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    assert(defined(invoker.android_manifest))
+    _base_path = "$target_out_dir/$target_name/$target_name"
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target = "$target_name$build_config_target_suffix"
+
+    _min_sdk_version = default_min_sdk_version
+    _target_sdk_version = android_sdk_version
+    if (defined(invoker.min_sdk_version)) {
+      _min_sdk_version = invoker.min_sdk_version
+    }
+    if (defined(invoker.target_sdk_version)) {
+      _target_sdk_version = invoker.target_sdk_version
+    }
+
+    _template_name = target_name
+
+    _is_bundle_module =
+        defined(invoker.is_bundle_module) && invoker.is_bundle_module
+    if (_is_bundle_module) {
+      _is_base_module =
+          defined(invoker.is_base_module) && invoker.is_base_module
+    }
+
+    _enable_multidex =
+        !defined(invoker.enable_multidex) || invoker.enable_multidex
+
+    if (!_is_bundle_module) {
+      _final_apk_path = invoker.final_apk_path
+      _final_rtxt_path = "${_final_apk_path}.R.txt"
+    }
+
+    _short_resource_paths =
+        defined(invoker.short_resource_paths) && invoker.short_resource_paths &&
+        enable_arsc_obfuscation
+    _strip_resource_names =
+        defined(invoker.strip_resource_names) && invoker.strip_resource_names &&
+        enable_arsc_obfuscation
+    _optimize_resources = _strip_resource_names || _short_resource_paths
+
+    if (!_is_bundle_module && _short_resource_paths) {
+      _final_pathmap_path = "${_final_apk_path}.pathmap.txt"
+    }
+    _res_size_info_path = "$target_out_dir/$target_name.ap_.info"
+    if (!_is_bundle_module) {
+      _final_apk_path_no_ext_list =
+          process_file_template([ _final_apk_path ],
+                                "{{source_dir}}/{{source_name_part}}")
+      _final_apk_path_no_ext = _final_apk_path_no_ext_list[0]
+      not_needed([ "_final_apk_path_no_ext" ])
+    }
+
+    # Non-base bundle modules create only proto resources.
+    if (!_is_bundle_module || _is_base_module) {
+      _arsc_resources_path = "$target_out_dir/$target_name.ap_"
+    }
+    if (_is_bundle_module) {
+      # Path to the intermediate proto-format resources zip file.
+      _proto_resources_path = "$target_out_dir/$target_name.proto.ap_"
+      if (_optimize_resources) {
+        _optimized_proto_resources_path =
+            "$target_out_dir/$target_name.optimized.proto.ap_"
+      }
+    } else {
+      # resource_sizes.py needs to be able to find the unpacked resources.arsc
+      # file based on apk name to compute normalized size.
+      _resource_sizes_arsc_path =
+          "$root_out_dir/arsc/" +
+          rebase_path(_final_apk_path_no_ext, root_build_dir) + ".ap_"
+      if (_optimize_resources) {
+        _optimized_arsc_resources_path =
+            "$target_out_dir/$target_name.optimized.ap_"
+      }
+    }
+
+    if (defined(invoker.version_code)) {
+      _version_code = invoker.version_code
+    } else {
+      _version_code = android_default_version_code
+    }
+
+    if (android_override_version_code != "") {
+      _version_code = android_override_version_code
+    }
+
+    if (defined(invoker.version_name)) {
+      _version_name = invoker.version_name
+    } else {
+      _version_name = android_default_version_name
+    }
+
+    if (android_override_version_name != "") {
+      _version_name = android_override_version_name
+    }
+
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps = invoker.deps
+    }
+
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps = invoker.srcjar_deps
+    }
+
+    _android_root_manifest_deps = []
+    if (defined(invoker.android_manifest_dep)) {
+      _android_root_manifest_deps = [ invoker.android_manifest_dep ]
+    }
+    _android_root_manifest = invoker.android_manifest
+
+    _use_chromium_linker =
+        defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
+    _use_modern_linker =
+        defined(invoker.use_modern_linker) && invoker.use_modern_linker
+
+    _load_library_from_apk =
+        defined(invoker.load_library_from_apk) && invoker.load_library_from_apk
+
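+    # Mark the linker flags as possibly unused: they are only consumed when
+    # the native-libraries Java source is generated further below.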
+    not_needed([
+                 "_use_chromium_linker",
+                 "_use_modern_linker",
+               ])
+    assert(!_load_library_from_apk || _use_chromium_linker,
+           "load_library_from_apk requires use_chromium_linker")
+
+    # Make sure that uncompress_shared_libraries is set to true if
+    # load_library_from_apk is true.
+    if (defined(invoker.uncompress_shared_libraries)) {
+      _uncompress_shared_libraries = invoker.uncompress_shared_libraries
+      assert(!_load_library_from_apk || _uncompress_shared_libraries)
+    } else {
+      _uncompress_shared_libraries = _load_library_from_apk
+    }
+
+    # The dependency that makes the chromium linker, if any is needed.
+    _native_libs_deps = []
+    _shared_libraries_is_valid =
+        defined(invoker.shared_libraries) && invoker.shared_libraries != []
+
+    if (_shared_libraries_is_valid) {
+      _native_libs_deps += invoker.shared_libraries
+
+      # Write shared library output files of all dependencies to a file. Those
+      # will be the shared libraries packaged into the APK.
+      _shared_library_list_file =
+          "$target_gen_dir/${_template_name}.native_libs"
+      generated_file("${_template_name}__shared_library_list") {
+        deps = _native_libs_deps
+        outputs = [ _shared_library_list_file ]
+        data_keys = [ "shared_libraries" ]
+        walk_keys = [ "shared_libraries_barrier" ]
+        rebase = root_build_dir
+      }
+    } else {
+      # Must exist for instrumentation_test_apk() to depend on.
+      group("${_template_name}__shared_library_list") {
+      }
+    }
+
+    _secondary_abi_native_libs_deps = []
+
+    if (defined(invoker.secondary_abi_shared_libraries) &&
+        invoker.secondary_abi_shared_libraries != []) {
+      _secondary_abi_native_libs_deps = invoker.secondary_abi_shared_libraries
+
+      # Write shared library output files of all dependencies to a file. Those
+      # will be the shared libraries packaged into the APK.
+      _secondary_abi_shared_library_list_file =
+          "$target_gen_dir/${_template_name}.secondary_abi_native_libs"
+      generated_file("${_template_name}__secondary_abi_shared_library_list") {
+        deps = _secondary_abi_native_libs_deps
+        outputs = [ _secondary_abi_shared_library_list_file ]
+        data_keys = [ "shared_libraries" ]
+        walk_keys = [ "shared_libraries_barrier" ]
+        rebase = root_build_dir
+      }
+    } else {
+      # Must exist for instrumentation_test_apk() to depend on.
+      group("${_template_name}__secondary_abi_shared_library_list") {
+      }
+    }
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    assert(_rebased_build_config != "")  # Mark as used.
+
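+    # BuildConfig.java is skipped by default for test APKs, which use the one
+    # from their apk_under_test (invokers may override this via
+    # generate_buildconfig_java).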
+    _generate_buildconfig_java = !defined(invoker.apk_under_test)
+    if (defined(invoker.generate_buildconfig_java)) {
+      _generate_buildconfig_java = invoker.generate_buildconfig_java
+    }
+
+    _generate_productconfig_java = defined(invoker.product_config_java_packages)
+
+    # JNI generation usually goes hand-in-hand with buildconfig generation.
+    _generate_final_jni = _generate_buildconfig_java
+    if (defined(invoker.generate_final_jni)) {
+      _generate_final_jni = invoker.generate_final_jni
+    }
+
+    _proguard_enabled =
+        defined(invoker.proguard_enabled) && invoker.proguard_enabled
+
+    if (!_is_bundle_module && _proguard_enabled) {
+      _proguard_mapping_path = "$_final_apk_path.mapping"
+    }
+
+    # TODO(http://crbug.com/901465): Move shared Java code to static libraries
+    # when !_proguard_enabled too.
+    _is_static_library_provider =
+        defined(invoker.static_library_dependent_targets) && _proguard_enabled
+    if (_is_static_library_provider) {
+      _static_library_sync_dex_path = "$_base_path.synchronized.r8dex.jar"
+      _resource_ids_provider_deps = []
+      foreach(_target, invoker.static_library_dependent_targets) {
+        if (_target.is_resource_ids_provider) {
+          assert(_resource_ids_provider_deps == [],
+                 "Can only have 1 resource_ids_provider_dep")
+          _resource_ids_provider_deps += [ _target.name ]
+        }
+      }
+      _resource_ids_provider_dep = _resource_ids_provider_deps[0]
+    } else if (defined(invoker.resource_ids_provider_dep)) {
+      _resource_ids_provider_dep = invoker.resource_ids_provider_dep
+    }
+
+    if (_is_static_library_provider) {
+      _shared_resources_allowlist_target = _resource_ids_provider_dep
+    } else if (defined(invoker.shared_resources_allowlist_target)) {
+      _shared_resources_allowlist_target =
+          invoker.shared_resources_allowlist_target
+    }
+
+    _uses_static_library = defined(invoker.static_library_provider)
+    _uses_static_library_synchronized_proguard =
+        defined(invoker.static_library_synchronized_proguard) &&
+        invoker.static_library_synchronized_proguard
+
+    if (_uses_static_library_synchronized_proguard) {
+      assert(_uses_static_library)
+
+      # These will be provided by the static library APK.
+      _generate_buildconfig_java = false
+      _generate_final_jni = false
+    }
+
+    # TODO(crbug.com/864142): Allow incremental installs of bundle modules.
+    _incremental_apk = !_is_bundle_module &&
+                       !(defined(invoker.never_incremental) &&
+                         invoker.never_incremental) && incremental_install
+    if (_incremental_apk) {
+      _target_dir_name = get_label_info(target_name, "dir")
+      _incremental_install_json_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.incremental.json"
+      _incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk"
+    }
+
+    if (!_incremental_apk) {
+      # Bundle modules don't build the dex here, but need to write this path
+      # to their .build_config file.
+      if (_proguard_enabled) {
+        _final_dex_path = "$_base_path.r8dex.jar"
+      } else {
+        _final_dex_path = "$_base_path.mergeddex.jar"
+      }
+    }
+
+    _android_manifest =
+        "$target_gen_dir/${_template_name}_manifest/AndroidManifest.xml"
+    _merge_manifest_target = "${_template_name}__merge_manifests"
+    merge_manifests(_merge_manifest_target) {
+      forward_variables_from(invoker,
+                             [
+                               "manifest_package",
+                               "max_sdk_version",
+                             ])
+      input_manifest = _android_root_manifest
+      output_manifest = _android_manifest
+      build_config = _build_config
+      min_sdk_version = _min_sdk_version
+      target_sdk_version = _target_sdk_version
+      deps = _android_root_manifest_deps + [ ":$_build_config_target" ]
+    }
+
+    _final_deps = []
+
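+    # Legacy multidex (pre-API 21) requires an explicit main-dex list; newer
+    # runtimes load multiple dex files natively.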
+    _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21
+    if (_enable_main_dex_list) {
+      _generated_proguard_main_dex_config =
+          "$_base_path.resources.main-dex-proguard.txt"
+    }
+    _generated_proguard_config = "$_base_path.resources.proguard.txt"
+
+    if (_generate_buildconfig_java &&
+        defined(invoker.product_version_resources_dep)) {
+      # Needs to be added as a .build_config dep to pick up resources.
+      _deps += [ invoker.product_version_resources_dep ]
+    }
+
+    if (defined(invoker.alternative_android_sdk_dep)) {
+      _android_sdk_dep = invoker.alternative_android_sdk_dep
+    } else {
+      _android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
+    }
+
+    if (defined(_shared_resources_allowlist_target)) {
+      _allowlist_gen_dir =
+          get_label_info(_shared_resources_allowlist_target, "target_gen_dir")
+      _allowlist_target_name =
+          get_label_info(_shared_resources_allowlist_target, "name")
+      _allowlist_r_txt_path =
+          "${_allowlist_gen_dir}/${_allowlist_target_name}" +
+          "__compile_resources_R.txt"
+      _allowlist_deps =
+          "${_shared_resources_allowlist_target}__compile_resources"
+    }
+
+    if (_short_resource_paths) {
+      _resources_path_map_out_path =
+          "${target_gen_dir}/${_template_name}_resources_path_map.txt"
+    }
+
+    _compile_resources_target = "${_template_name}__compile_resources"
+    _compile_resources_rtxt_out =
+        "${target_gen_dir}/${_compile_resources_target}_R.txt"
+    _compile_resources_emit_ids_out =
+        "${target_gen_dir}/${_compile_resources_target}.resource_ids"
+    compile_resources(_compile_resources_target) {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_allowlist",
+                               "app_as_shared_lib",
+                               "enforce_resource_overlays_in_tests",
+                               "expected_android_manifest",
+                               "expected_android_manifest_base",
+                               "extra_verification_manifest",
+                               "extra_verification_manifest_dep",
+                               "manifest_package",
+                               "max_sdk_version",
+                               "no_xml_namespaces",
+                               "package_id",
+                               "package_name",
+                               "png_to_webp",
+                               "r_java_root_package_name",
+                               "resource_exclusion_exceptions",
+                               "resource_exclusion_regex",
+                               "resource_values_filter_rules",
+                               "resources_config_paths",
+                               "shared_resources",
+                               "shared_resources_allowlist_locales",
+                               "support_zh_hk",
+                               "uses_split",
+                             ])
+      short_resource_paths = _short_resource_paths
+      strip_resource_names = _strip_resource_names
+      android_manifest = _android_manifest
+      android_manifest_dep = ":$_merge_manifest_target"
+      version_code = _version_code
+      version_name = _version_name
+      min_sdk_version = _min_sdk_version
+      target_sdk_version = _target_sdk_version
+
+      if (defined(expected_android_manifest)) {
+        top_target_name = _template_name
+      }
+
+      if (defined(_resource_ids_provider_dep)) {
+        resource_ids_provider_dep = _resource_ids_provider_dep
+      }
+
+      if (defined(invoker.post_process_package_resources_script)) {
+        post_process_script = invoker.post_process_package_resources_script
+      }
+      r_text_out_path = _compile_resources_rtxt_out
+      emit_ids_out_path = _compile_resources_emit_ids_out
+      size_info_path = _res_size_info_path
+      proguard_file = _generated_proguard_config
+      if (_enable_main_dex_list) {
+        proguard_file_main_dex = _generated_proguard_main_dex_config
+      }
+      if (_short_resource_paths) {
+        resources_path_map_out_path = _resources_path_map_out_path
+      }
+
+      build_config = _build_config
+      build_config_dep = ":$_build_config_target"
+      android_sdk_dep = _android_sdk_dep
+      deps = _deps
+
+      # The static library uses the R.txt files generated by the
+      # static_library_dependent_targets when generating the final R.java file.
+      if (_is_static_library_provider) {
+        foreach(_dep, invoker.static_library_dependent_targets) {
+          deps += [ "${_dep.name}__compile_resources" ]
+        }
+      }
+
+      if (defined(invoker.apk_under_test)) {
+        # Set the arsc package name to match the apk_under_test package name
+        # So that test resources can references under_test resources via
+        # @type/name syntax.
+        r_java_root_package_name = "test"
+        arsc_package_name =
+            "@FileArg($_rebased_build_config:deps_info:arsc_package_name)"
+
+        # Passing in the --emit-ids mapping will cause aapt2 to assign resource
+        # IDs that do not conflict with those from apk_under_test.
+        assert(!defined(resource_ids_provider_dep))
+        resource_ids_provider_dep = invoker.apk_under_test
+
+        include_resource =
+            get_label_info(invoker.apk_under_test, "target_out_dir") + "/" +
+            get_label_info(invoker.apk_under_test, "name") + ".ap_"
+        _link_against = invoker.apk_under_test
+      }
+
+      if (_is_bundle_module) {
+        is_bundle_module = true
+        proto_output = _proto_resources_path
+        if (_optimize_resources) {
+          optimized_proto_output = _optimized_proto_resources_path
+        }
+
+        if (defined(invoker.base_module_target)) {
+          include_resource =
+              get_label_info(invoker.base_module_target, "target_out_dir") +
+              "/" + get_label_info(invoker.base_module_target, "name") + ".ap_"
+          _link_against = invoker.base_module_target
+        }
+      } else if (_optimize_resources) {
+        optimized_arsc_output = _optimized_arsc_resources_path
+      }
+
+      if (defined(_link_against)) {
+        deps += [ "${_link_against}__compile_resources" ]
+        include_resource = get_label_info(_link_against, "target_out_dir") +
+                           "/" + get_label_info(_link_against, "name") + ".ap_"
+      }
+
+      # Bundle modules have to reference resources from the base module.
+      if (!_is_bundle_module || _is_base_module) {
+        arsc_output = _arsc_resources_path
+      }
+
+      if (defined(_shared_resources_allowlist_target)) {
+        # Used to ensure that the WebView resources are properly shared
+        # (i.e. are non-final and with package ID 0).
+        shared_resources_allowlist = _allowlist_r_txt_path
+        deps += [ _allowlist_deps ]
+      }
+    }
+    _srcjar_deps += [ ":$_compile_resources_target" ]
+
+    if (defined(_resource_sizes_arsc_path)) {
+      _copy_arsc_target = "${_template_name}__copy_arsc"
+      copy(_copy_arsc_target) {
+        deps = [ ":$_compile_resources_target" ]
+
+        # resource_sizes.py doesn't care if it gets the optimized .arsc.
+        sources = [ _arsc_resources_path ]
+        outputs = [ _resource_sizes_arsc_path ]
+      }
+      _final_deps += [ ":$_copy_arsc_target" ]
+    }
+
+    if (!_is_bundle_module) {
+      # Output the R.txt file to a more easily discoverable location for
+      # archiving. This is necessary when stripping resource names so that we
+      # have an archive of resource name-to-id mappings for shipped apks (for
+      # debugging purposes). We copy the file rather than change the location
+      # of the original because other targets rely on the location of the R.txt
+      # file.
+      _copy_rtxt_target = "${_template_name}__copy_rtxt"
+      copy(_copy_rtxt_target) {
+        deps = [ ":$_compile_resources_target" ]
+        sources = [ _compile_resources_rtxt_out ]
+        outputs = [ _final_rtxt_path ]
+      }
+      _final_deps += [ ":$_copy_rtxt_target" ]
+
+      if (_short_resource_paths) {
+        # Do the same for the path map.
+        _copy_pathmap_target = "${_template_name}__copy_pathmap"
+        copy(_copy_pathmap_target) {
+          deps = [ ":$_compile_resources_target" ]
+          sources = [ _resources_path_map_out_path ]
+          outputs = [ _final_pathmap_path ]
+
+          # The monochrome_public_apk_checker test needs pathmap when run on swarming.
+          data = [ _final_pathmap_path ]
+        }
+        _final_deps += [ ":$_copy_pathmap_target" ]
+      }
+    }
+
+    _generate_native_libraries_java =
+        (!_is_bundle_module || _is_base_module) &&
+        (_native_libs_deps != [] || _secondary_abi_native_libs_deps != []) &&
+        !_uses_static_library_synchronized_proguard
+    if (_generate_native_libraries_java) {
+      write_native_libraries_java("${_template_name}__native_libraries") {
+        forward_variables_from(invoker, [ "main_component_library" ])
+
+        # Do not add a dep on the generated_file target in order to avoid having
+        # to build the native libraries before this target. The dependency is
+        # instead captured via a depfile.
+        if (_native_libs_deps != []) {
+          native_libraries_list_file = _shared_library_list_file
+        } else {
+          native_libraries_list_file = _secondary_abi_shared_library_list_file
+        }
+        enable_chromium_linker = _use_chromium_linker
+        load_library_from_apk = _load_library_from_apk
+        use_modern_linker = _use_modern_linker
+        use_final_fields = true
+      }
+      _srcjar_deps += [ ":${_template_name}__native_libraries" ]
+    }
+
+    _loadable_modules = []
+    if (defined(invoker.loadable_modules)) {
+      _loadable_modules = invoker.loadable_modules
+    }
+
+    if (_native_libs_deps != []) {
+      _loadable_modules += _sanitizer_runtimes
+    }
+
+    if (_generate_buildconfig_java) {
+      generate_build_config_srcjar("${_template_name}__build_config_srcjar") {
+        forward_variables_from(invoker,
+                               [
+                                 "min_sdk_version",
+                                 "isolated_splits_enabled",
+                               ])
+        _bundles_supported = _is_bundle_module || _is_static_library_provider
+        if (defined(invoker.bundles_supported)) {
+          _bundles_supported = invoker.bundles_supported
+        }
+        bundles_supported = _bundles_supported
+        use_final_fields = true
+        enable_multidex = _enable_multidex
+        is_incremental_install = _incremental_apk
+        if (defined(invoker.product_version_resources_dep)) {
+          resources_version_variable =
+              "org.chromium.base.R.string.product_version"
+        }
+        deps = [ ":$_build_config_target" ]
+      }
+      _srcjar_deps += [ ":${_template_name}__build_config_srcjar" ]
+    }
+
+    if (_generate_productconfig_java) {
+      foreach(_package, invoker.product_config_java_packages) {
+        _locale_target_name =
+            "${_template_name}_${_package}__product_config_srcjar"
+        generate_product_config_srcjar("$_locale_target_name") {
+          forward_variables_from(invoker, [ "is_bundle_module" ])
+          build_config = _build_config
+          java_package = _package
+          use_chromium_linker = _use_chromium_linker
+          use_modern_linker = _use_modern_linker
+          deps = [ ":$_build_config_target" ]
+        }
+        _srcjar_deps += [ ":$_locale_target_name" ]
+      }
+    }
+
+    if (_generate_final_jni) {
+      generate_jni_registration("${_template_name}__final_jni") {
+        forward_variables_from(invoker,
+                               [
+                                 "enable_native_mocks",
+                                 "require_native_mocks",
+                               ])
+        if (defined(invoker.bundle_target)) {
+          targets = [ invoker.bundle_target ]
+        } else {
+          targets = [ ":$_template_name" ]
+        }
+        if (_is_static_library_provider) {
+          foreach(_target, invoker.static_library_dependent_targets) {
+            targets += [ _target.name ]
+          }
+        }
+        if (defined(invoker.jni_registration_header)) {
+          header_output = invoker.jni_registration_header
+        }
+        if (defined(invoker.jni_sources_exclusions)) {
+          sources_exclusions = invoker.jni_sources_exclusions
+        }
+      }
+      _srcjar_deps += [ ":${_template_name}__final_jni" ]
+    } else {
+      not_needed(invoker,
+                 [
+                   "enable_native_mocks",
+                   "jni_registration_header",
+                 ])
+    }
+
+    _java_target = "${_template_name}__java"
+
+    java_library_impl(_java_target) {
+      forward_variables_from(invoker,
+                             [
+                               "alternative_android_sdk_dep",
+                               "android_manifest",
+                               "android_manifest_dep",
+                               "annotation_processor_deps",
+                               "apk_under_test",
+                               "base_module_target",
+                               "chromium_code",
+                               "jacoco_never_instrument",
+                               "jar_excluded_patterns",
+                               "javac_args",
+                               "native_lib_placeholders",
+                               "processor_args_javac",
+                               "secondary_abi_loadable_modules",
+                               "secondary_native_lib_placeholders",
+                               "sources",
+                               "static_library_dependent_targets",
+                               "library_always_compress",
+                               "library_renames",
+                             ])
+      deps = _deps
+      if (_uses_static_library_synchronized_proguard) {
+        if (!defined(jar_excluded_patterns)) {
+          jar_excluded_patterns = []
+        }
+
+        # The static library will provide all R.java files, but we still need to
+        # make the base module R.java files available at compile time since DFM
+        # R.java classes extend base module classes.
+        jar_excluded_patterns += [
+          "*/R.class",
+          "*/R\$*.class",
+        ]
+      }
+      if (_is_bundle_module) {
+        type = "android_app_bundle_module"
+        res_size_info_path = _res_size_info_path
+        is_base_module = _is_base_module
+        forward_variables_from(invoker,
+                               [
+                                 "version_code",
+                                 "version_name",
+                               ])
+      } else {
+        type = "android_apk"
+      }
+      r_text_path = _compile_resources_rtxt_out
+      main_target_name = _template_name
+      supports_android = true
+      requires_android = true
+      srcjar_deps = _srcjar_deps
+      if (defined(_final_dex_path)) {
+        final_dex_path = _final_dex_path
+      }
+
+      if (_is_bundle_module) {
+        proto_resources_path = _proto_resources_path
+        if (_optimize_resources) {
+          proto_resources_path = _optimized_proto_resources_path
+          if (_short_resource_paths) {
+            module_pathmap_path = _resources_path_map_out_path
+          }
+        }
+      } else {
+        apk_path = _final_apk_path
+        if (_incremental_apk) {
+          incremental_apk_path = _incremental_apk_path
+          incremental_install_json_path = _incremental_install_json_path
+        }
+      }
+
+      proguard_enabled = _proguard_enabled
+      if (_proguard_enabled) {
+        proguard_configs = [ _generated_proguard_config ]
+        if (defined(invoker.proguard_configs)) {
+          proguard_configs += invoker.proguard_configs
+        }
+        if (_enable_main_dex_list) {
+          proguard_configs += [ "//build/android/multidex.flags" ]
+        }
+        if (!enable_java_asserts && (!defined(testonly) || !testonly) &&
+            # Injected JaCoCo code causes -checkdiscards to fail.
+            !use_jacoco_coverage) {
+          proguard_configs += [ "//build/android/dcheck_is_off.flags" ]
+        }
+        if (!_is_bundle_module) {
+          proguard_mapping_path = _proguard_mapping_path
+        }
+      }
+
+      # Do not add a dep on the generated_file target in order to avoid having
+      # to build the native libraries before this target. The dependency is
+      # instead captured via a depfile.
+      if (_native_libs_deps != []) {
+        shared_libraries_runtime_deps_file = _shared_library_list_file
+      }
+      if (defined(_secondary_abi_shared_library_list_file)) {
+        secondary_abi_shared_libraries_runtime_deps_file =
+            _secondary_abi_shared_library_list_file
+      }
+
+      loadable_modules = _loadable_modules
+
+      uncompress_shared_libraries = _uncompress_shared_libraries
+
+      if (defined(_allowlist_r_txt_path) && _is_bundle_module) {
+        # Used to write the file path to the target's .build_config only.
+        base_allowlist_rtxt_path = _allowlist_r_txt_path
+      }
+    }
+
+    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+    # able to just do that calculation at build time instead.
+    if (defined(invoker.dist_ijar_path)) {
+      _dist_ijar_path = invoker.dist_ijar_path
+      dist_jar("${_template_name}_dist_ijar") {
+        override_build_config = _build_config
+        output = _dist_ijar_path
+        data = [ _dist_ijar_path ]
+        use_interface_jars = true
+        deps = [
+          ":$_build_config_target",
+          ":$_java_target",
+        ]
+      }
+    }
+
+    if (_uses_static_library_synchronized_proguard) {
+      _final_dex_target_dep = "${invoker.static_library_provider}__dexsplitter"
+    } else if (_is_bundle_module && _proguard_enabled) {
+      _final_deps += [ ":$_java_target" ]
+    } else if (_incremental_apk) {
+      if (defined(invoker.enable_proguard_checks)) {
+        not_needed(invoker, [ "enable_proguard_checks" ])
+      }
+      if (defined(invoker.disable_r8_outlining)) {
+        not_needed(invoker, [ "disable_r8_outlining" ])
+      }
+      if (defined(invoker.dexlayout_profile)) {
+        not_needed(invoker, [ "dexlayout_profile" ])
+      }
+    } else {
+      # Dex generation for app bundle modules with proguarding enabled takes
+      # place later due to synchronized proguarding. For more details,
+      # read build/android/docs/android_app_bundles.md
+      _final_dex_target_name = "${_template_name}__final_dex"
+      dex(_final_dex_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "disable_r8_outlining",
+                                 "dexlayout_profile",
+                                 "enable_proguard_checks",
+                                 "proguard_enable_obfuscation",
+                               ])
+        min_sdk_version = _min_sdk_version
+        proguard_enabled = _proguard_enabled
+        build_config = _build_config
+        deps = [
+          ":$_build_config_target",
+          ":$_java_target",
+        ]
+        if (_proguard_enabled) {
+          deps += _deps + [ ":$_compile_resources_target" ]
+          proguard_mapping_path = _proguard_mapping_path
+          proguard_sourcefile_suffix = "$android_channel-$_version_code"
+          has_apk_under_test = defined(invoker.apk_under_test)
+        } else if (_min_sdk_version >= default_min_sdk_version) {
+          # Enable dex merging only when min_sdk_version is >= what the library
+          # .dex files were created with.
+          input_dex_filearg =
+              "@FileArg(${_rebased_build_config}:final_dex:all_dex_files)"
+        } else {
+          input_classes_filearg =
+              "@FileArg($_rebased_build_config:deps_info:device_classpath)"
+        }
+
+        if (_is_static_library_provider) {
+          # The list of input jars is already recorded in the .build_config, but
+          # we need to explicitly add the java deps here to ensure they're
+          # available to be used as inputs to the dex step.
+          foreach(_dep, invoker.static_library_dependent_targets) {
+            _target_label = get_label_info(_dep.name, "label_no_toolchain")
+            deps += [ "${_target_label}__java" ]
+          }
+          output = _static_library_sync_dex_path
+          is_static_library = true
+        } else {
+          output = _final_dex_path
+        }
+        enable_multidex = _enable_multidex
+
+        # The individual dependencies would have caught real missing deps in
+        # their respective dex steps. False positives that were suppressed at
+        # per-target dex steps are emitted here since this may use jar files
+        # rather than dex files.
+        ignore_desugar_missing_deps = true
+
+        if (_enable_main_dex_list) {
+          extra_main_dex_proguard_config = _generated_proguard_main_dex_config
+          deps += [ ":$_compile_resources_target" ]
+        }
+      }
+
+      _final_dex_target_dep = ":$_final_dex_target_name"
+
+      # For static libraries, a single Proguard run is performed that includes
+      # code from the static library APK and the APKs that use the static
+      # library (done via classpath merging in write_build_config.py).
+      # This dexsplitter target splits the synchronized dex output into dex
+      # files for each APK/Bundle. In the Bundle case, another dexsplitter step
+      # is later performed to split the dex further for each feature module.
+      if (_is_static_library_provider && _proguard_enabled) {
+        _static_library_modules = []
+        foreach(_target, invoker.static_library_dependent_targets) {
+          _apk_as_module = _target.name
+          _module_config_target = "${_apk_as_module}$build_config_target_suffix"
+          _module_gen_dir = get_label_info(_apk_as_module, "target_gen_dir")
+          _module_name = get_label_info(_apk_as_module, "name")
+          _module_config = "$_module_gen_dir/$_module_name.build_config"
+          _static_library_modules += [
+            {
+              name = _module_name
+              build_config = _module_config
+              build_config_target = _module_config_target
+            },
+          ]
+        }
+
+        _static_library_dexsplitter_target = "${_template_name}__dexsplitter"
+        dexsplitter(_static_library_dexsplitter_target) {
+          input_dex_zip = _static_library_sync_dex_path
+          proguard_mapping = _proguard_mapping_path
+          deps = [
+            ":$_build_config_target",
+            "$_final_dex_target_dep",
+          ]
+          all_modules = [
+                          {
+                            name = "base"
+                            build_config = _build_config
+                            build_config_target = ":$_build_config_target"
+                          },
+                        ] + _static_library_modules
+          feature_jars_args = [
+            "--feature-jars",
+            "@FileArg($_rebased_build_config:deps_info:" +
+                "static_library_dependent_classpath_configs:" +
+                "$_rebased_build_config)",
+          ]
+          foreach(_module, _static_library_modules) {
+            _rebased_module_config =
+                rebase_path(_module.build_config, root_build_dir)
+            feature_jars_args += [
+              "--feature-jars",
+              "@FileArg($_rebased_build_config:deps_info:" +
+                  "static_library_dependent_classpath_configs:" +
+                  "$_rebased_module_config)",
+            ]
+          }
+        }
+        _final_deps += [ ":$_static_library_dexsplitter_target" ]
+        _validate_dex_target = "${_template_name}__validate_dex"
+        action_with_pydeps(_validate_dex_target) {
+          depfile = "$target_gen_dir/$target_name.d"
+          script =
+              "//build/android/gyp/validate_static_library_dex_references.py"
+          inputs = [ _build_config ]
+          _stamp = "$target_gen_dir/$target_name.stamp"
+          outputs = [ _stamp ]
+          deps = [
+            ":$_build_config_target",
+            ":$_static_library_dexsplitter_target",
+          ]
+          args = [
+            "--depfile",
+            rebase_path(depfile, root_build_dir),
+            "--stamp",
+            rebase_path(_stamp, root_build_dir),
+            "--static-library-dex",
+            "@FileArg($_rebased_build_config:final_dex:path)",
+          ]
+          foreach(_module, _static_library_modules) {
+            inputs += [ _module.build_config ]
+            _rebased_config = rebase_path(_module.build_config, root_build_dir)
+            deps += [ _module.build_config_target ]
+            args += [
+              "--static-library-dependent-dex",
+              "@FileArg($_rebased_config:final_dex:path)",
+            ]
+          }
+        }
+
+        # TODO(crbug.com/1032609): Switch to using R8's support for feature
+        # aware ProGuard and get rid of "_validate_dex_target" or figure out
+        # why some classes aren't properly being kept.
+        # _final_deps += [ ":$_validate_dex_target" ]
+        _final_dex_target_dep = ":$_static_library_dexsplitter_target"
+      }
+    }
+
+    _all_native_libs_deps = _native_libs_deps + _secondary_abi_native_libs_deps
+    if (_all_native_libs_deps != []) {
+      _native_libs_filearg_dep = ":$_build_config_target"
+      _all_native_libs_deps += [ _native_libs_filearg_dep ]
+
+      if (!_is_bundle_module) {
+        _native_libs_filearg =
+            "@FileArg($_rebased_build_config:native:libraries)"
+      }
+    }
+
+    if (_is_bundle_module) {
+      _final_deps += [
+                       ":$_merge_manifest_target",
+                       ":$_build_config_target",
+                       ":$_compile_resources_target",
+                     ] + _all_native_libs_deps
+      if (defined(_final_dex_target_dep)) {
+        not_needed([ "_final_dex_target_dep" ])
+      }
+    } else {
+      # Generate size-info/*.jar.info files.
+      if (defined(invoker.name)) {
+        # Create size info files for targets that care about size
+        # (have proguard enabled).
+        _include_size_info =
+            defined(invoker.include_size_info) && invoker.include_size_info
+        if (_include_size_info || _proguard_enabled) {
+          _size_info_target = "${target_name}__size_info"
+          create_size_info_files(_size_info_target) {
+            name = "${invoker.name}.apk"
+            build_config = _build_config
+            res_size_info_path = _res_size_info_path
+            deps = _deps + [
+                     ":$_build_config_target",
+                     ":$_compile_resources_target",
+                     ":$_java_target",
+                   ]
+          }
+          _final_deps += [ ":$_size_info_target" ]
+        } else {
+          not_needed(invoker, [ "name" ])
+        }
+      }
+
+      _keystore_path = android_keystore_path
+      _keystore_name = android_keystore_name
+      _keystore_password = android_keystore_password
+
+      if (defined(invoker.keystore_path)) {
+        _keystore_path = invoker.keystore_path
+        _keystore_name = invoker.keystore_name
+        _keystore_password = invoker.keystore_password
+      }
+
+      if (_incremental_apk) {
+        _incremental_compiled_resources_path = "${_base_path}_incremental.ap_"
+        _incremental_compile_resources_target_name =
+            "${target_name}__compile_incremental_resources"
+
+        action_with_pydeps(_incremental_compile_resources_target_name) {
+          deps = [
+            ":$_build_config_target",
+            ":$_compile_resources_target",
+            ":$_merge_manifest_target",
+          ]
+          script =
+              "//build/android/incremental_install/generate_android_manifest.py"
+          inputs = [
+            _android_manifest,
+            _build_config,
+            _arsc_resources_path,
+          ]
+          outputs = [ _incremental_compiled_resources_path ]
+
+          args = [
+            "--disable-isolated-processes",
+            "--src-manifest",
+            rebase_path(_android_manifest, root_build_dir),
+            "--in-apk",
+            rebase_path(_arsc_resources_path, root_build_dir),
+            "--out-apk",
+            rebase_path(_incremental_compiled_resources_path, root_build_dir),
+            "--aapt2-path",
+            rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+            "--android-sdk-jars=@FileArg($_rebased_build_config:android:sdk_jars)",
+          ]
+        }
+      }
+
+      _create_apk_target = "${_template_name}__create"
+      _final_deps += [ ":$_create_apk_target" ]
+      package_apk("$_create_apk_target") {
+        forward_variables_from(invoker,
+                               [
+                                 "expected_libs_and_assets",
+                                 "expected_libs_and_assets_base",
+                                 "native_lib_placeholders",
+                                 "secondary_abi_loadable_modules",
+                                 "secondary_native_lib_placeholders",
+                                 "uncompress_dex",
+                                 "uncompress_shared_libraries",
+                                 "library_always_compress",
+                                 "library_renames",
+                               ])
+
+        if (defined(expected_libs_and_assets)) {
+          build_config_dep = ":$_build_config_target"
+          top_target_name = _template_name
+        }
+
+        build_config = _build_config
+        keystore_name = _keystore_name
+        keystore_path = _keystore_path
+        keystore_password = _keystore_password
+        min_sdk_version = _min_sdk_version
+        uncompress_shared_libraries = _uncompress_shared_libraries
+
+        deps = _deps + [ ":$_build_config_target" ]
+
+        if ((!_proguard_enabled || _incremental_apk) &&
+            enable_jdk_library_desugaring) {
+          _all_jdk_libs = "//build/android:all_jdk_libs"
+          deps += [ _all_jdk_libs ]
+          jdk_libs_dex = get_label_info(_all_jdk_libs, "target_out_dir") +
+                         "/all_jdk_libs.l8.dex"
+        }
+
+        if (_incremental_apk) {
+          _dex_target = "//build/android/incremental_install:apk_dex"
+
+          deps += [
+            ":${_incremental_compile_resources_target_name}",
+            _dex_target,
+          ]
+
+          dex_path = get_label_info(_dex_target, "target_out_dir") + "/apk.dex"
+
+          # All native libraries are side-loaded, so use a placeholder to force
+          # the proper bitness for the app.
+          _has_native_libs =
+              defined(_native_libs_filearg) || _loadable_modules != []
+          if (_has_native_libs && !defined(native_lib_placeholders)) {
+            native_lib_placeholders = [ "libfix.crbug.384638.so" ]
+          }
+
+          packaged_resources_path = _incremental_compiled_resources_path
+          output_apk_path = _incremental_apk_path
+        } else {
+          loadable_modules = _loadable_modules
+          deps += _all_native_libs_deps + [
+                    ":$_merge_manifest_target",
+                    ":$_compile_resources_target",
+                  ]
+
+          if (defined(_final_dex_path)) {
+            dex_path = _final_dex_path
+            deps += [ _final_dex_target_dep ]
+          }
+
+          if (_optimize_resources) {
+            packaged_resources_path = _optimized_arsc_resources_path
+          } else {
+            packaged_resources_path = _arsc_resources_path
+          }
+
+          if (defined(_native_libs_filearg)) {
+            native_libs_filearg = _native_libs_filearg
+            secondary_abi_native_libs_filearg = "@FileArg($_rebased_build_config:native:secondary_abi_libraries)"
+          }
+          output_apk_path = _final_apk_path
+        }
+      }
+    }
+
+    if (_incremental_apk) {
+      _write_installer_json_rule_name = "${_template_name}__incremental_json"
+      action_with_pydeps(_write_installer_json_rule_name) {
+        script = "//build/android/incremental_install/write_installer_json.py"
+        deps = [ ":$_build_config_target" ] + _all_native_libs_deps
+
+        data = [ _incremental_install_json_path ]
+        inputs = [ _build_config ]
+        outputs = [ _incremental_install_json_path ]
+
+        _rebased_incremental_apk_path =
+            rebase_path(_incremental_apk_path, root_build_dir)
+        _rebased_incremental_install_json_path =
+            rebase_path(_incremental_install_json_path, root_build_dir)
+        args = [
+          "--apk-path=$_rebased_incremental_apk_path",
+          "--output-path=$_rebased_incremental_install_json_path",
+          "--dex-file=@FileArg($_rebased_build_config:final_dex:all_dex_files)",
+        ]
+        if (_proguard_enabled) {
+          args += [ "--show-proguard-warning" ]
+        }
+        if (defined(_native_libs_filearg)) {
+          args += [ "--native-libs=$_native_libs_filearg" ]
+          deps += [ _native_libs_filearg_dep ]
+        }
+        if (_loadable_modules != []) {
+          _rebased_loadable_modules =
+              rebase_path(_loadable_modules, root_build_dir)
+          args += [ "--native-libs=$_rebased_loadable_modules" ]
+        }
+      }
+      _final_deps += [
+        ":$_java_target",
+        ":$_write_installer_json_rule_name",
+      ]
+    }
+
+    # Generate the script used for apk operations (e.g. install, launch).
+    if (!_is_bundle_module &&
+        (!defined(invoker.create_apk_script) || invoker.create_apk_script)) {
+      if (_uses_static_library) {
+        _install_artifacts_target = "${target_name}__install_artifacts"
+        _install_artifacts_json =
+            "${target_gen_dir}/${target_name}.install_artifacts"
+        generated_file(_install_artifacts_target) {
+          output_conversion = "json"
+          deps = [ invoker.static_library_provider ]
+          outputs = [ _install_artifacts_json ]
+          data_keys = [ "install_artifacts" ]
+          rebase = root_build_dir
+        }
+      }
+      _apk_operations_target_name = "${target_name}__apk_operations"
+      action_with_pydeps(_apk_operations_target_name) {
+        _generated_script = "$root_build_dir/bin/${invoker.target_name}"
+        script = "//build/android/gyp/create_apk_operations_script.py"
+        outputs = [ _generated_script ]
+        args = [
+          "--script-output-path",
+          rebase_path(_generated_script, root_build_dir),
+          "--target-cpu=$target_cpu",
+        ]
+        if (defined(invoker.command_line_flags_file)) {
+          args += [
+            "--command-line-flags-file",
+            invoker.command_line_flags_file,
+          ]
+        }
+        if (_incremental_apk) {
+          args += [
+            "--incremental-install-json-path",
+            rebase_path(_incremental_install_json_path, root_build_dir),
+          ]
+        } else {
+          args += [
+            "--apk-path",
+            rebase_path(_final_apk_path, root_build_dir),
+          ]
+        }
+        if (_uses_static_library) {
+          deps = [ ":$_install_artifacts_target" ]
+          _rebased_install_artifacts_json =
+              rebase_path(_install_artifacts_json, root_build_dir)
+          _static_library_apk_path =
+              "@FileArg($_rebased_install_artifacts_json[])"
+          args += [
+            "--additional-apk",
+            _static_library_apk_path,
+          ]
+        }
+        data = []
+        data_deps = [
+          "//build/android:apk_operations_py",
+          "//build/android:stack_tools",
+        ]
+
+        if (_proguard_enabled && !_incremental_apk) {
+          # Required by logcat command.
+          data_deps += [ "//build/android/stacktrace:java_deobfuscate" ]
+          data += [ "$_final_apk_path.mapping" ]
+          args += [
+            "--proguard-mapping-path",
+            rebase_path("$_final_apk_path.mapping", root_build_dir),
+          ]
+        }
+      }
+      _final_deps += [ ":$_apk_operations_target_name" ]
+    }
+
+    _enable_lint = defined(invoker.enable_lint) && invoker.enable_lint &&
+                   !disable_android_lint
+    if (_enable_lint) {
+      android_lint("${target_name}__lint") {
+        forward_variables_from(invoker,
+                               [
+                                 "lint_baseline_file",
+                                 "lint_suppressions_file",
+                                 "min_sdk_version",
+                               ])
+        build_config = _build_config
+        build_config_dep = ":$_build_config_target"
+        deps = [ ":$_java_target" ]
+        if (defined(invoker.lint_suppressions_dep)) {
+          deps += [ invoker.lint_suppressions_dep ]
+        }
+        if (defined(invoker.lint_min_sdk_version)) {
+          min_sdk_version = invoker.lint_min_sdk_version
+        }
+      }
+    } else {
+      not_needed(invoker,
+                 [
+                   "lint_baseline_file",
+                   "lint_min_sdk_version",
+                   "lint_suppressions_dep",
+                   "lint_suppressions_file",
+                 ])
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "assert_no_deps",
+                               "data",
+                               "data_deps",
+                               "metadata",
+                             ])
+
+      # Expose the apk and its operation scripts via public_deps.
+      public_deps = _final_deps
+
+      if (!defined(data_deps)) {
+        data_deps = []
+      }
+
+      # Include unstripped native libraries so tests can symbolize stacks.
+      data_deps += _all_native_libs_deps
+
+      if (_enable_lint) {
+        data_deps += [ ":${target_name}__lint" ]
+      }
+
+      if (_uses_static_library) {
+        data_deps += [ invoker.static_library_provider ]
+      }
+    }
+  }
+
+  # Declare an Android APK target
+  #
+  # This target creates an Android APK containing java code, resources, assets,
+  # and (possibly) native libraries.
+  #
+  # Supports all variables of android_apk_or_module(), plus:
+  #   apk_name: Name for final apk.
+  #   final_apk_path: (Optional) path to output APK.
+  #
+  # Example
+  #   android_apk("foo_apk") {
+  #     apk_name = "Foo"
+  #     android_manifest = "AndroidManifest.xml"
+  #     sources = [
+  #       "android/org/chromium/foo/FooApplication.java",
+  #       "android/org/chromium/foo/FooActivity.java",
+  #     ]
+  #     deps = [
+  #       ":foo_support_java"
+  #       ":foo_resources"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     shared_libraries = [
+  #       ":my_shared_lib",
+  #     ]
+  #   }
+  template("android_apk") {
+    # TODO(crbug.com/1042017): Remove.
+    not_needed(invoker, [ "no_build_hooks" ])
+    android_apk_or_module(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_allowlist",
+                               "additional_jar_files",
+                               "alternative_android_sdk_dep",
+                               "android_manifest",
+                               "android_manifest_dep",
+                               "annotation_processor_deps",
+                               "apk_under_test",
+                               "app_as_shared_lib",
+                               "assert_no_deps",
+                               "bundles_supported",
+                               "chromium_code",
+                               "command_line_flags_file",
+                               "create_apk_script",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "dexlayout_profile",
+                               "disable_r8_outlining",
+                               "dist_ijar_path",
+                               "enable_lint",
+                               "enable_multidex",
+                               "enable_native_mocks",
+                               "enable_proguard_checks",
+                               "enforce_resource_overlays_in_tests",
+                               "expected_android_manifest",
+                               "expected_android_manifest_base",
+                               "expected_libs_and_assets",
+                               "expected_libs_and_assets_base",
+                               "generate_buildconfig_java",
+                               "generate_final_jni",
+                               "include_size_info",
+                               "input_jars_paths",
+                               "use_modern_linker",
+                               "jacoco_never_instrument",
+                               "javac_args",
+                               "jni_registration_header",
+                               "jni_sources_exclusions",
+                               "keystore_name",
+                               "keystore_password",
+                               "keystore_path",
+                               "lint_baseline_file",
+                               "lint_min_sdk_version",
+                               "lint_suppressions_dep",
+                               "lint_suppressions_file",
+                               "load_library_from_apk",
+                               "loadable_modules",
+                               "manifest_package",
+                               "max_sdk_version",
+                               "product_config_java_packages",
+                               "main_component_library",
+                               "min_sdk_version",
+                               "native_lib_placeholders",
+                               "never_incremental",
+                               "no_xml_namespaces",
+                               "png_to_webp",
+                               "post_process_package_resources_script",
+                               "processor_args_javac",
+                               "product_version_resources_dep",
+                               "proguard_configs",
+                               "proguard_enabled",
+                               "proguard_enable_obfuscation",
+                               "r_java_root_package_name",
+                               "resource_exclusion_exceptions",
+                               "resource_exclusion_regex",
+                               "resource_ids_provider_dep",
+                               "resource_values_filter_rules",
+                               "resources_config_paths",
+                               "require_native_mocks",
+                               "secondary_abi_loadable_modules",
+                               "secondary_abi_shared_libraries",
+                               "secondary_native_lib_placeholders",
+                               "shared_libraries",
+                               "shared_resources",
+                               "shared_resources_allowlist_locales",
+                               "shared_resources_allowlist_target",
+                               "short_resource_paths",
+                               "sources",
+                               "srcjar_deps",
+                               "static_library_dependent_targets",
+                               "static_library_provider",
+                               "static_library_synchronized_proguard",
+                               "strip_resource_names",
+                               "support_zh_hk",
+                               "target_sdk_version",
+                               "testonly",
+                               "uncompress_dex",
+                               "uncompress_shared_libraries",
+                               "library_always_compress",
+                               "library_renames",
+                               "use_chromium_linker",
+                               "version_code",
+                               "version_name",
+                               "visibility",
+                             ])
+      is_bundle_module = false
+      name = invoker.apk_name
+      if (defined(invoker.final_apk_path)) {
+        final_apk_path = invoker.final_apk_path
+      } else {
+        final_apk_path = "$root_build_dir/apks/${invoker.apk_name}.apk"
+      }
+      metadata = {
+        install_artifacts = [ final_apk_path ]
+      }
+      if (defined(invoker.static_library_provider)) {
+        metadata.install_artifacts_barrier = []
+      }
+    }
+  }
+
+  # Declare an Android app bundle module target.
+  #
+  # The module is implemented via android_apk_or_module() and is meant to be
+  # packaged into an android_app_bundle() target.
+  #
+  # Supports all variables of android_library(), plus:
+  #   module_name: Name of the module.
+  #   is_base_module: If defined and true, indicates that this is the bundle's
+  #     base module (optional).
+  #   base_module_target: Base module target of the bundle this module will be
+  #     added to. Required for, and only valid for, non-base modules.
+  #   bundle_target: Bundle target that this module belongs to (optional).
+  #     Can only be specified for base modules.
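+  #
+  # Example (an illustrative sketch only; all target and file names below are
+  # hypothetical):
+  #   android_app_bundle_module("foo_base_module") {
+  #     module_name = "base"
+  #     is_base_module = true
+  #     android_manifest = "AndroidManifest.xml"
+  #     deps = [ ":foo_java" ]
+  #   }
+  #
+  #   android_app_bundle_module("foo_feature_module") {
+  #     module_name = "foo_feature"
+  #     android_manifest = "feature/AndroidManifest.xml"
+  #     base_module_target = ":foo_base_module"
+  #   }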
+  template("android_app_bundle_module") {
+    _is_base_module = defined(invoker.is_base_module) && invoker.is_base_module
+
+    if (_is_base_module) {
+      assert(!defined(invoker.base_module_target))
+    } else {
+      assert(!defined(invoker.app_as_shared_lib))
+      assert(!defined(invoker.shared_resources))
+      assert(!defined(invoker.shared_resources_allowlist_target))
+      assert(!defined(invoker.shared_resources_allowlist_locales))
+      assert(defined(invoker.base_module_target))
+      assert(!defined(invoker.bundle_target))
+    }
+
+    # TODO(tiborg): We have several flags that are necessary workarounds
+    # arising from the fact that resources get compiled in the bundle module
+    # target. As a consequence, bundle modules must have certain flags in
+    # common and must know information about the base module. Those flags
+    # include version_code, version_name, and base_module_target. It would be
+    # better to move the resource compile target into the bundle target.
+    # Doing so would keep the bundle modules independent from the bundle and
+    # potentially allow reusing the same bundle modules for multiple bundles.
+    android_apk_or_module(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_allowlist",
+                               "additional_jar_files",
+                               "alternative_android_sdk_dep",
+                               "android_manifest",
+                               "android_manifest_dep",
+                               "annotation_processor_deps",
+                               "app_as_shared_lib",
+                               "assert_no_deps",
+                               "base_module_target",
+                               "bundle_target",
+                               "chromium_code",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "enable_multidex",
+                               "expected_android_manifest",
+                               "expected_android_manifest_base",
+                               "extra_verification_manifest",
+                               "extra_verification_manifest_dep",
+                               "generate_buildconfig_java",
+                               "generate_final_jni",
+                               "input_jars_paths",
+                               "isolated_splits_enabled",
+                               "is_base_module",
+                               "jacoco_never_instrument",
+                               "jar_excluded_patterns",
+                               "javac_args",
+                               "jni_registration_header",
+                               "jni_sources_exclusions",
+                               "load_library_from_apk",
+                               "loadable_modules",
+                               "product_config_java_packages",
+                               "manifest_package",
+                               "max_sdk_version",
+                               "min_sdk_version",
+                               "native_lib_placeholders",
+                               "no_xml_namespaces",
+                               "package_id",
+                               "package_name",
+                               "png_to_webp",
+                               "processor_args_javac",
+                               "product_version_resources_dep",
+                               "proguard_configs",
+                               "proguard_enabled",
+                               "proguard_enable_obfuscation",
+                               "resource_exclusion_exceptions",
+                               "resource_exclusion_regex",
+                               "resource_ids_provider_dep",
+                               "resource_values_filter_rules",
+                               "resources_config_paths",
+                               "secondary_abi_loadable_modules",
+                               "secondary_abi_shared_libraries",
+                               "secondary_native_lib_placeholders",
+                               "shared_libraries",
+                               "shared_resources",
+                               "shared_resources_allowlist_locales",
+                               "shared_resources_allowlist_target",
+                               "short_resource_paths",
+                               "srcjar_deps",
+                               "static_library_provider",
+                               "static_library_synchronized_proguard",
+                               "strip_resource_names",
+                               "support_zh_hk",
+                               "target_sdk_version",
+                               "testonly",
+                               "uncompress_shared_libraries",
+                               "library_always_compress",
+                               "library_renames",
+                               "use_chromium_linker",
+                               "use_modern_linker",
+                               "uses_split",
+                               "version_code",
+                               "version_name",
+                               "visibility",
+                             ])
+      is_bundle_module = true
+      generate_buildconfig_java = _is_base_module
+    }
+  }
+
+  # Declare an Android instrumentation test runner.
+  #
+  # This target creates a wrapper script to run Android instrumentation tests.
+  #
+  # Arguments:
+  #   android_test_apk: The target containing the tests.
+  #   android_test_apk_name: The apk_name of the android_test_apk target.
+  #
+  #   The following args are optional:
+  #   apk_under_test: The target being tested.
+  #   additional_apks: Additional targets to install on device.
+  #   data: List of runtime data file dependencies.
+  #   data_deps: List of non-linked dependencies.
+  #   deps: List of private dependencies.
+  #   extra_args: Extra arguments set for test runner.
+  #   ignore_all_data_deps: Don't build data_deps and additional_apks.
+  #   modules: Extra dynamic feature modules to install for test target. Can
+  #     only be used if |apk_under_test| is an Android app bundle.
+  #   fake_modules: Similar to |modules|, but the modules are fake-installed
+  #     instead.
+  #   never_incremental: Disable incremental builds.
+  #   proguard_enabled: Enable ProGuard.
+  #   public_deps: List of public dependencies.
+  #
+  # Example
+  #   instrumentation_test_runner("foo_test_for_bar") {
+  #     android_test_apk = ":foo"
+  #     android_test_apk_name = "Foo"
+  #     apk_under_test = ":bar"
+  #   }
+  template("instrumentation_test_runner") {
+    test_runner_script(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "additional_apks",
+                               "additional_locales",
+                               "apk_under_test",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "extra_args",
+                               "fake_modules",
+                               "ignore_all_data_deps",
+                               "modules",
+                               "proguard_enabled",
+                               "public_deps",
+                               "use_webview_provider",
+                             ])
+      test_name = invoker.target_name
+      test_type = "instrumentation"
+      _apk_target_name = get_label_info(invoker.android_test_apk, "name")
+      apk_target = ":$_apk_target_name"
+      test_jar = "$root_build_dir/test.lib.java/" +
+                 invoker.android_test_apk_name + ".jar"
+      incremental_apk = !(defined(invoker.never_incremental) &&
+                          invoker.never_incremental) && incremental_install
+
+      public_deps = [
+        ":$_apk_target_name",
+
+        # Required by test runner to enumerate test list.
+        ":${_apk_target_name}_dist_ijar",
+      ]
+      if (defined(invoker.apk_under_test)) {
+        public_deps += [ invoker.apk_under_test ]
+      }
+      if (defined(invoker.additional_apks)) {
+        public_deps += invoker.additional_apks
+      }
+    }
+  }
+
+  # Declare an Android instrumentation test apk.
+  #
+  # This target creates an Android instrumentation test apk.
+  #
+  # Supports all variables of android_apk(), plus:
+  #   apk_under_test: The apk being tested (optional).
+  #
+  # Example
+  #   android_test_apk("foo_test_apk") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     apk_name = "FooTest"
+  #     apk_under_test = "Foo"
+  #     sources = [
+  #       "android/org/chromium/foo/FooTestCase.java",
+  #       "android/org/chromium/foo/FooExampleTest.java",
+  #     ]
+  #     deps = [
+  #       ":foo_test_support_java"
+  #     ]
+  #   }
+  template("android_test_apk") {
+    android_apk(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      testonly = true
+
+      # The size info enables the test_runner to find the source file location
+      # of a test after it is run.
+      include_size_info = true
+      data = [ "$root_build_dir/size-info/${invoker.apk_name}.apk.jar.info" ]
+      if (defined(invoker.data)) {
+        data += invoker.data
+      }
+
+      deps = [ "//testing/android/broker:broker_java" ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      data_deps = [
+        # Ensure unstripped libraries are included in runtime deps so that
+        # symbolization can be done.
+        ":${target_name}__secondary_abi_shared_library_list",
+        ":${target_name}__shared_library_list",
+      ]
+      if (defined(invoker.data_deps)) {
+        data_deps += invoker.data_deps
+      }
+      if (defined(invoker.apk_under_test)) {
+        data_deps += [ invoker.apk_under_test ]
+      } else {
+        enable_native_mocks = true
+      }
+
+      if (defined(invoker.apk_under_test)) {
+        _under_test_label =
+            get_label_info(invoker.apk_under_test, "label_no_toolchain")
+        data_deps += [
+          "${_under_test_label}__secondary_abi_shared_library_list",
+          "${_under_test_label}__shared_library_list",
+        ]
+      }
+
+      if (defined(invoker.additional_apks)) {
+        data_deps += invoker.additional_apks
+      }
+      if (defined(invoker.use_webview_provider)) {
+        data_deps += [ invoker.use_webview_provider ]
+      }
+
+      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled &&
+          !incremental_install) {
+        # When ProGuard is on, we use ProGuard to combine the under test java
+        # code and the test java code. This is to allow us to apply all ProGuard
+        # optimizations that we ship with, but not have them break tests. The
+        # apk under test will still have the same resources, assets, and
+        # manifest, all of which are the ones used in the tests.
+        proguard_configs = [ "//testing/android/proguard_for_test.flags" ]
+        if (defined(invoker.proguard_configs)) {
+          proguard_configs += invoker.proguard_configs
+        }
+        enable_proguard_checks = false
+        if (defined(invoker.final_apk_path)) {
+          _final_apk_path = final_apk_path
+        } else {
+          _final_apk_path = "$root_build_dir/apks/${invoker.apk_name}.apk"
+        }
+        data += [ "$_final_apk_path.mapping" ]
+      }
+
+      dist_ijar_path = "$root_build_dir/test.lib.java/${invoker.apk_name}.jar"
+      create_apk_script = false
+
+      forward_variables_from(invoker,
+                             "*",
+                             TESTONLY_AND_VISIBILITY + [
+                                   "data",
+                                   "data_deps",
+                                   "deps",
+                                   "proguard_configs",
+                                 ])
+    }
+  }
+
+  # Declare an Android instrumentation test apk with wrapper script.
+  #
+  # This target creates an Android instrumentation test apk with wrapper script
+  # to run the test.
+  #
+  # Supports all variables of android_test_apk.
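+  #
+  # Example (an illustrative sketch; target names are hypothetical):
+  #   instrumentation_test_apk("foo_test_apk") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     apk_name = "FooTest"
+  #     apk_under_test = ":foo_apk"
+  #     deps = [ ":foo_test_java" ]
+  #   }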
+  template("instrumentation_test_apk") {
+    assert(defined(invoker.apk_name))
+    _apk_target_name = "${target_name}__test_apk"
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    android_test_apk(_apk_target_name) {
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+    }
+    instrumentation_test_runner(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "additional_apks",
+                               "apk_under_test",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "extra_args",
+                               "ignore_all_data_deps",
+                               "modules",
+                               "never_incremental",
+                               "proguard_enabled",
+                               "proguard_enable_obfuscation",
+                               "public_deps",
+                               "use_webview_provider",
+                             ])
+      android_test_apk = ":${_apk_target_name}"
+      android_test_apk_name = invoker.apk_name
+    }
+  }
+
+  # Declare an Android gtest apk.
+  #
+  # This target creates an Android apk for running gtest-based unittests.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. These will be passed to
+  #     the underlying android_apk invocation and should include the java and
+  #     resource dependencies of the apk.
+  #   shared_library: shared_library target that contains the unit tests.
+  #   apk_name: The name of the produced apk. If unspecified, it uses the name
+  #             of the shared_library target suffixed with "_apk".
+  #   use_default_launcher: Whether the default activity (NativeUnitTestActivity)
+  #     should be used for launching tests.
+  #   use_native_activity: Test implements ANativeActivity_onCreate().
+  #
+  # Example
+  #   unittest_apk("foo_unittests_apk") {
+  #     deps = [ ":foo_java", ":foo_resources" ]
+  #     shared_library = ":foo_unittests"
+  #   }
+  template("unittest_apk") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    _use_native_activity =
+        defined(invoker.use_native_activity) && invoker.use_native_activity
+    _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml"
+    assert(invoker.shared_library != "")
+
+    # This trivial assert is needed in case android_manifest is defined,
+    # as otherwise _use_native_activity and _android_manifest would not be used.
+    assert(_use_native_activity != "" && _android_manifest != "")
+
+    if (!defined(invoker.android_manifest)) {
+      jinja_template("${target_name}_manifest") {
+        _native_library_name = get_label_info(invoker.shared_library, "name")
+        if (defined(invoker.android_manifest_template)) {
+          input = invoker.android_manifest_template
+        } else {
+          input =
+              "//testing/android/native_test/java/AndroidManifest.xml.jinja2"
+        }
+        output = _android_manifest
+        variables = [
+          "is_component_build=${is_component_build}",
+          "native_library_name=${_native_library_name}",
+          "use_native_activity=${_use_native_activity}",
+        ]
+      }
+    }
+
+    android_apk(target_name) {
+      data_deps = []
+      forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+      testonly = true
+      create_apk_script = false
+      enable_native_mocks = true
+
+      # TODO(crbug.com/1099849): Figure out why angle tests fail to launch
+      #     with newer target_sdk_version.
+      if (!defined(invoker.target_sdk_version) && _use_native_activity) {
+        target_sdk_version = 24
+      }
+
+      assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled ||
+             invoker.proguard_configs != [])
+
+      if (!defined(apk_name)) {
+        apk_name = get_label_info(invoker.shared_library, "name")
+      }
+
+      if (!defined(android_manifest)) {
+        android_manifest_dep = ":${target_name}_manifest"
+        android_manifest = _android_manifest
+      }
+
+      final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
+
+      if (!defined(use_default_launcher) || use_default_launcher) {
+        deps += [
+          "//base:base_java",
+          "//build/android/gtest_apk:native_test_instrumentation_test_runner_java",
+          "//testing/android/native_test:native_test_java",
+        ]
+      }
+      shared_libraries = [ invoker.shared_library ]
+      deps += [
+        ":${target_name}__secondary_abi_shared_library_list",
+        ":${target_name}__shared_library_list",
+      ]
+    }
+  }
+
+  # Generate .java files from .aidl files.
+  #
+  # This target will store the .java files in a srcjar and should be included in
+  # an android_library or android_apk's srcjar_deps.
+  #
+  # Variables
+  #   sources: Paths to .aidl files to compile.
+  #   import_include: List of paths to directories containing .java files
+  #     imported by the .aidl files.
+  #   interface_file: Preprocessed aidl file to import.
+  #
+  # Example
+  #   android_aidl("foo_aidl") {
+  #     import_include = "java/src"
+  #     sources = [
+  #       "java/src/com/foo/bar/FooBarService.aidl",
+  #       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+  #     ]
+  #   }
+  template("android_aidl") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+      script = "//build/android/gyp/aidl.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      sources = invoker.sources
+
+      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      _aidl_path = "${android_sdk_build_tools}/aidl"
+      _framework_aidl = "$android_sdk/framework.aidl"
+      _imports = [ _framework_aidl ]
+      if (defined(invoker.interface_file)) {
+        assert(invoker.interface_file != "")
+        _imports += [ invoker.interface_file ]
+      }
+
+      inputs = [ _aidl_path ] + _imports
+
+      outputs = [ _srcjar_path ]
+      _rebased_imports = rebase_path(_imports, root_build_dir)
+      args = [
+        "--aidl-path",
+        rebase_path(_aidl_path, root_build_dir),
+        "--imports=$_rebased_imports",
+        "--srcjar",
+        rebase_path(_srcjar_path, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+      if (defined(invoker.import_include) && invoker.import_include != []) {
+        _rebased_import_paths = []
+        foreach(_import_path, invoker.import_include) {
+          _rebased_import_paths +=
+              [ rebase_path(_import_path, root_build_dir) ]
+        }
+        args += [ "--includes=$_rebased_import_paths" ]
+      }
+      args += rebase_path(sources, root_build_dir)
+    }
+  }
+
+  # Compile a protocol buffer to java.
+  #
+  # This generates java files from protocol buffers and creates an Android library
+  # containing the classes.
+  #
+  # Variables
+  #   sources (required)
+  #       Paths to .proto files to compile.
+  #
+  #   proto_path (required)
+  #       Root directory of .proto files.
+  #
+  #   deps (optional)
+  #       Additional dependencies. Passed through to both the action and the
+  #       android_library targets.
+  #
+  #   import_dirs (optional)
+  #       A list of extra import directories to be passed to protoc compiler.
+  #       WARNING: This circumvents proto checkdeps, and should only be used
+  #       when needed, typically when proto files cannot cleanly import through
+  #       absolute paths, such as for third_party or generated .proto files.
+  #       http://crbug.com/691451 tracks fixing this.
+  #
+  # Example:
+  #  proto_java_library("foo_proto_java") {
+  #    proto_path = "src/foo"
+  #    sources = [ "$proto_path/foo.proto" ]
+  #  }
+  template("proto_java_library") {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+    _template_name = target_name
+
+    action_with_pydeps("${_template_name}__protoc_java") {
+      # The suffix "__protoc_java.srcjar" is used by SuperSize to identify
+      # protobuf symbols.
+      _srcjar_path = "$target_gen_dir/$target_name.srcjar"
+      script = "//build/protoc_java.py"
+
+      deps = []
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      sources = invoker.sources
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [ _srcjar_path ]
+      args = [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+               "--protoc",
+               rebase_path(android_protoc_bin, root_build_dir),
+               "--proto-path",
+               rebase_path(invoker.proto_path, root_build_dir),
+               "--srcjar",
+               rebase_path(_srcjar_path, root_build_dir),
+             ] + rebase_path(sources, root_build_dir)
+
+      if (defined(invoker.import_dirs)) {
+        foreach(_import_dir, invoker.import_dirs) {
+          args += [
+            "--import-dir",
+            rebase_path(_import_dir, root_build_dir),
+          ]
+        }
+      }
+    }
+
+    android_library(target_name) {
+      chromium_code = false
+      sources = []
+      srcjar_deps = [ ":${_template_name}__protoc_java" ]
+      deps = [ "//third_party/android_deps:protobuf_lite_runtime_java" ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+  }
+
+  # Declare an Android library target for a prebuilt AAR.
+  #
+  # This target creates an Android library containing java code and Android
+  # resources. For libraries without resources, it will not generate
+  # corresponding android_resources targets.
+  #
+  # To avoid slowing down "gn gen", an associated .info file must be committed
+  # along with the .aar file. In order to create this file, define the target
+  # and then run once with the gn arg "update_android_aar_prebuilts = true".
+  #
+  # Variables
+  #   aar_path: Path to the AAR.
+  #   info_path: Path to the .aar.info file (generated via
+  #       update_android_aar_prebuilts GN arg).
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #       any apk that depends on this library.
+  #   ignore_aidl: Whether to ignore .aidl files found within the .aar.
+  #   ignore_assets: Whether to ignore assets found in the .aar.
+  #   ignore_manifest: Whether to skip processing the manifest found in the
+  #       .aar.
+  #   ignore_native_libraries: Whether to ignore .so files found in the .aar.
+  #       See also extract_native_libraries.
+  #   ignore_proguard_configs: Whether to ignore proguard configs.
+  #   ignore_info_updates: Whether to skip updating the .info file when the
+  #       update_android_aar_prebuilts gn arg is true. The info file will
+  #       still be verified regardless of the value of this flag.
+  #   strip_resources: Whether to ignore android resources found in the .aar.
+  #   custom_package: Java package for generated R.java files.
+  #   extract_native_libraries: Whether to extract .so files found in the .aar.
+  #       If the .aar contains .so files, either extract_native_libraries or
+  #       ignore_native_libraries must be set.
+  #   TODO(jbudorick@): remove this argument after crbug.com/522043 is fixed.
+  #   requires_android: Whether this target can only be used for compiling
+  #       Android related targets.
+  #
+  # Example
+  #   android_aar_prebuilt("foo_java") {
+  #     aar_path = "foo.aar"
+  #   }
+  template("android_aar_prebuilt") {
+    _info_path = "$target_name.info"
+    if (defined(invoker.info_path)) {
+      _info_path = invoker.info_path
+    }
+    _output_path = "${target_out_dir}/${target_name}"
+    _target_name_without_java_or_junit =
+        string_replace(string_replace(target_name, "_java", ""), "_junit", "")
+
+    # This unpack target is a python action, not a valid java target. Since the
+    # java targets below depend on it, its name must not match the java patterns
+    # in internal_rules.gni.
+    _unpack_target_name = "${_target_name_without_java_or_junit}__unpack_aar"
+    _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl
+    _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets
+    _ignore_manifest =
+        defined(invoker.ignore_manifest) && invoker.ignore_manifest
+    _ignore_native_libraries = defined(invoker.ignore_native_libraries) &&
+                               invoker.ignore_native_libraries
+    _ignore_proguard_configs = defined(invoker.ignore_proguard_configs) &&
+                               invoker.ignore_proguard_configs
+    _extract_native_libraries = defined(invoker.extract_native_libraries) &&
+                                invoker.extract_native_libraries
+    _strip_resources =
+        defined(invoker.strip_resources) && invoker.strip_resources
+
+    # Allow 'resource_overlay' parameter even if there are no resources in order
+    # to keep the logic for generated 'android_aar_prebuilt' rules simple.
+    not_needed(invoker, [ "resource_overlay" ])
+
+    _ignore_info_updates =
+        defined(invoker.ignore_info_updates) && invoker.ignore_info_updates
+
+    # Scan the AAR file and determine the resources and jar files.
+    # Some libraries might not have resources; others might have two jars.
+    if (!_ignore_info_updates && update_android_aar_prebuilts) {
+      print("Writing " + rebase_path(_info_path, "//"))
+      exec_script("//build/android/gyp/aar.py",
+                  [
+                    "list",
+                    rebase_path(invoker.aar_path, root_build_dir),
+                    "--output",
+                    rebase_path(_info_path, root_build_dir),
+                  ])
+    }
+
+    # If "gn gen" is failing on the following line, you need to generate an
+    # .info file for your new target by running:
+    #   gn gen --args='target_os="android" update_android_aar_prebuilts=true' out/tmp
+    #   rm -r out/tmp
+    _scanned_files = read_file(_info_path, "scope")
+
+    _use_scanned_assets = !_ignore_assets && _scanned_files.assets != []
+
+    assert(_ignore_aidl || _scanned_files.aidl == [],
+           "android_aar_prebuilt() aidl not yet supported." +
+               " Implement or use ignore_aidl = true." +
+               " http://crbug.com/644439")
+    assert(
+        !_scanned_files.has_native_libraries ||
+            (_ignore_native_libraries || _extract_native_libraries),
+        "android_aar_prebuilt() contains .so files." +
+            " Please set ignore_native_libraries or extract_native_libraries.")
+    assert(
+        !(_ignore_native_libraries && _extract_native_libraries),
+        "ignore_native_libraries and extract_native_libraries cannot both be set.")
+    assert(!_scanned_files.has_native_libraries ||
+           _scanned_files.native_libraries != [])
+    assert(_scanned_files.has_classes_jar || _scanned_files.subjars == [])
+
+    action_with_pydeps(_unpack_target_name) {
+      script = "//build/android/gyp/aar.py"  # Unzips the AAR
+      args = [
+        "extract",
+        rebase_path(invoker.aar_path, root_build_dir),
+        "--output-dir",
+        rebase_path(_output_path, root_build_dir),
+        "--assert-info-file",
+        rebase_path(_info_path, root_build_dir),
+      ]
+      if (_strip_resources) {
+        args += [ "--ignore-resources" ]
+      }
+      inputs = [ invoker.aar_path ]
+      outputs = [ "${_output_path}/AndroidManifest.xml" ]
+      if (!_strip_resources && _scanned_files.has_r_text_file) {
+        # Certain packages, in particular Play Services, have no R.txt even
+        # though its presence is mandated by the AAR spec. Such packages cause
+        # spurious rebuilds if this output is specified unconditionally.
+        outputs += [ "${_output_path}/R.txt" ]
+      }
+
+      if (!_strip_resources && _scanned_files.resources != []) {
+        outputs += get_path_info(
+                rebase_path(_scanned_files.resources, "", _output_path),
+                "abspath")
+      }
+      if (_scanned_files.has_classes_jar) {
+        outputs += [ "${_output_path}/classes.jar" ]
+      }
+      outputs +=
+          get_path_info(rebase_path(_scanned_files.subjars, "", _output_path),
+                        "abspath")
+      if (!_ignore_proguard_configs) {
+        if (_scanned_files.has_proguard_flags) {
+          outputs += [ "${_output_path}/proguard.txt" ]
+        }
+      }
+
+      if (_extract_native_libraries && _scanned_files.has_native_libraries) {
+        outputs += get_path_info(
+                rebase_path(_scanned_files.native_libraries, "", _output_path),
+                "abspath")
+      }
+      if (_use_scanned_assets) {
+        outputs +=
+            get_path_info(rebase_path(_scanned_files.assets, "", _output_path),
+                          "abspath")
+      }
+    }
+
+    _has_unignored_resources =
+        !_strip_resources &&
+        (_scanned_files.resources != [] || _scanned_files.has_r_text_file)
+
+    _should_process_manifest =
+        !_ignore_manifest && !_scanned_files.is_manifest_empty
+
+    # Create the android_resources target for resources.
+    if (_has_unignored_resources || _should_process_manifest) {
+      _res_target_name = "${target_name}__resources"
+      android_resources(_res_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "custom_package",
+                                 "resource_overlay",
+                                 "testonly",
+                                 "strip_drawables",
+                               ])
+        deps = [ ":$_unpack_target_name" ]
+        if (_should_process_manifest) {
+          android_manifest_dep = ":$_unpack_target_name"
+          android_manifest = "${_output_path}/AndroidManifest.xml"
+        } else if (defined(_scanned_files.manifest_package) &&
+                   !defined(custom_package)) {
+          custom_package = _scanned_files.manifest_package
+        }
+
+        sources = []
+        if (!_strip_resources) {
+          sources = rebase_path(_scanned_files.resources, "", _output_path)
+        }
+        if (!_strip_resources && _scanned_files.has_r_text_file) {
+          r_text_file = "${_output_path}/R.txt"
+        }
+      }
+    } else if (defined(invoker.strip_drawables)) {
+      not_needed(invoker, [ "strip_drawables" ])
+    }
+
+    if (_ignore_manifest) {
+      # Having this available can be useful for DFMs that depend on AARs. It
+      # provides a way to have manifest entries go into the base split while
+      # the code goes into a DFM.
+      java_group("${target_name}__ignored_manifest") {
+        forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+        deps = [ ":$_unpack_target_name" ]
+        mergeable_android_manifests = [ "${_output_path}/AndroidManifest.xml" ]
+      }
+    }
+
+    # Create the android_assets target for assets.
+    if (_use_scanned_assets) {
+      _assets_target_name = "${target_name}__assets"
+      android_assets(_assets_target_name) {
+        forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+        renaming_sources = []
+        renaming_destinations = []
+        foreach(_asset_file, _scanned_files.assets) {
+          _original_path =
+              get_path_info(rebase_path(_asset_file, "", _output_path),
+                            "abspath")
+          _updated_path = string_replace(_asset_file, "assets/", "", 1)
+          renaming_sources += [ _original_path ]
+          renaming_destinations += [ _updated_path ]
+        }
+      }
+    }
+
+    # Create android_java_prebuilt target for classes.jar.
+    if (_scanned_files.has_classes_jar) {
+      _java_library_vars = [
+        "bytecode_rewriter_target",
+        "enable_bytecode_checks",
+        "jar_excluded_patterns",
+        "jar_included_patterns",
+        "missing_classes_allowlist",
+        "requires_android",
+        "testonly",
+      ]
+
+      # Create android_java_prebuilt target for extra jars within jars/.
+      _subjar_targets = []
+      foreach(_tuple, _scanned_files.subjar_tuples) {
+        _current_target = "${target_name}__subjar_${_tuple[0]}"
+        _subjar_targets += [ ":$_current_target" ]
+        java_prebuilt(_current_target) {
+          forward_variables_from(invoker, _java_library_vars)
+          deps = [ ":$_unpack_target_name" ]
+          if (!defined(requires_android)) {
+            requires_android = true
+          }
+          supports_android = true
+          jar_path = "$_output_path/${_tuple[1]}"
+          _base_output_name = get_path_info(jar_path, "name")
+          output_name = "${invoker.target_name}-$_base_output_name"
+          public_target_label = invoker.target_name
+        }
+      }
+
+      _jar_target_name = "${target_name}__classes"
+      java_prebuilt(_jar_target_name) {
+        forward_variables_from(invoker, _java_library_vars)
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "input_jars_paths",
+                                 "proguard_configs",
+                               ])
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += _subjar_targets + [ ":$_unpack_target_name" ]
+        if (defined(_res_target_name)) {
+          deps += [ ":$_res_target_name" ]
+        }
+        if (!defined(requires_android)) {
+          requires_android = true
+        }
+        supports_android = true
+        jar_path = "$_output_path/classes.jar"
+        aar_path = invoker.aar_path
+        output_name = invoker.target_name
+
+        if (!_ignore_proguard_configs) {
+          if (!defined(proguard_configs)) {
+            proguard_configs = []
+          }
+          if (_scanned_files.has_proguard_flags) {
+            proguard_configs += [ "$_output_path/proguard.txt" ]
+          }
+        }
+        public_target_label = invoker.target_name
+      }
+    }
+
+    java_group(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+      public_deps = [ ":$_unpack_target_name" ]
+      deps = []
+      if (defined(_jar_target_name)) {
+        deps += [ ":$_jar_target_name" ]
+
+        # Although subjars are meant to be private, we add them as deps here
+        # because in practice they seem to contain classes required to be in the
+        # classpath.
+        deps += _subjar_targets
+      }
+      if (defined(_res_target_name)) {
+        deps += [ ":$_res_target_name" ]
+      }
+      if (defined(_assets_target_name)) {
+        deps += [ ":$_assets_target_name" ]
+      }
+    }
+  }
+
+  # Create an Android application bundle from one base
+  # android_app_bundle_module target and zero or more associated feature
+  # module targets.
+  #
+  # Variables:
+  #    base_module_target: Name of the android_app_bundle_module target
+  #      corresponding to the base module for this application bundle. The
+  #      bundle file will include the same content in its base module, though in
+  #      a slightly different format.
+  #
+  #    bundle_base_path: Optional. If set, the bundle will be output to this
+  #      directory. Defaults to "$root_build_dir/apks".
+  #
+  #    bundle_name: Optional. If set, the bundle will be output to the
+  #      filename "${bundle_name}.aab".
+  #
+  #    extra_modules: Optional list of scopes, one per extra module used by
+  #      this bundle. Each scope must have a 'name' field that specifies the
+  #      module name (which cannot be 'base', since this is reserved for the
+  #      base module), and a 'module_target' field that specifies the
+  #      corresponding android_app_bundle_module target.
+  #
+  #    enable_language_splits: Optional. If true, enable APK splits based
+  #      on languages.
+  #
+  #    keystore_path: optional keystore path, used only when generating APKs.
+  #    keystore_name: optional keystore name, used only when generating APKs.
+  #    keystore_password: optional keystore password, used only when
+  #      generating APKs.
+  #
+  #    command_line_flags_file: Optional. If provided, name of the on-device
+  #      file that will be used to store command-line arguments. The default
+  #      is 'command_line_flags_file', but this is typically redefined to
+  #      something more specific for certain bundles (e.g. the Chromium-based
+  #      APKs use 'chrome-command-line', the WebView one uses
+  #      'webview-command-line').
+  #
+  #    proguard_enabled: Optional. True if proguarding is enabled for this
+  #      bundle. Default is to enable this only for release builds. Note that
+  #      this will always perform synchronized proguarding.
+  #
+  #    proguard_enable_obfuscation: Whether to enable obfuscation (default=true)
+  #
+  #    enable_multidex: Optional. Enable multidexing of optimized modules jars
+  #      when using synchronized proguarding. Only applies to base module.
+  #
+  #    proguard_android_sdk_dep: Optional. android_system_java_prebuilt() target
+  #      used as a library jar for synchronized proguarding.
+  #
+  #    compress_shared_libraries: Optional. Whether to compress shared libraries
+  #      such that they are extracted upon install. Libraries prefixed with
+  #      "crazy." are never compressed.
+  #
+  #    system_image_locale_allowlist: List of locales that should be included
+  #      on system APKs generated from this bundle.
+  #
+  #    static_library_provider: Specifies a single target that this target will
+  #      use as a static library APK.
+  #      Additionally, when allotting libraries to be packaged into modules, the
+  #      libraries packaged into the static library will be accounted for to
+  #      avoid library duplication. Effectively, the static library will be
+  #      treated as the parent of the base module.
+  #
+  #    expected_libs_and_assets: Verify the list of included native libraries
+  #      and assets is consistent with the given expectation file.
+  #    expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+  #      with this file as the base.
+  #    expected_proguard_config: Checks that the merged set of proguard flags
+  #      matches the given config.
+  #    expected_proguard_config_base: Treat expected_proguard_config as a diff
+  #      with this file as the base.
+  #
+  #    version_code: Optional. Version code of the target.
+  #
+  #    is_multi_abi: If true, adds a library placeholder for the missing ABI
+  #      if either the primary or the secondary ABI has no native libraries
+  #      set.
+  #
+  #    default_modules_for_testing: Optional. A list of DFMs that the wrapper
+  #      script should install. This is for local testing only, and does not
+  #      affect the actual DFMs in production.
+  # Example:
+  #   android_app_bundle("chrome_public_bundle") {
+  #      base_module_target = "//chrome/android:chrome_public_apk"
+  #      extra_modules = [
+  #        { # NOTE: Scopes require one field per line, and no comma separators.
+  #          name = "my_module"
+  #          module_target = ":my_module"
+  #        },
+  #      ]
+  #   }
+  #
+  template("android_app_bundle") {
+    _target_name = target_name
+    _uses_static_library = defined(invoker.static_library_provider)
+    _proguard_enabled =
+        defined(invoker.proguard_enabled) && invoker.proguard_enabled
+
+    if (defined(invoker.version_code)) {
+      _version_code = invoker.version_code
+    } else {
+      _version_code = android_default_version_code
+    }
+
+    if (android_override_version_code != "") {
+      _version_code = android_override_version_code
+    }
+
+    # Prevent "unused variable".
+    not_needed([ "_version_code" ])
+
+    _bundle_base_path = "$root_build_dir/apks"
+    if (defined(invoker.bundle_base_path)) {
+      _bundle_base_path = invoker.bundle_base_path
+    }
+
+    _bundle_name = _target_name
+    if (defined(invoker.bundle_name)) {
+      _bundle_name = invoker.bundle_name
+    }
+    _bundle_path = "$_bundle_base_path/${_bundle_name}.aab"
+    _rebased_bundle_path = rebase_path(_bundle_path, root_build_dir)
+
+    _base_target_name = get_label_info(invoker.base_module_target, "name")
+    _base_target_gen_dir =
+        get_label_info(invoker.base_module_target, "target_gen_dir")
+    _base_module_build_config =
+        "$_base_target_gen_dir/${_base_target_name}.build_config"
+    _base_module_build_config_target =
+        "${invoker.base_module_target}$build_config_target_suffix"
+    _rebased_base_module_build_config =
+        rebase_path(_base_module_build_config, root_build_dir)
+
+    _modules = [
+      {
+        name = "base"
+        module_target = invoker.base_module_target
+        build_config = _base_module_build_config
+        build_config_target = _base_module_build_config_target
+        if (_uses_static_library) {
+          parent = "lib"
+        }
+      },
+    ]
+
+    _enable_multidex =
+        !defined(invoker.enable_multidex) || invoker.enable_multidex
+
+    if (!_proguard_enabled && defined(invoker.min_sdk_version)) {
+      not_needed(invoker, [ "min_sdk_version" ])
+    }
+
+    # Prevent "unused variable".
+    not_needed([ "_enable_multidex" ])
+
+    if (_proguard_enabled) {
+      _uses_static_library_synchronized_proguard =
+          defined(invoker.static_library_synchronized_proguard) &&
+          invoker.static_library_synchronized_proguard
+
+      # TODO(crbug.com/1032609): Remove dexsplitter from Trichrome Proguard.
+      _dex_target = "${_target_name}__dex"
+      _proguard_mapping_path = "${_bundle_path}.mapping"
+    }
+
+    assert(_proguard_enabled || !defined(invoker.enable_multidex),
+           "Bundle only adds dexing step if proguarding is enabled.")
+
+    if (defined(invoker.extra_modules)) {
+      _module_count = 0
+      not_needed([ "_module_count" ])
+
+      foreach(_module, invoker.extra_modules) {
+        _module_count += 1
+        assert(defined(_module.name),
+               "Missing 'name' field for extra module #${_module_count}.")
+        assert(_module.name != "base",
+               "Module name 'base' is reserved for the main bundle module")
+        assert(
+            defined(_module.module_target),
+            "Missing 'module_target' field for extra module ${_module.name}.")
+        _module_target = _module.module_target
+        _module_target_name = get_label_info(_module_target, "name")
+        _module_target_gen_dir =
+            get_label_info(_module_target, "target_gen_dir")
+        _module.build_config =
+            "$_module_target_gen_dir/${_module_target_name}.build_config"
+        _module.build_config_target =
+            "$_module_target$build_config_target_suffix"
+        _module.parent = "base"
+        _modules += [ _module ]
+      }
+    }
+
+    # Make build config, which is required for synchronized proguarding.
+    _module_java_targets = []
+    _module_build_configs = []
+    _module_targets = []
+    foreach(_module, _modules) {
+      _module_targets += [ _module.module_target ]
+      _module_java_targets += [ "${_module.module_target}__java" ]
+      _module_build_configs += [ _module.build_config ]
+    }
+
+    if (_uses_static_library) {
+      _lib_proxy_module = {
+        name = "lib"
+      }
+      _static_library_target_name =
+          get_label_info(invoker.static_library_provider, "name")
+      _static_library_gen_dir =
+          get_label_info(invoker.static_library_provider, "target_gen_dir")
+      _lib_proxy_module.build_config =
+          "$_static_library_gen_dir/$_static_library_target_name.build_config"
+      _lib_proxy_module.build_config_target =
+          "${invoker.static_library_provider}$build_config_target_suffix"
+    }
+
+    # Allot native libraries to modules they should be packaged into. This is
+    # necessary since all libraries that are depended on by multiple modules
+    # have to go into base or the static shared library if it exists.
+    # TODO(crbug.com/1021565): It would be nice if this lived outside the
+    # android_app_bundle template and the static shared library would pull in
+    # the libs as allotted by this step.
+    _native_libraries_config =
+        "$target_gen_dir/$_target_name.native_libraries_config"
+    _native_libraries_config_target = "${_target_name}__allot_native_libraries"
+    allot_native_libraries(_native_libraries_config_target) {
+      modules = _modules
+      native_libraries_filearg_keys = [
+        "native:libraries",
+        "native:loadable_modules",
+      ]
+      output = _native_libraries_config
+      if (_uses_static_library) {
+        modules += [ _lib_proxy_module ]
+      }
+    }
+    if (defined(android_app_secondary_abi)) {
+      _secondary_abi_native_libraries_config =
+          "$target_gen_dir/$_target_name.secondary_abi_native_libraries_config"
+      _secondary_abi_native_libraries_config_target =
+          "${_target_name}__allot_secondary_abi_native_libraries"
+      allot_native_libraries(_secondary_abi_native_libraries_config_target) {
+        modules = _modules
+        native_libraries_filearg_keys = [
+          "native:secondary_abi_libraries",
+          "native:secondary_abi_loadable_modules",
+        ]
+        output = _secondary_abi_native_libraries_config
+        if (_uses_static_library) {
+          modules += [ _lib_proxy_module ]
+        }
+      }
+    }
+
+    # Used to expose the module Java targets of the bundle.
+    group("${_target_name}__java") {
+      deps = _module_java_targets
+    }
+    group("${_target_name}__compile_resources") {
+      deps = [ "${invoker.base_module_target}__compile_resources" ]
+    }
+
+    _build_config = "$target_gen_dir/${_target_name}.build_config"
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _build_config_target = "$_target_name$build_config_target_suffix"
+    if (defined(invoker.proguard_android_sdk_dep)) {
+      proguard_android_sdk_dep_ = invoker.proguard_android_sdk_dep
+    } else {
+      proguard_android_sdk_dep_ = "//third_party/android_sdk:android_sdk_java"
+    }
+
+    write_build_config(_build_config_target) {
+      type = "android_app_bundle"
+      possible_config_deps = _module_targets + [ proguard_android_sdk_dep_ ]
+      build_config = _build_config
+      proguard_enabled = _proguard_enabled
+      module_build_configs = _module_build_configs
+
+      if (_proguard_enabled) {
+        proguard_mapping_path = _proguard_mapping_path
+      }
+    }
+
+    if (_proguard_enabled) {
+      # If this bundle uses a static library, the static library APK will
+      # create the synchronized dex file path.
+      if (_uses_static_library_synchronized_proguard) {
+        if (defined(invoker.min_sdk_version)) {
+          not_needed(invoker, [ "min_sdk_version" ])
+        }
+      } else {
+        dex(_dex_target) {
+          forward_variables_from(invoker,
+                                 [
+                                   "expected_proguard_config",
+                                   "expected_proguard_config_base",
+                                   "min_sdk_version",
+                                   "proguard_enable_obfuscation",
+                                 ])
+          if (defined(expected_proguard_config)) {
+            top_target_name = _target_name
+          }
+          enable_multidex = _enable_multidex
+          proguard_enabled = true
+          proguard_mapping_path = _proguard_mapping_path
+          proguard_sourcefile_suffix = "$android_channel-$_version_code"
+          build_config = _build_config
+
+          deps = _module_java_targets + [ ":$_build_config_target" ]
+          modules = _modules
+        }
+      }
+    }
+
+    _all_create_module_targets = []
+    _all_module_zip_paths = []
+    _all_module_build_configs = []
+    foreach(_module, _modules) {
+      _module_target = _module.module_target
+      _module_build_config = _module.build_config
+      _module_build_config_target = _module.build_config_target
+
+      if (!_proguard_enabled) {
+        _dex_target_for_module = "${_module_target}__final_dex"
+      } else {
+        _dex_target_for_module = ":$_dex_target"
+      }
+
+      # Generate one module .zip file per bundle module.
+      #
+      # Important: the bundle tool uses the module's zip filename as the
+      # internal module name inside the final bundle. In other words, this
+      # file *must* be named ${_module.name}.zip.
+      _create_module_target = "${_target_name}__${_module.name}__create"
+      _module_zip_path = "$target_gen_dir/$target_name/${_module.name}.zip"
+
+      create_android_app_bundle_module(_create_module_target) {
+        forward_variables_from(invoker,
+                               [
+                                 "is_multi_abi",
+                                 "min_sdk_version",
+                                 "uncompress_dex",
+                                 "proguard_enabled",
+                               ])
+        module_name = _module.name
+        build_config = _module_build_config
+        module_zip_path = _module_zip_path
+        native_libraries_config = _native_libraries_config
+
+        if (module_name == "base" &&
+            defined(invoker.expected_libs_and_assets)) {
+          forward_variables_from(invoker,
+                                 [
+                                   "expected_libs_and_assets",
+                                   "expected_libs_and_assets_base",
+                                 ])
+          top_target_name = _target_name
+          build_config_target = _module_build_config_target
+          native_libraries_config_target = ":$_native_libraries_config_target"
+          if (defined(android_app_secondary_abi)) {
+            secondary_abi_native_libraries_config_target =
+                ":$_secondary_abi_native_libraries_config_target"
+          }
+        }
+
+        deps = [
+          ":$_native_libraries_config_target",
+          _dex_target_for_module,
+          _module_build_config_target,
+          _module_target,
+        ]
+
+        if (defined(android_app_secondary_abi)) {
+          secondary_abi_native_libraries_config =
+              _secondary_abi_native_libraries_config
+          deps += [ ":$_secondary_abi_native_libraries_config_target" ]
+        }
+      }
+
+      _all_create_module_targets += [
+        ":$_create_module_target",
+        _module_build_config_target,
+        "${_module_target}__compile_resources",
+      ]
+      _all_module_zip_paths += [ _module_zip_path ]
+      _all_module_build_configs += [ _module_build_config ]
+    }
+
+    _all_rebased_module_zip_paths =
+        rebase_path(_all_module_zip_paths, root_build_dir)
+
+    _enable_language_splits = defined(invoker.enable_language_splits) &&
+                              invoker.enable_language_splits
+
+    _split_dimensions = []
+    if (_enable_language_splits) {
+      _split_dimensions += [ "language" ]
+    }
+
+    _keystore_path = android_keystore_path
+    _keystore_password = android_keystore_password
+    _keystore_name = android_keystore_name
+
+    if (defined(invoker.keystore_path)) {
+      _keystore_path = invoker.keystore_path
+      _keystore_password = invoker.keystore_password
+      _keystore_name = invoker.keystore_name
+    }
+
+    _rebased_keystore_path = rebase_path(_keystore_path, root_build_dir)
+
+    _bundle_target_name = "${_target_name}__bundle"
+    action_with_pydeps(_bundle_target_name) {
+      script = "//build/android/gyp/create_app_bundle.py"
+      inputs = _all_module_zip_paths + _all_module_build_configs
+      outputs = [ _bundle_path ]
+      deps = _all_create_module_targets + [ ":$_build_config_target" ]
+      args = [
+        "--out-bundle=$_rebased_bundle_path",
+        "--rtxt-out-path=$_rebased_bundle_path.R.txt",
+        "--pathmap-out-path=$_rebased_bundle_path.pathmap.txt",
+        "--module-zips=$_all_rebased_module_zip_paths",
+      ]
+      if (_split_dimensions != []) {
+        args += [ "--split-dimensions=$_split_dimensions" ]
+      }
+      if (defined(invoker.compress_shared_libraries) &&
+          invoker.compress_shared_libraries) {
+        args += [ "--compress-shared-libraries" ]
+      }
+      if (treat_warnings_as_errors) {
+        args += [ "--warnings-as-errors" ]
+      }
+
+      if (_enable_language_splits) {
+        args += [
+          "--base-allowlist-rtxt-path=@FileArg(" + "${_rebased_base_module_build_config}:deps_info:base_allowlist_rtxt_path)",
+          "--base-module-rtxt-path=@FileArg(" +
+              "${_rebased_base_module_build_config}:deps_info:r_text_path)",
+        ]
+      }
+      if (defined(invoker.validate_services) && invoker.validate_services) {
+        args += [ "--validate-services" ]
+      }
+
+      foreach(_module, _modules) {
+        _rebased_build_config =
+            rebase_path(_module.build_config, root_build_dir)
+        args += [
+          "--uncompressed-assets=@FileArg(" +
+              "$_rebased_build_config:uncompressed_assets)",
+          "--rtxt-in-paths=@FileArg(" +
+              "$_rebased_build_config:deps_info:r_text_path)",
+          "--pathmap-in-paths=@FileArg(" +
+              "$_rebased_build_config:deps_info:module_pathmap_path)",
+          "--module-name=" + _module.name,
+        ]
+      }
+
+      # http://crbug.com/725224. Fix for bots running out of memory.
+      if (defined(java_cmd_pool_size)) {
+        pool = "//build/config/android:java_cmd_pool($default_toolchain)"
+      } else {
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+      }
+    }
+
+    # Create size info files for targets that care about size
+    # (have proguard enabled).
+    if (_proguard_enabled) {
+      # Merge all module targets to obtain size info files for all targets.
+      _all_module_targets = _module_targets
+
+      _size_info_target = "${_target_name}__size_info"
+      create_size_info_files(_size_info_target) {
+        name = "$_bundle_name.aab"
+        deps = _all_module_targets + [ ":$_build_config_target" ]
+        module_build_configs = _all_module_build_configs
+      }
+    }
+
+    if (_uses_static_library) {
+      _install_artifacts_target = "${target_name}__install_artifacts"
+      _install_artifacts_json =
+          "${target_gen_dir}/${target_name}.install_artifacts"
+      generated_file(_install_artifacts_target) {
+        output_conversion = "json"
+        deps = [ invoker.static_library_provider ]
+        outputs = [ _install_artifacts_json ]
+        data_keys = [ "install_artifacts" ]
+        rebase = root_build_dir
+      }
+    }
+
+    # Generate a wrapper script for the bundle.
+    _android_aapt2_path = android_sdk_tools_bundle_aapt2
+
+    _bundle_apks_path = "$_bundle_base_path/$_bundle_name.apks"
+    _bundle_wrapper_script_dir = "$root_build_dir/bin"
+    _bundle_wrapper_script_path = "$_bundle_wrapper_script_dir/$_target_name"
+
+    action_with_pydeps("${_target_name}__wrapper_script") {
+      script = "//build/android/gyp/create_bundle_wrapper_script.py"
+      inputs = [ _base_module_build_config ]
+      outputs = [ _bundle_wrapper_script_path ]
+
+      # Telemetry for bundles uses the wrapper script for installation.
+      data = [
+        _bundle_wrapper_script_path,
+        _android_aapt2_path,
+        _keystore_path,
+        _bundle_path,
+      ]
+      data_deps = [
+        "//build/android:apk_operations_py",
+        "//build/android:stack_tools",
+      ]
+
+      deps = [ _base_module_build_config_target ]
+      args = [
+        "--script-output-path",
+        rebase_path(_bundle_wrapper_script_path, root_build_dir),
+        "--package-name=@FileArg(" +
+            "$_rebased_base_module_build_config:deps_info:package_name)",
+        "--aapt2",
+        rebase_path(_android_aapt2_path, root_build_dir),
+        "--bundle-path",
+        _rebased_bundle_path,
+        "--bundle-apks-path",
+        rebase_path(_bundle_apks_path, root_build_dir),
+        "--target-cpu=$target_cpu",
+        "--keystore-path",
+        _rebased_keystore_path,
+        "--keystore-password",
+        _keystore_password,
+        "--key-name",
+        _keystore_name,
+      ]
+      if (defined(invoker.default_modules_for_testing)) {
+        args += [ "--default-modules" ] + invoker.default_modules_for_testing
+      }
+      if (defined(invoker.system_image_locale_allowlist)) {
+        args += [
+          "--system-image-locales=${invoker.system_image_locale_allowlist}",
+        ]
+      }
+      if (defined(invoker.command_line_flags_file)) {
+        args += [
+          "--command-line-flags-file",
+          invoker.command_line_flags_file,
+        ]
+      }
+      if (_uses_static_library) {
+        deps += [ ":$_install_artifacts_target" ]
+        _rebased_install_artifacts_json =
+            rebase_path(_install_artifacts_json, root_build_dir)
+        _static_library_apk_path =
+            "@FileArg($_rebased_install_artifacts_json[])"
+        args += [
+          "--additional-apk",
+          _static_library_apk_path,
+        ]
+      }
+
+      if (_proguard_enabled) {
+        args += [
+          "--proguard-mapping-path",
+          rebase_path(_proguard_mapping_path, root_build_dir),
+        ]
+
+        # Required by logcat command.
+        data_deps += [ "//build/android/stacktrace:java_deobfuscate" ]
+        data += [ _proguard_mapping_path ]
+      }
+    }
+
+    _enable_lint = defined(invoker.enable_lint) && invoker.enable_lint &&
+                   !disable_android_lint
+    if (_enable_lint) {
+      android_lint("${target_name}__lint") {
+        forward_variables_from(invoker,
+                               [
+                                 "lint_baseline_file",
+                                 "lint_suppressions_file",
+                                 "min_sdk_version",
+                               ])
+        build_config = _build_config
+        build_config_dep = ":$_build_config_target"
+        deps = _module_java_targets
+        if (defined(invoker.lint_suppressions_dep)) {
+          deps += [ invoker.lint_suppressions_dep ]
+        }
+        if (defined(invoker.lint_min_sdk_version)) {
+          min_sdk_version = invoker.lint_min_sdk_version
+        }
+      }
+    } else {
+      not_needed(invoker,
+                 [
+                   "lint_baseline_file",
+                   "lint_min_sdk_version",
+                   "lint_suppressions_dep",
+                   "lint_suppressions_file",
+                 ])
+    }
+
+    group(_target_name) {
+      public_deps = [
+        ":$_bundle_target_name",
+        ":${_target_name}__wrapper_script",
+      ]
+      if (defined(_size_info_target)) {
+        public_deps += [ ":$_size_info_target" ]
+      }
+      if (_enable_lint) {
+        if (!defined(data_deps)) {
+          data_deps = []
+        }
+        data_deps += [ ":${target_name}__lint" ]
+      }
+    }
+
+    _apks_path = "$root_build_dir/apks/$_bundle_name.apks"
+    action_with_pydeps("${_target_name}_apks") {
+      script = "//build/android/gyp/create_app_bundle_apks.py"
+      inputs = [ _bundle_path ]
+      outputs = [ _apks_path ]
+      data = [ _apks_path ]
+      args = [
+        "--bundle",
+        _rebased_bundle_path,
+        "--output",
+        rebase_path(_apks_path, root_build_dir),
+        "--aapt2-path",
+        rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+        "--keystore-path",
+        rebase_path(android_keystore_path, root_build_dir),
+        "--keystore-name",
+        android_keystore_name,
+        "--keystore-password",
+        android_keystore_password,
+      ]
+      deps = [ ":$_bundle_target_name" ]
+      metadata = {
+        install_artifacts = [ _apks_path ]
+      }
+      if (defined(invoker.static_library_provider)) {
+        metadata.install_artifacts_barrier = []
+      }
+
+      # http://crbug.com/725224. Fix for bots running out of memory.
+      if (defined(java_cmd_pool_size)) {
+        pool = "//build/config/android:java_cmd_pool($default_toolchain)"
+      } else {
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+      }
+    }
+  }
+
+  # Create an .apks file from an .aab file. The .apks file will contain the
+  # minimal set of .apk files needed for tracking binary size.
+  # The file will be created at "$bundle_path_without_extension.minimal.apks".
+  #
+  # Variables:
+  #   bundle_path: Path to the input .aab file.
+  #
+  # Example:
+  #   create_app_bundle_minimal_apks("minimal_apks") {
+  #     deps = [
+  #       ":bundle_target",
+  #     ]
+  #     bundle_path = "$root_build_dir/apks/Bundle.aab"
+  #   }
+  template("create_app_bundle_minimal_apks") {
+    action_with_pydeps(target_name) {
+      forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+      script = "//build/android/gyp/create_app_bundle_apks.py"
+      _dir = get_path_info(invoker.bundle_path, "dir")
+      _name = get_path_info(invoker.bundle_path, "name")
+      _output_path = "$_dir/$_name.minimal.apks"
+      outputs = [ _output_path ]
+      inputs = [ invoker.bundle_path ]
+      args = [
+        "--bundle",
+        rebase_path(invoker.bundle_path, root_build_dir),
+        "--output",
+        rebase_path(_output_path, root_build_dir),
+        "--aapt2-path",
+        rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+        "--keystore-path",
+        rebase_path(android_keystore_path, root_build_dir),
+        "--keystore-name",
+        android_keystore_name,
+        "--keystore-password",
+        android_keystore_password,
+        "--minimal",
+      ]
+    }
+  }
+}
+
+# Generate an Android resources target that contains localized strings
+# describing the current locale used by the Android framework to display
+# UI strings. These are used by
+# org.chromium.chrome.browser.ChromeLocalizationUtils.
+#
+# Variables:
+#    ui_locales: List of Chromium locale names to generate resources for.
+#
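+# Example (illustrative; the target and locale names are hypothetical):
+#   generate_ui_locale_resources("ui_locale_string_resources") {
+#     ui_locales = [ "en-US", "fr", "pt-BR" ]
+#   }
+#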
+template("generate_ui_locale_resources") {
+  _generating_target_name = "${target_name}__generate"
+  _rebased_output_zip_path = rebase_path(target_gen_dir, root_gen_dir)
+  _output_zip = "${root_out_dir}/resource_zips/${_rebased_output_zip_path}/" +
+                "${target_name}.zip"
+
+  action_with_pydeps(_generating_target_name) {
+    script = "//build/android/gyp/create_ui_locale_resources.py"
+    outputs = [ _output_zip ]
+    args = [
+      "--locale-list=${invoker.ui_locales}",
+      "--output-zip",
+      rebase_path(_output_zip, root_build_dir),
+    ]
+  }
+
+  android_generated_resources(target_name) {
+    generating_target = ":$_generating_target_name"
+    generated_resources_zip = _output_zip
+  }
+}
diff --git a/src/build/config/android/sdk.gni b/src/build/config/android/sdk.gni
new file mode 100644
index 0000000..d2e67a7
--- /dev/null
+++ b/src/build/config/android/sdk.gni
@@ -0,0 +1,10 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The default SDK release used by public builds. Value may differ in
+# internal builds.
+default_android_sdk_release = "r"
+
+# SDK releases against which public builds are supported.
+public_sdk_releases = [ "r" ]
diff --git a/src/build/config/android/test/classpath_order/BUILD.gn b/src/build/config/android/test/classpath_order/BUILD.gn
new file mode 100644
index 0000000..decd1a8
--- /dev/null
+++ b/src/build/config/android/test/classpath_order/BUILD.gn
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+template("test_resources") {
+  jinja_template_resources(target_name) {
+    forward_variables_from(invoker, "*")
+    testonly = true
+    variables = [ "resource_name=$resource_name" ]
+    res_dir = "java/res_template"
+    resources = [ "java/res_template/values/values.xml" ]
+  }
+}
+
+template("generate_dummy_android_library") {
+  # No underscores to avoid crbug.com/908819.
+  _generate_java_source_target_name = "${target_name}generatejavasource"
+  jinja_template(_generate_java_source_target_name) {
+    testonly = true
+    input = "java/src/org/chromium/build/classpath_order/Dummy.java.jinja2"
+    output = "$target_gen_dir/java/src/org/chromium/build/classpath_order/${invoker.class_name}.java"
+    variables = [ "class_name=${invoker.class_name}" ]
+  }
+
+  android_library(target_name) {
+    forward_variables_from(invoker, "*")
+
+    if (!defined(invoker.deps)) {
+      deps = []
+    }
+
+    sources = get_target_outputs(":${_generate_java_source_target_name}")
+    deps += [ ":${_generate_java_source_target_name}" ]
+  }
+}
+
+# Test that classpath order keeps resources accessible when multiple targets
+# generate resources for the same package. Specifically, test that an
+# android_library precedes its dependencies regardless of their relative
+# lexicographic order. The targets below construct both orderings.
+
+test_resources("a1_dependency_resources") {
+  resource_name = "a1_dependency_resource"
+}
+
+generate_dummy_android_library("a1_dependency_java") {
+  testonly = true
+  class_name = "A1Dependency"
+  resources_package = "org.chromium.build.classpath_order.test1"
+  deps = [ ":a1_dependency_resources" ]
+}
+
+test_resources("z1_master_resources") {
+  resource_name = "z1_master_resource"
+  deps = [ ":a1_dependency_resources" ]
+}
+
+generate_dummy_android_library("z1_master_java") {
+  testonly = true
+  class_name = "Z1Master"
+  resources_package = "org.chromium.build.classpath_order.test1"
+  deps = [
+    ":a1_dependency_java",
+    ":z1_master_resources",
+  ]
+}
+
+test_resources("z2_dependency_resources") {
+  resource_name = "z2_dependency_resource"
+}
+
+generate_dummy_android_library("z2_dependency_java") {
+  testonly = true
+  class_name = "Z2Dependency"
+  resources_package = "org.chromium.build.classpath_order.test2"
+  deps = [ ":z2_dependency_resources" ]
+}
+
+test_resources("a2_master_resources") {
+  resource_name = "a2_master_resource"
+  deps = [ ":z2_dependency_resources" ]
+}
+
+generate_dummy_android_library("a2_master_java") {
+  testonly = true
+  class_name = "A2Master"
+  resources_package = "org.chromium.build.classpath_order.test2"
+  deps = [
+    ":a2_master_resources",
+    ":z2_dependency_java",
+  ]
+}
+
+java_library("junit_tests") {
+  bypass_platform_checks = true
+  testonly = true
+  sources =
+      [ "java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java" ]
+  deps = [
+    ":a1_dependency_java",
+    ":a2_master_java",
+    ":z1_master_java",
+    ":z2_dependency_java",
+    "//testing/android/junit:junit_test_support",
+    "//third_party/android_deps:robolectric_all_java",
+    "//third_party/android_support_test_runner:runner_java",
+    "//third_party/androidx:androidx_test_runner_java",
+    "//third_party/junit",
+  ]
+}
diff --git a/src/build/config/android/test/classpath_order/java/res_template/values/values.xml b/src/build/config/android/test/classpath_order/java/res_template/values/values.xml
new file mode 100644
index 0000000..ee706b2
--- /dev/null
+++ b/src/build/config/android/test/classpath_order/java/res_template/values/values.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2021 The Chromium Authors. All rights reserved.
+     Use of this source code is governed by a BSD-style license that can be
+     found in the LICENSE file. -->
+
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+    <integer name="{{resource_name}}">42</integer>
+</resources>
diff --git a/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java b/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java
new file mode 100644
index 0000000..c5a9202
--- /dev/null
+++ b/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/ClassPathOrderTest.java
@@ -0,0 +1,32 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.classpath_order;
+
+import static org.junit.Assert.assertTrue;
+
+import androidx.test.filters.SmallTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.annotation.Config;
+
+import org.chromium.testing.local.LocalRobolectricTestRunner;
+
+/**
+ * Test that resources defined in different android_resources() targets but with the same
+ * package are accessible.
+ */
+@RunWith(LocalRobolectricTestRunner.class)
+@Config(manifest = Config.NONE)
+public final class ClassPathOrderTest {
+    @Test
+    @SmallTest
+    public void testAll() {
+        assertTrue(org.chromium.build.classpath_order.test1.R.integer.a1_dependency_resource >= 0);
+        assertTrue(org.chromium.build.classpath_order.test1.R.integer.z1_master_resource >= 0);
+        assertTrue(org.chromium.build.classpath_order.test2.R.integer.z2_dependency_resource >= 0);
+        assertTrue(org.chromium.build.classpath_order.test2.R.integer.a2_master_resource >= 0);
+    }
+}
diff --git a/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2 b/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2
new file mode 100644
index 0000000..0ccf28b
--- /dev/null
+++ b/src/build/config/android/test/classpath_order/java/src/org/chromium/build/classpath_order/Dummy.java.jinja2
@@ -0,0 +1,8 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.classpath_order;
+
+public class {{class_name}} {
+}
diff --git a/src/build/config/android/test/proto/BUILD.gn b/src/build/config/android/test/proto/BUILD.gn
new file mode 100644
index 0000000..a28111a
--- /dev/null
+++ b/src/build/config/android/test/proto/BUILD.gn
@@ -0,0 +1,103 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+import("//third_party/protobuf/proto_library.gni")
+
+# The purpose of these targets is to test that |deps| satisfies Java
+# compilation dependencies, and that |import_dirs| lets us handle various
+# relative imports to other proto dependencies. We should nevertheless strive
+# to avoid |import_dirs| and relative import paths, preferring absolute
+# imports whenever possible. See https://crbug.com/691451. While these targets
+# primarily test that the Java proto targets build correctly, the C++ versions
+# of the protos are built as well. Some configurations of Java protos can
+# currently be built but will not work for C++ (see https://crbug.com/1039014),
+# so make sure we don't create any tests that would violate that.
+group("test_build_protos") {
+  deps = [
+    ":absolute_root_proto",
+    ":absolute_root_proto_java",
+    ":relative_root_proto",
+    ":relative_root_proto_java",
+  ]
+}
+
+proto_java_library("absolute_root_proto_java") {
+  proto_path = "//"
+  import_dirs = [ "relative_dep/" ]
+  sources = [
+    "root/absolute_child.proto",
+    "root/absolute_root.proto",
+  ]
+  deps = [
+    ":absolute_dep_proto_java",
+    ":relative_dep_proto_java",
+  ]
+}
+
+proto_java_library("relative_root_proto_java") {
+  proto_path = "root/"
+  import_dirs = [
+    "relative_dep/",
+    "//",
+  ]
+  sources = [
+    "root/relative_child.proto",
+    "root/relative_root.proto",
+  ]
+  deps = [
+    ":absolute_dep_proto_java",
+    ":relative_dep_proto_java",
+  ]
+}
+
+proto_java_library("absolute_dep_proto_java") {
+  proto_path = "//"
+  sources = [ "absolute_dep/absolute_dep.proto" ]
+}
+
+proto_java_library("relative_dep_proto_java") {
+  proto_path = "relative_dep/"
+  sources = [ "relative_dep/relative_dep.proto" ]
+}
+
+proto_library("absolute_root_proto") {
+  proto_in_dir = "//"
+  import_dirs = [ "relative_dep/" ]
+  sources = [
+    "root/absolute_child.proto",
+    "root/absolute_root.proto",
+  ]
+  link_deps = [
+    ":absolute_dep_proto",
+    ":relative_dep_proto",
+  ]
+}
+
+proto_library("relative_root_proto") {
+  proto_in_dir = "root/"
+  import_dirs = [
+    "relative_dep/",
+    "//",
+  ]
+  sources = [
+    "root/relative_child.proto",
+    "root/relative_root.proto",
+  ]
+  link_deps = [
+    ":absolute_dep_proto",
+    ":relative_dep_proto",
+  ]
+}
+
+proto_library("absolute_dep_proto") {
+  proto_in_dir = "//"
+  sources = [ "absolute_dep/absolute_dep.proto" ]
+}
+
+proto_library("relative_dep_proto") {
+  proto_in_dir = "relative_dep/"
+  sources = [ "relative_dep/relative_dep.proto" ]
+}
diff --git a/src/build/config/android/test/proto/absolute_dep/absolute_dep.proto b/src/build/config/android/test/proto/absolute_dep/absolute_dep.proto
new file mode 100644
index 0000000..46dcce7
--- /dev/null
+++ b/src/build/config/android/test/proto/absolute_dep/absolute_dep.proto
@@ -0,0 +1,10 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+message AbsoluteDep {}
diff --git a/src/build/config/android/test/proto/relative_dep/relative_dep.proto b/src/build/config/android/test/proto/relative_dep/relative_dep.proto
new file mode 100644
index 0000000..600b6ca
--- /dev/null
+++ b/src/build/config/android/test/proto/relative_dep/relative_dep.proto
@@ -0,0 +1,10 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+message RelativeDep {}
diff --git a/src/build/config/android/test/proto/root/absolute_child.proto b/src/build/config/android/test/proto/root/absolute_child.proto
new file mode 100644
index 0000000..d6a6a13
--- /dev/null
+++ b/src/build/config/android/test/proto/root/absolute_child.proto
@@ -0,0 +1,10 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+message AbsoluteChild {}
diff --git a/src/build/config/android/test/proto/root/absolute_root.proto b/src/build/config/android/test/proto/root/absolute_root.proto
new file mode 100644
index 0000000..3e20097
--- /dev/null
+++ b/src/build/config/android/test/proto/root/absolute_root.proto
@@ -0,0 +1,18 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+import "build/config/android/test/proto/root/absolute_child.proto";
+import "build/config/android/test/proto/absolute_dep/absolute_dep.proto";
+import "relative_dep.proto";
+
+message AbsoluteRoot {
+  optional AbsoluteChild absolute_child = 1;
+  optional AbsoluteDep absolute_dep = 2;
+  optional RelativeDep relative_dep = 3;
+}
diff --git a/src/build/config/android/test/proto/root/relative_child.proto b/src/build/config/android/test/proto/root/relative_child.proto
new file mode 100644
index 0000000..10f7ed4
--- /dev/null
+++ b/src/build/config/android/test/proto/root/relative_child.proto
@@ -0,0 +1,10 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+message RelativeChild {}
diff --git a/src/build/config/android/test/proto/root/relative_root.proto b/src/build/config/android/test/proto/root/relative_root.proto
new file mode 100644
index 0000000..a37a268
--- /dev/null
+++ b/src/build/config/android/test/proto/root/relative_root.proto
@@ -0,0 +1,18 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto2";
+
+package build.config.android.test;
+option java_package = "build.config.android.test";
+
+import "relative_child.proto";
+import "build/config/android/test/proto/absolute_dep/absolute_dep.proto";
+import "relative_dep.proto";
+
+message RelativeRoot {
+  optional RelativeChild relative_child = 1;
+  optional AbsoluteDep absolute_dep = 2;
+  optional RelativeDep relative_dep = 3;
+}
diff --git a/src/build/config/android/test/resource_overlay/BUILD.gn b/src/build/config/android/test/resource_overlay/BUILD.gn
new file mode 100644
index 0000000..4a063d2
--- /dev/null
+++ b/src/build/config/android/test/resource_overlay/BUILD.gn
@@ -0,0 +1,60 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+# Tests for 'resource_overlay' parameter in android_resources() template.
+
+template("test_resources") {
+  jinja_template_resources(target_name) {
+    forward_variables_from(invoker, "*")
+    testonly = true
+    variables = [
+      "resource_name=$resource_name",
+      "resource_value=$resource_value",
+    ]
+    res_dir = "java/res_template"
+    resources = [ "java/res_template/values/values.xml" ]
+  }
+}
+
+test_resources("dependency_tagged_dependency_resources") {
+  resource_overlay = true
+  resource_name = "resource_overlay_dependency_tagged_secret"
+  resource_value = 41
+}
+
+test_resources("dependency_tagged_root_resources") {
+  resource_name = "resource_overlay_dependency_tagged_secret"
+  resource_value = 42
+  deps = [ ":dependency_tagged_dependency_resources" ]
+}
+
+test_resources("root_tagged_dependency_resources") {
+  resource_name = "resource_overlay_root_tagged_secret"
+  resource_value = 41
+}
+
+test_resources("root_tagged_root_resources") {
+  resource_overlay = true
+  resource_name = "resource_overlay_root_tagged_secret"
+  resource_value = 42
+  deps = [ ":root_tagged_dependency_resources" ]
+}
+
+android_library("javatests") {
+  testonly = true
+  sources = [
+    "java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java",
+  ]
+  resources_package = "org.chromium.build.resource_overlay"
+  deps = [
+    ":dependency_tagged_root_resources",
+    ":root_tagged_root_resources",
+    "//base:base_java_test_support",
+    "//third_party/android_support_test_runner:runner_java",
+    "//third_party/androidx:androidx_test_runner_java",
+    "//third_party/junit",
+  ]
+}
diff --git a/src/build/config/android/test/resource_overlay/java/res_template/values/values.xml b/src/build/config/android/test/resource_overlay/java/res_template/values/values.xml
new file mode 100644
index 0000000..973f855
--- /dev/null
+++ b/src/build/config/android/test/resource_overlay/java/res_template/values/values.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2020 The Chromium Authors. All rights reserved.
+
+     Use of this source code is governed by a BSD-style license that can be
+     found in the LICENSE file.
+-->
+
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+    <integer name="{{resource_name}}">{{resource_value}}</integer>
+</resources>
\ No newline at end of file
diff --git a/src/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java b/src/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java
new file mode 100644
index 0000000..794cafa
--- /dev/null
+++ b/src/build/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java
@@ -0,0 +1,49 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.resource_overlay;
+
+import static org.junit.Assert.assertEquals;
+
+import android.content.res.Resources;
+import android.support.test.InstrumentationRegistry;
+
+import androidx.test.filters.SmallTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.chromium.base.test.BaseJUnit4ClassRunner;
+import org.chromium.base.test.util.Batch;
+
+/**
+ * Test for resource_overlay parameter in android_resources() build rule.
+ */
+@RunWith(BaseJUnit4ClassRunner.class)
+@Batch(Batch.UNIT_TESTS)
+public class ResourceOverlayTest {
+    /**
+     * Test that when an android_resources() target with resource_overlay=false has a resource with
+     * the same name as, but a different value than, a dependency with resource_overlay=true, the
+     * value of the resource in the dependency is used.
+     */
+    @Test
+    @SmallTest
+    public void testDependencyTagged() {
+        Resources resources = InstrumentationRegistry.getTargetContext().getResources();
+        assertEquals(41, resources.getInteger(R.integer.resource_overlay_dependency_tagged_secret));
+    }
+
+    /**
+     * Test that when an android_resources() target with resource_overlay=true has a resource with
+     * the same name as, but a different value than, one of its dependencies, the value of the
+     * resource in the target with resource_overlay=true is used.
+     */
+    @Test
+    @SmallTest
+    public void testRootTagged() {
+        Resources resources = InstrumentationRegistry.getTargetContext().getResources();
+        assertEquals(42, resources.getInteger(R.integer.resource_overlay_root_tagged_secret));
+    }
+}
diff --git a/src/build/config/apple/sdk_info.py b/src/build/config/apple/sdk_info.py
new file mode 100644
index 0000000..fea6801
--- /dev/null
+++ b/src/build/config/apple/sdk_info.py
@@ -0,0 +1,177 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import doctest
+import itertools
+import os
+import plistlib
+import re
+import subprocess
+import sys
+
+if sys.version_info.major < 3:
+  basestring_compat = basestring
+else:
+  basestring_compat = str
+
+# src directory
+ROOT_SRC_DIR = os.path.dirname(
+    os.path.dirname(os.path.dirname(os.path.dirname(
+        os.path.realpath(__file__)))))
+
+# This script prints information about the build system, the operating system,
+# and the iOS or Mac SDK (depending on the platform argument, generally
+# "iphonesimulator", "iphoneos" or "macosx").
+
+
+def LoadPList(path):
+  """Loads Plist at |path| and returns it as a dictionary."""
+  # Cloned from //build/apple/plist_util.py.
+  if sys.version_info.major == 2:
+    return plistlib.readPlist(path)
+  with open(path, 'rb') as f:
+    return plistlib.load(f)
+
+
+def SplitVersion(version):
+  """Splits the Xcode version to 3 values.
+
+  >>> list(SplitVersion('8.2.1.1'))
+  ['8', '2', '1']
+  >>> list(SplitVersion('9.3'))
+  ['9', '3', '0']
+  >>> list(SplitVersion('10.0'))
+  ['10', '0', '0']
+  """
+  version = version.split('.')
+  return itertools.islice(itertools.chain(version, itertools.repeat('0')), 0, 3)
+
+
+def FormatVersion(version):
+  """Converts Xcode version to a format required for DTXcode in Info.plist
+
+  >>> FormatVersion('8.2.1')
+  '0821'
+  >>> FormatVersion('9.3')
+  '0930'
+  >>> FormatVersion('10.0')
+  '1000'
+  """
+  major, minor, patch = SplitVersion(version)
+  return ('%2s%s%s' % (major, minor, patch)).replace(' ', '0')
+
+
+def FillXcodeVersion(settings, developer_dir):
+  """Fills the Xcode version and build number into |settings|."""
+  if developer_dir:
+    xcode_version_plist_path = os.path.join(developer_dir,
+                                            'Contents/version.plist')
+    version_plist = LoadPList(xcode_version_plist_path)
+    settings['xcode_version'] = FormatVersion(
+        version_plist['CFBundleShortVersionString'])
+    settings['xcode_version_int'] = int(settings['xcode_version'], 10)
+    settings['xcode_build'] = version_plist['ProductBuildVersion']
+    return
+
+  lines = subprocess.check_output(['xcodebuild',
+                                   '-version']).decode('UTF-8').splitlines()
+  settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
+  settings['xcode_version_int'] = int(settings['xcode_version'], 10)
+  settings['xcode_build'] = lines[-1].split()[-1]
+
+
+def FillMachineOSBuild(settings):
+  """Fills OS build number into |settings|."""
+  machine_os_build = subprocess.check_output(['sw_vers', '-buildVersion'
+                                              ]).decode('UTF-8').strip()
+  settings['machine_os_build'] = machine_os_build
+
+
+def FillSDKPathAndVersion(settings, platform, xcode_version):
+  """Fills the SDK path and version for |platform| into |settings|."""
+  settings['sdk_path'] = subprocess.check_output(
+      ['xcrun', '-sdk', platform, '--show-sdk-path']).decode('UTF-8').strip()
+  settings['sdk_version'] = subprocess.check_output(
+      ['xcrun', '-sdk', platform,
+       '--show-sdk-version']).decode('UTF-8').strip()
+  settings['sdk_platform_path'] = subprocess.check_output(
+      ['xcrun', '-sdk', platform,
+       '--show-sdk-platform-path']).decode('UTF-8').strip()
+  settings['sdk_build'] = subprocess.check_output(
+      ['xcrun', '-sdk', platform,
+       '--show-sdk-build-version']).decode('UTF-8').strip()
+  settings['toolchains_path'] = os.path.join(
+      subprocess.check_output(['xcode-select',
+                               '-print-path']).decode('UTF-8').strip(),
+      'Toolchains/XcodeDefault.xctoolchain')
+
+
+def CreateXcodeSymlinkAt(src, dst):
+  """Create symlink to Xcode directory at target location."""
+
+  if not os.path.isdir(dst):
+    os.makedirs(dst)
+
+  dst = os.path.join(dst, os.path.basename(src))
+  updated_value = '//' + os.path.relpath(dst, ROOT_SRC_DIR)
+
+  # Update the symlink only if it is different from the current destination.
+  if os.path.islink(dst):
+    current_src = os.readlink(dst)
+    if current_src == src:
+      return updated_value
+    os.unlink(dst)
+    sys.stderr.write('existing symlink %s points to %s; want %s. Removed.\n' %
+                     (dst, current_src, src))
+  os.symlink(src, dst)
+  return updated_value
+
+
+if __name__ == '__main__':
+  doctest.testmod()
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--developer_dir", dest="developer_dir", required=False)
+  parser.add_argument("--get_sdk_info",
+                      action="store_true",
+                      dest="get_sdk_info",
+                      default=False,
+                      help="Returns SDK info in addition to xcode info.")
+  parser.add_argument("--get_machine_info",
+                      action="store_true",
+                      dest="get_machine_info",
+                      default=False,
+                      help="Returns machine info in addition to xcode info.")
+  parser.add_argument("--create_symlink_at",
+                      action="store",
+                      dest="create_symlink_at",
+                      help="Create symlink of SDK at given location and "
+                      "returns the symlinked paths as SDK info instead "
+                      "of the original location.")
+  args, unknownargs = parser.parse_known_args()
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  if len(unknownargs) != 1:
+    sys.stderr.write('usage: %s [iphoneos|iphonesimulator|macosx]\n' %
+                     os.path.basename(sys.argv[0]))
+    sys.exit(1)
+
+  settings = {}
+  if args.get_machine_info:
+    FillMachineOSBuild(settings)
+  FillXcodeVersion(settings, args.developer_dir)
+  if args.get_sdk_info:
+    FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
+
+  for key in sorted(settings):
+    value = settings[key]
+    if args.create_symlink_at and '_path' in key:
+      value = CreateXcodeSymlinkAt(value, args.create_symlink_at)
+    if isinstance(value, basestring_compat):
+      value = '"%s"' % value
+    print('%s=%s' % (key, value))
diff --git a/src/build/config/apple/symbols.gni b/src/build/config/apple/symbols.gni
new file mode 100644
index 0000000..dd1d796
--- /dev/null
+++ b/src/build/config/apple/symbols.gni
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+# This file declares arguments and configs that control whether dSYM debug
+# info is produced and whether build products are stripped.
+
+declare_args() {
+  # Produce dSYM files for targets that are configured to do so. dSYM
+  # generation is controlled globally as it is a linker output (produced via
+  # //build/toolchain/apple/linker_driver.py). Enabling this will result in
+  # all shared library, loadable module, and executable targets having a dSYM
+  # generated.
+  enable_dsyms = is_official_build || using_sanitizer
+
+  # Strip symbols from linked targets by default. If this is enabled, the
+  # //build/config/mac:strip_all config will be applied to all linked targets.
+  # If custom stripping parameters are required, remove that config from a
+  # linked target and apply custom -Wcrl,strip flags. See
+  # //build/toolchain/apple/linker_driver.py for more information.
+  enable_stripping = is_official_build
+}
+
+# Save unstripped copies of targets with a ".unstripped" suffix. This is
+# useful to preserve the original output when enable_stripping=true but
+# we're not actually generating real dSYMs.
+save_unstripped_output = enable_stripping && !enable_dsyms
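+
+# For example (hypothetical gn args): with enable_stripping=true and
+# enable_dsyms=false, linked outputs are stripped but ".unstripped" copies are
+# kept, since save_unstripped_output evaluates to true in that case.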
diff --git a/src/build/config/arm.gni b/src/build/config/arm.gni
new file mode 100644
index 0000000..ddd1c5d
--- /dev/null
+++ b/src/build/config/arm.gni
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "arm" contexts, where
+# ARM code is being compiled.  But they can also be relevant in the
+# other contexts when the code will change its behavior based on the
+# cpu it wants to generate code for.
+if (current_cpu == "arm" || v8_current_cpu == "arm") {
+  declare_args() {
+    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+    # platforms.
+    arm_version = 7
+
+    # The ARM architecture. This will be a string like "armv6" or "armv7-a".
+    # An empty string means to use the default for the arm_version.
+    arm_arch = ""
+
+    # The ARM floating point hardware. This will be a string like "neon" or
+    # "vfpv3". An empty string means to use the default for the arm_version.
+    arm_fpu = ""
+
+    # The ARM variant-specific tuning mode. This will be a string like "armv6"
+    # or "cortex-a15". An empty string means to use the default for the
+    # arm_version.
+    arm_tune = ""
+
+    # Whether or not to use the NEON FPU instruction set.
+    arm_use_neon = ""
+
+    # Whether to enable optional NEON code paths.
+    arm_optionally_use_neon = false
+
+    # Thumb is a reduced instruction set available on some ARM processors that
+    # has increased code density.
+    arm_use_thumb = true
+  }
+
+  if (current_os == "android" || target_os == "android") {
+    arm_float_abi = "softfp"
+  } else {
+    declare_args() {
+      # The ARM floating point mode. This is either the string "hard", "soft",
+      # or "softfp". An empty string means to use the default one for the
+      # arm_version.
+      arm_float_abi = ""
+    }
+  }
+  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+         arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+  if (arm_use_neon == "") {
+    if (current_os == "linux" && target_cpu != v8_target_cpu) {
+      # Don't use NEON by default on V8 simulator builds.
+      arm_use_neon = false
+    } else {
+      arm_use_neon = true
+    }
+  }
+
+  if (arm_version == 6) {
+    if (arm_arch == "") {
+      arm_arch = "armv6"
+    }
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    if (arm_fpu == "") {
+      arm_fpu = "vfp"
+    }
+    arm_use_thumb = false
+    arm_use_neon = false
+  } else if (arm_version == 7) {
+    if (arm_arch == "") {
+      arm_arch = "armv7-a"
+    }
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      if (current_os == "linux" && target_cpu != v8_target_cpu) {
+        # Default to the same as Android for V8 simulator builds.
+        arm_float_abi = "softfp"
+      } else {
+        arm_float_abi = "hard"
+      }
+    }
+
+    if (arm_fpu == "") {
+      if (arm_use_neon) {
+        arm_fpu = "neon"
+      } else {
+        arm_fpu = "vfpv3-d16"
+      }
+    }
+  } else if (arm_version == 8) {
+    if (arm_arch == "") {
+      arm_arch = "armv8-a"
+    }
+    if (arm_tune == "") {
+      arm_tune = "generic-armv8-a"
+    }
+
+    if (arm_float_abi == "") {
+      arm_float_abi = "hard"
+    }
+
+    if (arm_fpu == "") {
+      if (arm_use_neon) {
+        arm_fpu = "neon"
+      } else {
+        arm_fpu = "vfpv3-d16"
+      }
+    }
+  }
+} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
+  # arm64 supports only "hard".
+  arm_float_abi = "hard"
+  arm_use_neon = true
+}
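+
+# Worked example of the defaults above (non-Android target, not a V8 simulator
+# build, all args left at their defaults): arm_version=7 yields
+# arm_arch="armv7-a", arm_tune="generic-armv7-a", arm_float_abi="hard",
+# arm_use_neon=true and arm_fpu="neon".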
diff --git a/src/build/config/buildflags_paint_preview.gni b/src/build/config/buildflags_paint_preview.gni
new file mode 100644
index 0000000..7129e76
--- /dev/null
+++ b/src/build/config/buildflags_paint_preview.gni
@@ -0,0 +1,16 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/features.gni")
+
+declare_args() {
+  # Enable basic paint preview support. Does not work on iOS or Fuchsia, and
+  # should not be included with Chromecast. Not ready for shipping builds yet,
+  # so include it only in unofficial builds.
+  # Used by //components/paint_preview and //third_party/harfbuzz-ng.
+  # TODO(bug/webrtc:11223): Move this file back into //components/paint_preview/
+  #     once WebRTC no longer rolls harfbuzz-ng, for consistency's sake.
+  enable_paint_preview = !is_chromecast && !is_ios && !is_fuchsia
+}
diff --git a/src/build/config/c++/BUILD.gn b/src/build/config/c++/BUILD.gn
new file mode 100644
index 0000000..6494bb0
--- /dev/null
+++ b/src/build/config/c++/BUILD.gn
@@ -0,0 +1,154 @@
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//buildtools/deps_revisions.gni")
+
+assert(use_custom_libcxx, "should only be used if use_custom_libcxx is set")
+
+declare_args() {
+  # lldb pretty printing only works when libc++ is built in the __1 (or __ndk1)
+  # namespaces.  For pretty printing to work out-of-the-box on Mac (where lldb
+  # is primarily used), this flag is set to false to build with the __1
+  # namespace (to maintain ABI compatibility, this implies building without
+  # _LIBCPP_ABI_UNSTABLE).  This is not necessary on non-component builds,
+  # where the ABI version is left set to __1 anyway because libc++ symbols are
+  # not exported.
+  # TODO(thomasanderson): Set this to true by default once rL352899 is available
+  # in macOS's lldb.
+  libcxx_abi_unstable = !(is_apple && is_debug && is_component_build)
+}
+
+# TODO(xiaohuic): https://crbug.com/917533 crashes on internal ChromeOS builds.
+# Remove this override once the underlying problem is fixed.
+if (is_chromeos_ash && is_chrome_branded) {
+  libcxx_abi_unstable = false
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is specific to libc++. Please see that target for advice on what should
+# go in :runtime_library vs. :compiler.
+config("runtime_library") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+  ldflags = []
+  libs = []
+
+  if (libcxx_abi_unstable) {
+    defines += [ "_LIBCPP_ABI_UNSTABLE" ]
+  }
+
+  if (libcxx_is_shared) {
+    # When libcxx_is_shared is true, symbols from libc++.so are exported for
+    # all DSOs to use.  If the system libc++ gets loaded (indirectly through
+    # a system library), then it will conflict with our libc++.so.  Add a
+    # custom ABI version if we're building with _LIBCPP_ABI_UNSTABLE to avoid
+    # conflicts.
+    #
+    # Windows doesn't need to set _LIBCPP_ABI_VERSION since there's no system
+    # C++ library we could conflict with.
+    if (libcxx_abi_unstable && !is_win) {
+      defines += [ "_LIBCPP_ABI_VERSION=Cr" ]
+    }
+  } else {
+    # Don't leak any symbols on a static build.
+    defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ]
+    if (!export_libcxxabi_from_executables && !is_win) {
+      defines += [ "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" ]
+    }
+  }
+
+  defines += [
+    "_LIBCPP_ENABLE_NODISCARD",
+
+    # TODO(crbug.com/1166707): libc++ requires this macro.
+    "_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS",
+  ]
+
+  # Work around a symbol conflict between GRPC and the Fuchsia SDK.
+  # TODO(crbug.com/1166970): Remove this when resolved.
+  if (is_fuchsia) {
+    defines += [ "_LIBCPP_NO_NATIVE_SEMAPHORES" ]
+  }
+
+  # The Windows component build fails to link with libc++'s debug mode. See
+  # https://crbug.com/923166#c33, https://crbug.com/923166#c44, and
+  # https://llvm.org/PR41018.
+  if (!(is_win && is_component_build)) {
+    # libc++ has two levels of debug mode. Setting _LIBCPP_DEBUG to zero
+    # enables most assertions. Setting it to one additionally enables iterator
+    # debugging. See https://libcxx.llvm.org/docs/DesignDocs/DebugMode.html
+    if (enable_iterator_debugging) {
+      defines += [ "_LIBCPP_DEBUG=1" ]
+    } else if (is_debug || dcheck_always_on) {
+      defines += [ "_LIBCPP_DEBUG=0" ]
+    }
+  }
+
+  if (is_win) {
+    # Intentionally not using libc++abi on Windows because libc++abi only
+    # implements the Itanium C++ ABI, and not the Microsoft ABI which we use on
+    # Windows (and we need to use in order to interoperate correctly with COM
+    # among other things).
+    assert(!export_libcxxabi_from_executables,
+           "Don't use libcxxabi on Windows.")
+
+    cflags_cc +=
+        [ "-I" + rebase_path("$libcxx_prefix/include", root_build_dir) ]
+
+    # Prevent libc++ from embedding linker flags to try to automatically link
+    # against its runtime library. This is unnecessary with our build system,
+    # and can also result in build failures if libc++'s name for a library
+    # does not match ours.
+    defines += [ "_LIBCPP_NO_AUTO_LINK" ]
+
+    if (is_component_build) {
+      # TODO(crbug.com/1090975): Disable the exclude_from_explicit_instantiation
+      # to work around compiler bugs in the interaction between it and
+      # dllimport/dllexport.
+      defines += [ "_LIBCPP_HIDE_FROM_ABI=_LIBCPP_HIDDEN" ]
+    }
+
+    # Add a debug visualizer for Microsoft's debuggers so that they can display
+    # libc++ types well.
+    if (libcxx_natvis_include) {
+      # chrome.natvis listed as an input in //buildtools/third_party/libc++ to
+      # guarantee relinking on changes.
+      ldflags += [ "/NATVIS:" + rebase_path("libc++.natvis", root_build_dir) ]
+    }
+  } else {
+    cflags_cc += [
+      "-nostdinc++",
+      "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+      "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+    ]
+    cflags_objcc = cflags_cc
+
+    defines += [ "CR_LIBCXX_REVISION=$libcxx_revision" ]
+
+    # Make sure we don't link against the system libstdc++ or libc++.
+    if (is_clang) {
+      ldflags += [ "-nostdlib++" ]
+    } else {
+      # Gcc has a built-in abs() definition with default visibility.
+      # If it was not disabled, it would conflict with libc++'s abs()
+      # with hidden visibility.
+      cflags += [ "-fno-builtin-abs" ]
+
+      ldflags += [ "-nodefaultlibs" ]
+
+      # Unfortunately, there's no way to disable linking against just libc++
+      # (gcc doesn't have -nostdlib++:
+      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83931); -nodefaultlibs
+      # removes all of the default libraries, so add back the ones that we need.
+      libs += [
+        "c",
+        "gcc_s",
+        "m",
+        "rt",
+      ]
+    }
+  }
+}
diff --git a/src/build/config/c++/c++.gni b/src/build/config/c++/c++.gni
new file mode 100644
index 0000000..a7448f3
--- /dev/null
+++ b/src/build/config/c++/c++.gni
@@ -0,0 +1,82 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # Use in-tree libc++ (buildtools/third_party/libc++ and
+  # buildtools/third_party/libc++abi) instead of the system C++ library for C++
+  # standard library support.
+  # Don't check in changes that set this to false for more platforms; doing so
+  # is not supported.
+  use_custom_libcxx = is_fuchsia || is_android || is_mac ||
+                      (is_ios && !use_xcode_clang) || (is_win && is_clang) ||
+                      ((is_linux || is_chromeos) &&
+                       (!is_chromeos_ash ||
+                        default_toolchain != "//build/toolchain/cros:target"))
+
+  # Use libc++ instead of libstdc++ when using the host_cpu toolchain, even if
+  # use_custom_libcxx is false. This is useful for cross-compiles where a custom
+  # toolchain for the target_cpu has been set as the default toolchain, but
+  # use_custom_libcxx should still be true when building for the host.  The
+  # expected usage is to set use_custom_libcxx=false and
+  # use_custom_libcxx_for_host=true in the passed-in build args.
+  use_custom_libcxx_for_host = false
+
+  # Builds libcxx Natvis into the symbols for type visualization.
+  # Set to false to workaround http://crbug.com/966676 and
+  # http://crbug.com/966687.
+  libcxx_natvis_include = true
+
+  # When set, enables libc++ debug mode with iterator debugging.
+  #
+  # Iterator debugging is generally useful for catching bugs. But it can
+  # introduce extra locking to check the state of an iterator against the state
+  # of the current object. For iterator- and thread-heavy code, this can
+  # significantly slow execution; a slowdown of two orders of magnitude has
+  # been seen (crbug.com/903553), and iterator debugging also slows builds by
+  # making generation of snapshot_blob.bin take ~40-60 s longer. Therefore this
+  # defaults to off.
+  enable_iterator_debugging = false
+}
+
+use_custom_libcxx =
+    use_custom_libcxx || (use_custom_libcxx_for_host && !is_a_target_toolchain)
+use_custom_libcxx = use_custom_libcxx && !is_nacl
+
+declare_args() {
+  # WARNING: Setting this to a non-default value is highly discouraged.
+  # If true, libc++ will be built as a shared library; otherwise libc++ will be
+  # linked statically. Setting this to something other than the default is
+  # unsupported and can be broken by libc++ rolls. Note that if this is set to
+  # true, you must also set libcxx_abi_unstable=false, which is bad for
+  # performance and memory use.
+  libcxx_is_shared = use_custom_libcxx && is_component_build
+}
+
+# libc++abi needs to be exported from executables to be picked up by shared
+# libraries on certain instrumented builds.
+export_libcxxabi_from_executables =
+    use_custom_libcxx && !is_ios && !is_win && !is_component_build &&
+    (is_asan || is_ubsan_vptr)
+
+# On Android, many shared libraries get loaded from the context of a JRE.  In
+# this case, there's no "main executable" to export libc++abi from.  We could
+# export libc++abi from each "toplevel" shared library instead, but that would
+# require adding an explicit dependency for each one, and might introduce
+# subtle, hard-to-fix problems down the line if the dependency is missing.
+#
+# export_libcxxabi_from_executables was added to avoid having an RPATH set in
+# static sanitizer builds just for executables to find libc++.  But on Android,
+# the Bionic dynamic loader doesn't even look at RPATH; instead, LD_LIBRARY_PATH
+# is set for tests. Because of this, we make libc++ a shared library on
+# Android, since it should get loaded properly.
+if (is_android && export_libcxxabi_from_executables) {
+  export_libcxxabi_from_executables = false
+  libcxx_is_shared = true
+}
+
+libcxx_prefix = "//buildtools/third_party/libc++/trunk"
+libcxxabi_prefix = "//buildtools/third_party/libc++abi/trunk"
diff --git a/src/build/config/c++/libc++.natvis b/src/build/config/c++/libc++.natvis
new file mode 100644
index 0000000..9a49a29
--- /dev/null
+++ b/src/build/config/c++/libc++.natvis
@@ -0,0 +1,435 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<AutoVisualizer
+  xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
+
+  <!-- libc++'s __compressed_pair is an internal type used pervasively for
+       doing the empty base class optimization.
+
+       __compressed_pair<U,V> derives from __compressed_pair_elem<U,0> and
+       __compressed_pair_elem<V,1>. __compressed_pair_elem<T> is specialized on
+       a 3rd template parameter:
+       * if T is empty and non-final the 3rd param is 1 and it derives from T
+       * else it has a member variable __value_ of type T
+  -->
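+  <!-- For example (illustrative), libc++ stores a basic_string's internal
+       representation together with its allocator in a __compressed_pair, so a
+       stateless allocator occupies no storage. -->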
+  <Type Name="std::__1::__compressed_pair_elem&lt;*,*,0&gt;">
+    <DisplayString>{__value_}</DisplayString>
+    <Expand>
+      <ExpandedItem>__value_</ExpandedItem>
+    </Expand>
+  </Type>
+  <Type Name="std::__1::__compressed_pair_elem&lt;*,*,1&gt;">
+    <DisplayString>{*($T1*)this}</DisplayString>
+    <Expand>
+      <ExpandedItem>*($T1*)this</ExpandedItem>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::array&lt;*,*&gt;">
+    <DisplayString>{{ size={$T2} }}</DisplayString>
+    <Expand>
+      <ArrayItems>
+        <Size>$T2</Size>
+        <ValuePointer>__elems_</ValuePointer>
+      </ArrayItems>
+    </Expand>
+  </Type>
+
+  <!--libc++'s short string optimization:
+      A basic_string is 3 size_t words long. In the "alternate string layout"
+      that we use, they are: pointer to data, size, capacity.
+      (In the normal layout, it's capacity, size, data instead.)
+      If a string is short enough that it fits in these three size_ts instead,
+      the string data is stored inline in these 3 words, with the last byte of
+      the storage storing the length of the string.
+      The highest bit of the "capacity" word is set for normal, "long" strings,
+      and that bit needs to be masked out to know the real capacity.
+      If this bit is not set, the string data is stored inline.
+      (In the normal layout, if the lowest bit in the first byte is set,
+      it's a "long" string, requiring a long string to always have even
+      capacity. A short string here stores its length in the first byte
+      and the inline data in the remaining storage.)
+  -->
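+  <!-- For example, on a 64-bit target the three words span 24 bytes; the last
+       byte stores the short-string length, leaving 22 usable characters plus
+       the terminating NUL, which is why [capacity] reads 22 below. -->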
+
+  <Type Name="std::__1::basic_string&lt;char,*&gt;">
+    <!--<Intrinsic Name="is_long"
+            Expression="((__rep*)&amp;__r_)-&gt;__s.__size_ &amp; 0x80" />-->
+    <!-- The above doesn't work because of https://llvm.org/PR41615
+         TODO(thakis): Now that we have clang r362038, try the above approach
+                       again.
+         The below assumes the alternate string layout and little endianness :/
+    -->
+    <Intrinsic Name="is_long"
+        Expression="*(((char*)this) + 3*sizeof(size_t) - 1) &amp; 0x80" />
+    <DisplayString Condition="is_long()">{*(char**)this}</DisplayString>
+    <DisplayString Condition="!is_long()">{(char*)this}</DisplayString>
+    <StringView Condition="is_long()">*(char**)this</StringView>
+    <StringView Condition="!is_long()">(char*)this</StringView>
+    <Expand>
+      <Item Name="[size]" Condition="is_long()"
+          ExcludeView="simple">((size_t*)this)[1]</Item>
+      <Item Name="[size]" Condition="!is_long()"
+          ExcludeView="simple">*(((char*)this) + 3*sizeof(size_t) - 1)</Item>
+      <Item Name="[capacity]" Condition="is_long()" ExcludeView="simple">
+        ((size_t*)this)[2] &amp; (~((size_t)0) &gt;&gt; 1)
+      </Item>
+      <Item Name="[capacity]" Condition="!is_long()"
+          ExcludeView="simple">22</Item>
+      <ArrayItems>
+        <Size Condition="is_long()">((size_t*)this)[1]</Size>
+        <Size Condition="!is_long()">
+          *(((char*)this) + 3*sizeof(size_t) - 1)
+        </Size>
+        <ValuePointer Condition="is_long()">*(char**)this</ValuePointer>
+        <ValuePointer Condition="!is_long()">(char*)this</ValuePointer>
+      </ArrayItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::basic_string&lt;wchar_t,*&gt;">
+    <Intrinsic Name="is_long"
+        Expression="*(((char*)this) + 3*sizeof(size_t) - 1) &amp; 0x80" />
+    <DisplayString Condition="is_long()">{*(wchar_t**)this}</DisplayString>
+    <DisplayString Condition="!is_long()">{(wchar_t*)this}</DisplayString>
+    <StringView Condition="is_long()">*(wchar_t**)this</StringView>
+    <StringView Condition="!is_long()">(wchar_t*)this</StringView>
+    <Expand>
+      <Item Name="[size]" Condition="is_long()"
+          ExcludeView="simple">((size_t*)this)[1]</Item>
+      <Item Name="[size]" Condition="!is_long()"
+          ExcludeView="simple">*(((char*)this) + 3*sizeof(size_t) - 1)</Item>
+      <Item Name="[capacity]" Condition="is_long()" ExcludeView="simple">
+        ((size_t*)this)[2] &amp; (~((size_t)0) &gt;&gt; 1)
+      </Item>
+      <Item Name="[capacity]" Condition="!is_long()"
+          ExcludeView="simple">10</Item>
+      <ArrayItems>
+        <Size Condition="is_long()">((size_t*)this)[1]</Size>
+        <Size Condition="!is_long()">
+          *(((char*)this) + 3*sizeof(size_t) - 1)
+        </Size>
+        <ValuePointer Condition="is_long()">*(wchar_t**)this</ValuePointer>
+        <ValuePointer Condition="!is_long()">(wchar_t*)this</ValuePointer>
+      </ArrayItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::deque&lt;*,*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__size_" />
+    <Intrinsic Name="block_size"
+        Expression="sizeof($T1) &lt; 256 ? 4096 / sizeof($T1) : 16" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <IndexListItems>
+        <Size>size()</Size>
+        <ValueNode>
+          *(*(__map_.__begin_ + ($i + __start_) / block_size()) +
+                                ($i + __start_) % block_size())
+        </ValueNode>
+      </IndexListItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::forward_list&lt;*&gt;">
+    <Intrinsic Name="head"
+        Expression="((__node_pointer)&amp;__before_begin_)-&gt;__next_" />
+    <DisplayString Condition="head() == 0">empty</DisplayString>
+    <DisplayString Condition="head() != 0">non-empty</DisplayString>
+    <Expand>
+      <LinkedListItems>
+        <HeadPointer>head()</HeadPointer>
+        <NextPointer>__next_</NextPointer>
+        <ValueNode>__value_</ValueNode>
+      </LinkedListItems>
+    </Expand>
+  </Type>
+
+  <!-- Note: Not in __1! But will win over the one in stl.natvis -->
+  <Type Name="std::initializer_list&lt;*&gt;">
+    <DisplayString>{{ size={__size_} }}</DisplayString>
+    <Expand>
+      <ArrayItems>
+        <Size>__size_</Size>
+        <ValuePointer>__begin_</ValuePointer>
+      </ArrayItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::list&lt;*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__size_alloc_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <LinkedListItems>
+        <Size>size()</Size>
+        <HeadPointer>__end_.__next_</HeadPointer>
+        <NextPointer>__next_</NextPointer>
+        <ValueNode>
+          ((std::__1::list&lt;$T1,$T2&gt;::__node_pointer)this)
+              -&gt;__value_
+        </ValueNode>
+      </LinkedListItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::map&lt;*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__tree_.__pair3_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <Item Name="[size]">size()</Item>
+      <TreeItems>
+        <Size>size()</Size>
+        <HeadPointer>
+          ((__node_pointer)&amp;__tree_.__pair1_)-&gt;__left_
+        </HeadPointer>
+        <LeftPointer>
+          ((std::__1::map&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__left_
+        </LeftPointer>
+        <RightPointer>
+          ((std::__1::map&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__right_
+        </RightPointer>
+        <ValueNode>
+          ((std::__1::map&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__value_.__cc
+        </ValueNode>
+      </TreeItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::multimap&lt;*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__tree_.__pair3_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <Item Name="[size]">size()</Item>
+      <TreeItems>
+        <Size>size()</Size>
+        <HeadPointer>
+          ((__node_pointer)&amp;__tree_.__pair1_)-&gt;__left_
+        </HeadPointer>
+        <LeftPointer>
+          ((std::__1::multimap&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__left_
+        </LeftPointer>
+        <RightPointer>
+          ((std::__1::multimap&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__right_
+        </RightPointer>
+        <ValueNode>
+          ((std::__1::multimap&lt;$T1,$T2,$T3,$T4&gt;::__node_pointer)this)
+              -&gt;__value_.__cc
+        </ValueNode>
+      </TreeItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::multiset&lt;*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__tree_.__pair3_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <Item Name="[size]">size()</Item>
+      <TreeItems>
+        <Size>size()</Size>
+        <HeadPointer>
+          ((__base::__node_pointer)&amp;__tree_.__pair1_)-&gt;__left_
+        </HeadPointer>
+        <LeftPointer>
+          ((std::__1::multiset&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__left_
+        </LeftPointer>
+        <RightPointer>
+          ((std::__1::multiset&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__right_
+        </RightPointer>
+        <ValueNode>
+          ((std::__1::multiset&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__value_
+        </ValueNode>
+      </TreeItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::priority_queue&lt;*&gt;">
+    <DisplayString>{c}</DisplayString>
+    <Expand>
+      <ExpandedItem>c</ExpandedItem>
+      <Item Name="[comp]">comp</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::set&lt;*&gt;">
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__tree_.__pair3_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <Item Name="[size]">size()</Item>
+      <TreeItems>
+        <Size>size()</Size>
+        <HeadPointer>
+          ((__base::__node_pointer)&amp;__tree_.__pair1_)-&gt;__left_
+        </HeadPointer>
+        <LeftPointer>
+          ((std::__1::set&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__left_
+        </LeftPointer>
+        <RightPointer>
+          ((std::__1::set&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__right_
+        </RightPointer>
+        <ValueNode>
+          ((std::__1::set&lt;$T1,$T2,$T3&gt;::__base::__node_pointer)this)
+              -&gt;__value_
+        </ValueNode>
+      </TreeItems>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::stack&lt;*&gt;">
+    <AlternativeType Name="std::__1::queue&lt;*&gt;" />
+    <DisplayString>{c}</DisplayString>
+    <Expand>
+      <ExpandedItem>c</ExpandedItem>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::__tuple_leaf&lt;*,*,0&gt;">
+    <DisplayString>{__value_}</DisplayString>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;&gt;">
+    <DisplayString>()</DisplayString>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+      <Item Name="[2]">(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*,*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+      <Item Name="[2]">(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_</Item>
+      <Item Name="[3]">(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*,*,*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+      <Item Name="[2]">(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_</Item>
+      <Item Name="[3]">(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_</Item>
+      <Item Name="[4]">(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*,*,*,*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;5,$T6,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+      <Item Name="[2]">(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_</Item>
+      <Item Name="[3]">(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_</Item>
+      <Item Name="[4]">(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_</Item>
+      <Item Name="[5]">(std::__1::__tuple_leaf&lt;5,$T6,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::tuple&lt;*,*,*,*,*,*,*&gt;">
+    <DisplayString>({(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;5,$T6,0&gt;)__base_}, {(std::__1::__tuple_leaf&lt;6,$T7,0&gt;)__base_})</DisplayString>
+    <Expand>
+      <Item Name="[0]">(std::__1::__tuple_leaf&lt;0,$T1,0&gt;)__base_</Item>
+      <Item Name="[1]">(std::__1::__tuple_leaf&lt;1,$T2,0&gt;)__base_</Item>
+      <Item Name="[2]">(std::__1::__tuple_leaf&lt;2,$T3,0&gt;)__base_</Item>
+      <Item Name="[3]">(std::__1::__tuple_leaf&lt;3,$T4,0&gt;)__base_</Item>
+      <Item Name="[4]">(std::__1::__tuple_leaf&lt;4,$T5,0&gt;)__base_</Item>
+      <Item Name="[5]">(std::__1::__tuple_leaf&lt;5,$T6,0&gt;)__base_</Item>
+      <Item Name="[6]">(std::__1::__tuple_leaf&lt;6,$T7,0&gt;)__base_</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::unique_ptr&lt;*&gt;">
+    <Intrinsic Name="value" Expression="*($T1**)&amp;__ptr_" />
+    <SmartPointer Usage="Minimal">value()</SmartPointer>
+    <DisplayString Condition="value() == 0">empty</DisplayString>
+    <DisplayString Condition="value() != 0">
+      unique_ptr {value()}</DisplayString>
+    <Expand>
+      <Item Condition="value() != 0" Name="[ptr]">value()</Item>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::unordered_map&lt;*&gt;">
+    <AlternativeType Name="std::__1::unordered_multimap&lt;*&gt;" />
+    <AlternativeType Name="std::__1::unordered_multiset&lt;*&gt;" />
+    <AlternativeType Name="std::__1::unordered_set&lt;*&gt;" />
+    <Intrinsic Name="size" Expression="*(size_type*)&amp;__table_.__p2_" />
+    <Intrinsic Name="bucket_count"
+        Expression="*(size_type*)&amp;
+                    ((__table::__bucket_list_deleter*)
+                        ((void**)&amp;__table_.__bucket_list_ + 1))
+                        -&gt;__data_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <Item Name="[bucket_count]">bucket_count()</Item>
+      <Item Name="[load_factor]">
+        bucket_count() != 0 ? (float)size() / bucket_count() : 0.f</Item>
+      <Item Name="[max_load_factor]">*(float*)&amp;__table_.__p3_</Item>
+      <!-- Use CustomListItems instead of LinkedListItems because we
+        need to cast to __table::__node_pointer and LinkedListItems
+        evaluates <Value> in the context of the node, not of the container,
+        so we'd have to say std::unordered_map<$T1,...>::__table::__node_pointer
+        and then we couldn't share this <Type> between unordered_(multi)map
+        and unordered_(multi)set. -->
+      <CustomListItems>
+        <Variable Name="node"
+            InitialValue="*(__table::__next_pointer*)&amp;__table_.__p1_" />
+        <Size>size()</Size>
+        <Loop>
+          <Item>(*(__table::__node_pointer*)&amp;node)-&gt;__value_</Item>
+          <Exec>node = node-&gt;__next_</Exec>
+        </Loop>
+      </CustomListItems>
+    </Expand>
+  </Type>
+  <!-- This is the node __value_ of an unordered_(multi)map. Expand it through
+    a separate formatter instead of in the <Item> expression above so that the
+    same <Type> works for unordered_(multi)set and unordered_(multi)map. -->
+  <Type Name="std::__1::__hash_value_type&lt;*&gt;">
+    <DisplayString>{__cc}</DisplayString>
+    <Expand>
+      <ExpandedItem>__cc</ExpandedItem>
+    </Expand>
+  </Type>
+
+  <Type Name="std::__1::vector&lt;*&gt;">
+    <Intrinsic Name="size" Expression="__end_ - __begin_" />
+    <DisplayString>{{ size={size()} }}</DisplayString>
+    <Expand>
+      <ArrayItems>
+        <Size>size()</Size>
+        <ValuePointer>__begin_</ValuePointer>
+      </ArrayItems>
+    </Expand>
+  </Type>
+</AutoVisualizer>
diff --git a/src/build/config/chrome_build.gni b/src/build/config/chrome_build.gni
new file mode 100644
index 0000000..5c51d7f
--- /dev/null
+++ b/src/build/config/chrome_build.gni
@@ -0,0 +1,25 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+}
+
+declare_args() {
+  # Refers to the subdirectory for branding in various places including
+  # chrome/app/theme.
+  if (is_chrome_branded) {
+    branding_path_component = "google_chrome"
+  } else {
+    branding_path_component = "chromium"
+  }
+}
+
+declare_args() {
+  # The path to the BRANDING file in chrome/app/theme.
+  branding_file_path = "//chrome/app/theme/$branding_path_component/BRANDING"
+}
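For orientation, these branding args are typically set per checkout in args.gn. A minimal, hypothetical sketch (official branding additionally requires Google-internal resources):

```gn
# args.gn (hypothetical): switch from Chromium to Google Chrome branding.
is_chrome_branded = true

# With this set, the defaults above resolve to:
#   branding_path_component = "google_chrome"
#   branding_file_path = "//chrome/app/theme/google_chrome/BRANDING"
```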
diff --git a/src/build/config/chromecast/BUILD.gn b/src/build/config/chromecast/BUILD.gn
new file mode 100644
index 0000000..0c3b2cb
--- /dev/null
+++ b/src/build/config/chromecast/BUILD.gn
@@ -0,0 +1,89 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+
+assert(is_chromecast)
+
+config("static_config") {
+  if (!is_clang) {
+    ldflags = [
+      # Don't allow visible symbols from libraries that contain
+      # assembly code with symbols that aren't hidden properly.
+      # http://b/26390825
+      "-Wl,--exclude-libs=libffmpeg.a",
+    ]
+
+    if (!is_android) {
+      ldflags += [
+        # We want to statically link libstdc++/libgcc on Linux.
+        # (On Android, libstdc++ and libgcc aren't used.)
+        "-static-libstdc++",
+        "-static-libgcc",
+      ]
+    }
+  }
+}
+
+config("ldconfig") {
+  visibility = [ ":*" ]
+
+  # Chromecast executables depend on several shared libraries in
+  # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
+  # This is explicitly disabled in Chrome for security reasons (see comments in
+  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEM's may
+  # override the default libraries shipped in the Cast receiver package.
+  if (target_rpath == "") {
+    ldflags = [
+      "-Wl,-rpath=/oem_cast_shlib",
+      "-Wl,-rpath=\$ORIGIN/lib",
+      "-Wl,-rpath=\$ORIGIN",
+    ]
+  } else {
+    ldflags = [ "-Wl,-rpath=${target_rpath}" ]
+  }
+
+  # Binaries which don't live in the same directory as Chrome component
+  # libraries may still depend on them. Explicitly add the component library
+  # directory to the rpath for the component build.
+  if (is_component_build) {
+    ldflags += [ "-Wl,-rpath=/system/chrome" ]
+  }
+}
+
+config("executable_config") {
+  configs = [ ":ldconfig" ]
+
+  if (!is_clang && current_cpu == "arm") {
+    ldflags = [
+      # Export libstdc++ and libgcc symbols to force shlibs to refer to these
+      # symbols from the executable.
+      "-Wl,--export-dynamic",
+
+      "-lm",  # stdlibc++ requires math.h
+
+      # In case we redefined libstdc++ symbols (e.g. tc_malloc)
+      "-Wl,--allow-multiple-definition",
+
+      "-Wl,--whole-archive",
+      "-l:libstdc++.a",
+      "-l:libgcc.a",
+      "-Wl,--no-whole-archive",
+    ]
+
+    # Despite including libstdc++/libgcc archives, we still need to specify
+    # static linking for them in order to prevent the executable from having a
+    # dynamic dependency on them.
+    configs += [ ":static_config" ]
+  }
+}
+
+# Shared libraries should not have RPATH or RUNPATH set. This allows the
+# shared libs to inherit RPATH from the parent executable that is loading
+# the shared library. (See internal b/37514052 for more details.)
+config("shared_library_config") {
+  if (current_cpu == "arm") {
+    configs = [ ":static_config" ]
+  }
+}
diff --git a/src/build/config/chromecast_build.gni b/src/build/config/chromecast_build.gni
new file mode 100644
index 0000000..deecdb5
--- /dev/null
+++ b/src/build/config/chromecast_build.gni
@@ -0,0 +1,95 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The args declared in this file should be referenced by components outside of
+# //chromecast. Args needed only in //chromecast should be declared in
+# //chromecast/chromecast.gni.
+declare_args() {
+  # Set this true for a Chromecast build. Chromecast builds are supported on
+  # Linux and Android.
+  is_chromecast = false
+
+  # If true, IS_CAST_DEBUG_BUILD() will evaluate to 1 in version.h. Otherwise,
+  # it will evaluate to 0. Overriding this when is_debug=false is useful for
+  # doing engineering builds.
+  cast_is_debug = is_debug
+
+  # chromecast_branding is used to include or exclude Google-branded components.
+  # Set it to "public" for a Chromium build.
+  chromecast_branding = "public"
+
+  # Set this true for an audio-only Chromecast build.
+  is_cast_audio_only = false
+
+  # If non-empty, the rpath of executables is set to this value.
+  # If empty, the default value is used.
+  target_rpath = ""
+
+  # Set true to enable modular_updater.
+  enable_modular_updater = false
+}
+
+# Note(slan): This arg depends on the value of is_chromecast, and thus must be
+# declared in a separate block. These blocks can be combined when/if
+# crbug.com/542846 is resolved.
+declare_args() {
+  # True if the Chromecast build targets desktop Linux. This type of build
+  # is useful for testing and development, but currently supports only a subset
+  # of Cast functionality. Though this defaults to true for x86 Linux devices,
+  # it should be overridden manually for an embedded x86 build.
+  # TODO(slan): Remove instances of this when x86 is a fully supported platform.
+  is_cast_desktop_build = is_chromecast && target_os == "linux" &&
+                          (target_cpu == "x86" || target_cpu == "x64")
+}
+
+declare_args() {
+  # True to enable the cast renderer. It is enabled by default for Linux,
+  # Chrome OS, and audio-only Android builds.
+  enable_cast_renderer = is_chromecast && (is_linux || is_chromeos ||
+                                           (is_cast_audio_only && is_android))
+}
+
+# Configures media options for cast.  See media/media_options.gni
+cast_mojo_media_services = []
+cast_mojo_media_host = ""
+
+if (enable_cast_renderer) {
+  # In this path, mojo media services are hosted in two processes:
+  # 1. "renderer" and "cdm" run in browser process. This is hard coded in the
+  # code.
+  # 2. "video_decoder" runs in the process specified by "cast_mojo_media_host".
+  cast_mojo_media_services = [
+    "cdm",
+    "renderer",
+  ]
+
+  if (!is_cast_audio_only) {
+    cast_mojo_media_services += [ "video_decoder" ]
+  }
+
+  cast_mojo_media_host = "gpu"
+} else if (is_android) {
+  # On Android, all the enabled mojo media services run in the process specified
+  # by "cast_mojo_media_host".
+  cast_mojo_media_services = [
+    "cdm",
+    "audio_decoder",
+  ]
+  if (!is_cast_audio_only) {
+    # These are Cast/Android devices with Video capabilities (and GPU)
+    cast_mojo_media_services += [ "video_decoder" ]
+    cast_mojo_media_host = "gpu"
+  } else {
+    # These are Cast/Android devices with only Audio capabilities (no GPU)
+    cast_mojo_media_host = "browser"
+  }
+}
+
+# Assert that Chromecast is being built for a supported platform.
+assert(is_linux || is_chromeos || is_android || is_fuchsia || !is_chromecast,
+       "Chromecast builds are not supported on $target_os")
+
+# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a
+# non-Chromecast build.
+assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build))
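To see how these args compose, here is a hedged args.gn sketch for an audio-only Android Cast build; the values are illustrative, not a supported configuration recipe:

```gn
# args.gn (hypothetical audio-only Cast build for Android).
target_os = "android"
is_chromecast = true
is_cast_audio_only = true

# Following the logic above: enable_cast_renderer evaluates to true (audio-only
# Android), so cast_mojo_media_services = [ "cdm", "renderer" ] without
# "video_decoder", and cast_mojo_media_host = "gpu".
```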
diff --git a/src/build/config/chromeos/BUILD.gn b/src/build/config/chromeos/BUILD.gn
new file mode 100644
index 0000000..f3dfe70
--- /dev/null
+++ b/src/build/config/chromeos/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+
+assert(is_chromeos_ash)
+
+declare_args() {
+  # The location of a file to which symbols ordered by Call-Chain Clustering
+  # (C3) are dumped; used for generating orderfiles in Chrome OS. See:
+  # https://research.fb.com/wp-content/uploads/2017/01/cgo2017-hfsort-final1.pdf?
+  dump_call_chain_clustering_order = ""
+}
+
+declare_args() {
+  # Whether or not we're using the new pass manager to build and link Chrome.
+  use_new_pass_manager = dump_call_chain_clustering_order != ""
+}
+
+config("print_orderfile") {
+  if (dump_call_chain_clustering_order != "") {
+    _output_orderfile =
+        rebase_path(dump_call_chain_clustering_order, root_build_dir)
+    ldflags = [ "-Wl,--print-symbol-order=$_output_orderfile" ]
+  }
+}
+
+config("compiler") {
+  if (use_new_pass_manager) {
+    cflags = [ "-fexperimental-new-pass-manager" ]
+    ldflags = [ "-fexperimental-new-pass-manager" ]
+  }
+}
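A sketch of how a C3 symbol dump might be requested through args.gn; the output path is a placeholder:

```gn
# args.gn (hypothetical): dump the C3-sorted symbol order while linking.
dump_call_chain_clustering_order = "//out/cros/chrome.orderfile.txt"

# With this set, use_new_pass_manager defaults to true, ":print_orderfile"
# adds -Wl,--print-symbol-order=<rebased path>, and ":compiler" adds
# -fexperimental-new-pass-manager to cflags and ldflags.
```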
diff --git a/src/build/config/chromeos/args.gni b/src/build/config/chromeos/args.gni
new file mode 100644
index 0000000..3be4f27
--- /dev/null
+++ b/src/build/config/chromeos/args.gni
@@ -0,0 +1,27 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # This is used only by Simple Chrome to bind its value to test-runner scripts
+  # generated at build-time.
+  cros_board = ""
+
+  # Similar to cros_board above, this is used only by test-runner scripts in
+  # Simple Chrome.
+  cros_sdk_version = ""
+}
+
+# Ensure that if one is set, the other is as well.
+assert(cros_board == "" == (cros_sdk_version == ""))
+
+declare_args() {
+  # Determines if we're building for a Chrome OS device (or VM) and not just
+  # linux-chromeos. NOTE: Most test targets in Chrome expect to run under
+  # linux-chromeos, so some have compile-time asserts that intentionally fail
+  # when this build flag is set. Build and run the tests for linux-chromeos
+  # instead.
+  # https://chromium.googlesource.com/chromium/src/+/master/docs/chromeos_build_instructions.md
+  # https://chromium.googlesource.com/chromiumos/docs/+/master/simple_chrome_workflow.md
+  is_chromeos_device = false
+}
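For illustration, a hedged Simple Chrome args.gn sketch; the board and SDK version are placeholders normally filled in by the chrome-sdk tooling:

```gn
# args.gn (hypothetical Simple Chrome checkout).
cros_board = "eve"              # placeholder board name
cros_sdk_version = "13729.0.0"  # placeholder; set by the chrome-sdk shell
is_chromeos_device = true       # building for a real device or VM image

# Note the assert above: cros_board and cros_sdk_version must be set together
# (or both left empty).
```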
diff --git a/src/build/config/chromeos/rules.gni b/src/build/config/chromeos/rules.gni
new file mode 100644
index 0000000..c8693ba
--- /dev/null
+++ b/src/build/config/chromeos/rules.gni
@@ -0,0 +1,479 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/gclient_args.gni")
+import("//build/config/python.gni")
+import("//build/util/generate_wrapper.gni")
+
+assert((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device)
+
+# Determine the real paths for various items in the SDK, which may be used
+# in the 'generate_runner_script' template below. We do so outside the template
+# to confine exec_script to a single invocation.
+if (cros_sdk_version != "") {
+  # Ideally these should be maps; however, gn doesn't support maps, so we use
+  # a list of lists to simulate one:
+  # [key1, [value1, value2, ...]], [key2, [value1, value2, ...]], where
+  # the keys are boards and values are symlinks or symlink targets, and the
+  # mapping shouldn't be used for anything else.
+  #
+  # A sample usage is:
+  # foreach(m, _symlink_targets_map) {
+  #   if(m[0] == target_key) {
+  #     target_value = m[1]
+  #   }
+  # }
+  #
+  _symlink_map = []
+  _symlink_targets_map = []
+
+  if (is_chromeos_ash) {
+    _potential_test_boards = [ cros_board ]
+  } else {
+    _potential_test_boards = []
+    if (cros_boards != "") {
+      _potential_test_boards += string_split(cros_boards, ":")
+    }
+    if (cros_boards_with_qemu_images != "") {
+      _potential_test_boards += string_split(cros_boards_with_qemu_images, ":")
+    }
+  }
+
+  foreach(b, _potential_test_boards) {
+    _cache_path_prefix =
+        "//build/cros_cache/chrome-sdk/symlinks/${b}+${cros_sdk_version}"
+
+    _cros_is_vm = false
+    foreach(b1, string_split(cros_boards_with_qemu_images, ":")) {
+      if (b == b1) {
+        _cros_is_vm = true
+      }
+    }
+
+    _symlinks = []
+    _symlinks = [
+      # Tast harness & test data.
+      rebase_path("${_cache_path_prefix}+chromeos-base/tast-cmd"),
+      rebase_path("${_cache_path_prefix}+chromeos-base/tast-remote-tests-cros"),
+
+      # Binutils (and other toolchain tools) used to deploy Chrome to the device.
+      rebase_path(
+          "${_cache_path_prefix}+environment_chromeos-base_chromeos-chrome.tar.xz"),
+      rebase_path("${_cache_path_prefix}+target_toolchain"),
+    ]
+    if (_cros_is_vm) {
+      # VM-related tools.
+      _symlinks += [
+        rebase_path("${_cache_path_prefix}+sys-firmware/seabios"),
+        rebase_path("${_cache_path_prefix}+chromiumos_qemu_image.tar.xz"),
+        rebase_path("${_cache_path_prefix}+app-emulation/qemu"),
+      ]
+    }
+    _symlink_map += [ [
+          b,
+          _symlinks,
+        ] ]
+  }
+
+  _all_symlinks = []
+  foreach(m, _symlink_map) {
+    _all_symlinks += m[1]
+  }
+  _all_symlink_targets =
+      exec_script("//build/get_symlink_targets.py", _all_symlinks, "list lines")
+  _index = 0
+  foreach(m, _symlink_map) {
+    _symlink_targets = []
+    foreach(_, m[1]) {
+      _symlink_targets += [ _all_symlink_targets[_index] ]
+      _index += 1
+    }
+
+    _symlink_targets_map += [ [
+          m[0],
+          _symlink_targets,
+        ] ]
+  }
+}
+
+# Creates a script at $generated_script that can be used to launch a cros VM
+# and optionally run a test within it.
+# Args:
+#   test_exe: Name of test binary located in the out dir. This will get copied
+#       to the VM and executed there.
+#   tast_attr_expr: Tast expression to pass to local_test_runner on the VM.
+#   tast_tests: List of Tast tests to run on the VM. Note that when this is
+#       specified, the target name used to invoke this template will be
+#       designated as the "name" of this test and will be used primarily for
+#       test-results tracking and display (e.g. the flakiness dashboard).
+#   generated_script: Path to place the generated script.
+#   deploy_chrome: If true, deploys a locally built chrome located in the root
+#       build dir to the VM or DUT after launching it.
+#   deploy_lacros: If true, deploys a locally built Lacros located in the root
+#       build dir to the VM or DUT after launching it.
+#   runtime_deps_file: Path to file listing runtime deps for the test. If set,
+#       all files listed will be copied to the VM before testing.
+#   skip_generating_board_args: By default, this template generates an '--board'
+#       arg with corresponding '--flash' or '--use-vm' args for device and vm
+#       respectively. This argument instructs the template to skip generating
+#       them, and it's designed for use cases where one builds for one board
+#       (e.g. amd64-generic), but tests on a different board (e.g. eve).
+#   tast_vars: A list of "key=value" runtime variable pairs to pass to invoke
+#       the Tast tests. For more details, please see:
+#       https://chromium.googlesource.com/chromiumos/platform/tast/+/HEAD/docs/writing_tests.md#Runtime-variables
+template("generate_runner_script") {
+  forward_variables_from(invoker,
+                         [
+                           "deploy_chrome",
+                           "deploy_lacros",
+                           "generated_script",
+                           "runtime_deps_file",
+                           "skip_generating_board_args",
+                           "tast_attr_expr",
+                           "tast_tests",
+                           "tast_vars",
+                           "testonly",
+                           "test_exe",
+                         ])
+
+  if (!defined(skip_generating_board_args)) {
+    skip_generating_board_args = false
+  }
+
+  if (skip_generating_board_args) {
+    # cros_board is not needed, so set it to empty to avoid it being used
+    # accidentally below.
+    cros_board = ""
+    not_needed([ cros_board ])
+  }
+
+  if (!defined(deploy_chrome)) {
+    deploy_chrome = false
+  }
+  if (!defined(deploy_lacros)) {
+    deploy_lacros = false
+  }
+  assert(!(deploy_chrome && deploy_lacros),
+         "deploy_chrome and deploy_lacros are exclusive.")
+
+  is_tast = defined(tast_attr_expr) || defined(tast_tests)
+  assert(!(is_tast && defined(test_exe)),
+         "Tast tests are invoked from binaries shipped with the VM image. " +
+             "There should be no locally built binary needed.")
+  assert(is_tast || !defined(tast_vars),
+         "tast_vars is only support for Tast tests")
+
+  # If we're in the cros chrome-sdk (and not the raw ebuild), the test will
+  # need some additional runtime data located in the SDK cache.
+  _sdk_data = []
+  if (cros_sdk_version != "") {
+    assert(defined(generated_script),
+           "Must specify where to place generated test launcher script via " +
+               "'generated_script'")
+
+    foreach(b, _potential_test_boards) {
+      _cros_is_vm = false
+      foreach(b1, string_split(cros_boards_with_qemu_images, ":")) {
+        if (b == b1) {
+          _cros_is_vm = true
+        }
+      }
+
+      # Determine the real paths for various items in the SDK that are used
+      # below in this template.
+      if (is_tast || _cros_is_vm || deploy_chrome) {
+        _symlink_targets = []
+        foreach(m, _symlink_targets_map) {
+          if (b == m[0]) {
+            _symlink_targets = []
+            _symlink_targets = m[1]
+          }
+        }
+
+        if (is_tast) {
+          # Add tast sdk items.
+          _sdk_data += [
+            _symlink_targets[0],
+            _symlink_targets[1],
+          ]
+        }
+        if (deploy_chrome) {
+          # To deploy chrome to the VM, it needs to be stripped down to fit into
+          # the VM. This is done by using binutils in the toolchain. So add the
+          # toolchain to the data.
+          _sdk_data += [
+            _symlink_targets[2],
+            _symlink_targets[3],
+          ]
+        }
+        if (_cros_is_vm) {
+          # Add vm sdk items.
+          _sdk_data += [
+            _symlink_targets[4],
+            _symlink_targets[5],
+            _symlink_targets[6],
+          ]
+        }
+      }
+    }
+  }
+
+  generate_wrapper(target_name) {
+    executable = "//build/chromeos/test_runner.py"
+    use_vpython3 = true
+    wrapper_script = generated_script
+    executable_args = []
+
+    if (defined(runtime_deps_file)) {
+      write_runtime_deps = runtime_deps_file
+    }
+
+    # Build executable_args for the three different test types: GTest, Tast,
+    # and host-side commands (eg telemetry).
+    if (defined(test_exe)) {
+      executable_args += [
+        "gtest",
+        "--test-exe",
+        test_exe,
+      ]
+      if (defined(runtime_deps_file)) {
+        executable_args += [
+          "--runtime-deps-path",
+          rebase_path(runtime_deps_file, root_build_dir),
+        ]
+      }
+    } else if (is_tast) {
+      # When --tast-tests is specified, test_runner.py will call
+      # local_test_runner on the VM to run the set of tests.
+      executable_args += [
+        "tast",
+        "--suite-name",
+        target_name,
+      ]
+      if (defined(tast_attr_expr)) {
+        executable_args += [
+          "--attr-expr",
+          tast_attr_expr,
+        ]
+      } else {
+        foreach(test, tast_tests) {
+          executable_args += [
+            "-t",
+            test,
+          ]
+        }
+      }
+      if (defined(tast_vars)) {
+        foreach(var, tast_vars) {
+          executable_args += [
+            "--tast-var",
+            var,
+          ]
+        }
+      }
+    } else {
+      executable_args += [ "host-cmd" ]
+    }
+    executable_args += [
+      "--cros-cache",
+      "build/cros_cache/",
+      "--path-to-outdir",
+      rebase_path(root_out_dir, "//"),
+      "-v",
+    ]
+
+    if (!skip_generating_board_args) {
+      executable_args += [
+        "--board",
+        cros_board,
+      ]
+
+      _cros_is_vm = false
+      foreach(b, string_split(cros_boards_with_qemu_images, ":")) {
+        if (cros_board == b) {
+          _cros_is_vm = true
+        }
+      }
+      if (_cros_is_vm) {
+        executable_args += [ "--use-vm" ]
+      } else {
+        executable_args += [ "--flash" ]
+      }
+    }
+
+    # If we have public Chromium builds, use public Chromium OS images when
+    # flashing the test device.
+    if (!is_chrome_branded) {
+      executable_args += [ "--public-image" ]
+    }
+
+    if (deploy_lacros) {
+      executable_args += [ "--deploy-lacros" ]
+    }
+
+    if (deploy_chrome && !defined(test_exe) && !is_tast) {
+      executable_args += [ "--deploy-chrome" ]
+    }
+
+    # executable_args should be finished, now build the data and deps lists.
+    deps = [ "//testing/buildbot/filters:chromeos_filters" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    data = [
+      "//.vpython",
+      "//.vpython3",
+
+      # We use android test-runner's results libs to construct gtest output
+      # json.
+      "//build/android/pylib/__init__.py",
+      "//build/android/pylib/base/",
+      "//build/android/pylib/results/",
+      "//build/chromeos/",
+
+      # Needed for various SDK components used below.
+      "//build/cros_cache/chrome-sdk/misc/",
+      "//build/cros_cache/chrome-sdk/symlinks/",
+
+      # The LKGM file controls what version of the VM image to download. Add it
+      # as data here so that changes to it will trigger analyze.
+      "//chromeos/CHROMEOS_LKGM",
+      "//third_party/chromite/",
+    ]
+
+    data += _sdk_data
+
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+
+    data_deps = [ "//testing:test_scripts_shared" ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+  }
+}
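A minimal, hypothetical invocation of the template above for a GTest binary; the target and file names are illustrative only:

```gn
# Hypothetical: wrap base_unittests in a cros test-runner script.
generate_runner_script("base_unittests_cros") {
  testonly = true
  test_exe = "base_unittests"
  generated_script = "$root_build_dir/bin/run_base_unittests"
  runtime_deps_file = "$root_out_dir/base_unittests.runtime_deps"
}
```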
+
+template("tast_test") {
+  forward_variables_from(invoker, "*")
+
+  # Default the expression to match any chrome-related test.
+  if (!defined(tast_attr_expr) && !defined(tast_tests)) {
+    # The following expression filters out all non-critical tests. See the link
+    # below for more details:
+    # https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/test_attributes.md
+    tast_attr_expr = "\"group:mainline\" && \"dep:chrome\""
+
+    if (defined(enable_tast_informational_tests) &&
+        enable_tast_informational_tests) {
+      tast_attr_expr += " && informational"
+    } else {
+      tast_attr_expr += " && !informational"
+    }
+    if (!is_chrome_branded) {
+      tast_attr_expr += " && !\"dep:chrome_internal\""
+    }
+  } else {
+    assert(defined(tast_attr_expr) != defined(tast_tests),
+           "Specify one of tast_tests or tast_attr_expr.")
+  }
+
+  # Append any disabled tests to the expression.
+  if (defined(tast_disabled_tests)) {
+    assert(defined(tast_attr_expr),
+           "tast_attr_expr must be used when specifying tast_disabled_tests.")
+    foreach(test, tast_disabled_tests) {
+      tast_attr_expr += " && !\"name:${test}\""
+    }
+  }
+  if (defined(tast_attr_expr)) {
+    tast_attr_expr = "( " + tast_attr_expr + " )"
+  }
+  generate_runner_script(target_name) {
+    testonly = true
+    generated_script = "$root_build_dir/bin/run_${target_name}"
+    runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps"
+    deploy_chrome = true
+    data_deps = [
+      "//:chromiumos_preflight",  # Builds the browser.
+      "//chromeos:cros_chrome_deploy",  # Adds additional browser run-time deps.
+
+      # Tools used to symbolize Chrome crash dumps.
+      # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by
+      # default.
+      "//third_party/breakpad:dump_syms",
+      "//third_party/breakpad:minidump_dump",
+      "//third_party/breakpad:minidump_stackwalk",
+    ]
+
+    data = [ "//components/crash/content/tools/generate_breakpad_symbols.py" ]
+  }
+}
+
+template("lacros_tast_tests") {
+  forward_variables_from(invoker,
+                         [
+                           "tast_attr_expr",
+                           "tast_disabled_tests",
+                           "tast_tests",
+                         ])
+  assert(defined(tast_attr_expr) != defined(tast_tests),
+         "Specify one of tast_tests or tast_attr_expr.")
+
+  # Append any disabled tests to the expression.
+  if (defined(tast_disabled_tests)) {
+    assert(defined(tast_attr_expr),
+           "tast_attr_expr must be used when specifying tast_disabled_tests.")
+    foreach(test, tast_disabled_tests) {
+      tast_attr_expr += " && !\"name:${test}\""
+    }
+  }
+  if (defined(tast_attr_expr)) {
+    tast_attr_expr = "( " + tast_attr_expr + " )"
+  }
+
+  generate_runner_script(target_name) {
+    testonly = true
+    deploy_lacros = true
+    generated_script = "$root_build_dir/bin/run_${target_name}"
+    runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps"
+
+    # At build time, Lacros tests don't know whether they'll run on VM or HW,
+    # and instead, these flags are specified at runtime when invoking the
+    # generated runner script.
+    skip_generating_board_args = true
+
+    # By default, tast tests download a lacros-chrome from a GCS location and
+    # use it for testing. To support running lacros tast tests from Chromium CI,
+    # a runtime variable is added so the tests can be pointed at a specified
+    # pre-deployed lacros-chrome. The location is decided by:
+    # https://source.chromium.org/chromium/chromium/src/+/master:third_party/chromite/scripts/deploy_chrome.py;l=80;drc=86f1234a4be8e9574442e076cdc835897f7bea61
+    tast_vars = [ "lacrosDeployedBinary=/usr/local/lacros-chrome" ]
+
+    # Lacros tast tests may have different test expectations based on whether
+    # they're for Chromium or Chrome.
+    tast_vars += [ "lacrosIsChromeBranded=$is_chrome_branded" ]
+
+    data_deps = [
+      "//chrome",  # Builds the browser.
+
+      # Tools used to symbolize Chrome crash dumps.
+      # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by
+      # default.
+      "//third_party/breakpad:dump_syms",
+      "//third_party/breakpad:minidump_dump",
+      "//third_party/breakpad:minidump_stackwalk",
+    ]
+
+    data = [
+      "//components/crash/content/tools/generate_breakpad_symbols.py",
+
+      # A script needed to launch Lacros in Lacros Tast tests.
+      "//build/lacros/mojo_connection_lacros_launcher.py",
+    ]
+  }
+}
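For completeness, hedged sketches of how the two Tast templates above might be instantiated; the test name and attribute expression are assumptions, not real suite definitions:

```gn
# Hypothetical Tast suite pinned to an explicit test name.
tast_test("chrome_login_tast_tests") {
  tast_tests = [ "ui.ChromeLogin" ]  # assumed test name
}

# Hypothetical Lacros suite selected by an attribute expression.
lacros_tast_tests("lacros_mainline_tast_tests") {
  tast_attr_expr = "\"group:mainline\" && \"dep:lacros\""  # assumed expression
}
```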
diff --git a/src/build/config/chromeos/ui_mode.gni b/src/build/config/chromeos/ui_mode.gni
new file mode 100644
index 0000000..df578bc
--- /dev/null
+++ b/src/build/config/chromeos/ui_mode.gni
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Deprecated, use is_lacros.
+  #
+  # This controls UI configuration for Chrome.
+  # If this flag is set, we assume Chrome runs on Chrome OS devices, using
+  # Wayland (instead of X11).
+  #
+  # TODO(crbug.com/1052397):
+  # Define chromeos_product instead, which takes either "browser" or "ash".
+  # Re-define the following variables as:
+  # is_lacros = chromeos_product == "browser"
+  # is_ash = chromeos_product == "ash"
+  chromeos_is_browser_only = false
+
+  # Setting this to true when building LaCrOS-chrome will cause it to
+  # *also* build ash-chrome in a subdirectory using an alternate toolchain.
+  # Don't set this unless you're sure you want it, because it'll double
+  # your build time.
+  also_build_ash_chrome = false
+}
+
+# is_chromeos_{ash,lacros} indicate that the build is specific to either the
+# ash or the lacros chrome for Chrome OS. For platform-specific logic that
+# applies to both, use is_chromeos instead.
+# .gn files are evaluated multiple times for each toolchain.
+# is_chromeos_{ash,lacros} should be set true only for builds with target
+# toolchains.
+is_chromeos_ash = is_chromeos && !chromeos_is_browser_only
+
+# TODO(crbug.com/1052397): Remove is_linux once lacros-chrome switches
+# to target_os=chromeos
+is_chromeos_lacros = (is_chromeos || is_linux) && chromeos_is_browser_only
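A sketch of the intended consumption pattern for these flags; the target and file names are hypothetical:

```gn
import("//build/config/chromeos/ui_mode.gni")

# Hypothetical target that compiles a different backend per CrOS flavor.
source_set("session_backend") {
  sources = [ "session_backend.h" ]
  if (is_chromeos_ash) {
    sources += [ "session_backend_ash.cc" ]
  } else if (is_chromeos_lacros) {
    sources += [ "session_backend_lacros.cc" ]
  }
}
```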
diff --git a/src/build/config/clang/BUILD.gn b/src/build/config/clang/BUILD.gn
new file mode 100644
index 0000000..180e2e6
--- /dev/null
+++ b/src/build/config/clang/BUILD.gn
@@ -0,0 +1,59 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = []
+
+    # The plugin is built directly into clang, so there's no need to load it
+    # dynamically.
+    cflags += [
+      "-Xclang",
+      "-add-plugin",
+      "-Xclang",
+      "find-bad-constructs",
+      "-Xclang",
+      "-plugin-arg-find-bad-constructs",
+      "-Xclang",
+      "checked-ptr-as-trivial-member",
+    ]
+
+    if (is_linux || is_chromeos || is_android || is_fuchsia) {
+      cflags += [
+        "-Xclang",
+        "-plugin-arg-find-bad-constructs",
+        "-Xclang",
+        "check-ipc",
+      ]
+    }
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so it may want to remove this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+
+    "-Wtautological-overlap-compare",
+  ]
+}
+
+group("llvm-symbolizer_data") {
+  if (is_win) {
+    data = [ "$clang_base_path/bin/llvm-symbolizer.exe" ]
+  } else {
+    data = [ "$clang_base_path/bin/llvm-symbolizer" ]
+
+    if (!is_apple) {
+      # llvm-symbolizer uses libstdc++ from the clang package.
+      data += [ "$clang_base_path/lib/libstdc++.so.6" ]
+    }
+  }
+}
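Because the plugin checks are enforced broadly (in Chromium the config is part of the default compiler configs), code that cannot satisfy them typically opts out; a hypothetical sketch:

```gn
# Hypothetical third-party target opting out of the Chrome clang plugin,
# assuming find_bad_constructs is among the target's default configs.
source_set("vendored_parser") {
  sources = [ "parser.cc" ]
  configs -= [ "//build/config/clang:find_bad_constructs" ]
}
```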
diff --git a/src/build/config/clang/clang.gni b/src/build/config/clang/clang.gni
new file mode 100644
index 0000000..5888645
--- /dev/null
+++ b/src/build/config/clang/clang.gni
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+default_clang_base_path = "//third_party/llvm-build/Release+Asserts"
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Chrome's Clang, not
+  # Chrome OS's.
+  clang_use_chrome_plugins =
+      is_clang && !is_nacl && !use_xcode_clang &&
+      default_toolchain != "//build/toolchain/cros:target"
+
+  clang_base_path = default_clang_base_path
+}
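These args are commonly overridden when developing against a locally built LLVM; a hedged args.gn sketch with a placeholder path:

```gn
# args.gn (hypothetical): use a local LLVM build without Chrome plugins.
clang_base_path = "/home/dev/llvm-project/build"  # placeholder path
clang_use_chrome_plugins = false
```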
diff --git a/src/build/config/compiler/BUILD.gn b/src/build/config/compiler/BUILD.gn
new file mode 100644
index 0000000..e9e393d
--- /dev/null
+++ b/src/build/config/compiler/BUILD.gn
@@ -0,0 +1,2585 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/android/config.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/dcheck_always_on.gni")
+if (!is_starboard) {
+  import("//build/config/gclient_args.gni")
+}
+import("//build/config/host_byteorder.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el" ||
+    current_cpu == "mips" || current_cpu == "mips64") {
+  import("//build/config/mips.gni")
+}
+if (current_cpu == "x64") {
+  import("//build/config/x64.gni")
+}
+if (is_mac) {
+  import("//build/config/apple/symbols.gni")
+}
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+if (is_nacl) {
+  # To keep NaCl variables out of builds that don't include NaCl, all
+  # variables defined in nacl/config.gni referenced here should be protected by
+  # is_nacl conditions.
+  import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such optimization.
+  android_full_debug = false
+
+  # Compile in such a way as to make it possible for the profiler to unwind full
+  # stack frames. Setting this flag has a larger effect on the performance of
+  # the generated code than just setting profiling, but gives the profiler more
+  # information to analyze.
+  # Requires profiling to be set to true.
+  enable_full_stack_frames_for_profiling = false
+
+  # When we are going to use gold we need to find it.
+  # This is initialized below, after use_gold might have been overridden.
+  gold_path = ""
+
+  # Enable fatal linker warnings. Building Chromium with certain versions
+  # of binutils can cause linker warnings.
+  # TODO(thakis): Set this to true unconditionally once lld/MachO bring-up
+  # is along far enough that it no longer emits linker warnings.
+  fatal_linker_warnings = !(is_apple && use_lld)
+
+  # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
+  # but some sanitizers are known to require it, like CFI diagnostics
+  # and UBsan variants.
+  use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
+
+  # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
+  # optimization that GCC supports. It is used by Chrome OS in its official
+  # builds. To use it, set auto_profile_path to the path to a file containing
+  # the needed gcov profiling data.
+  auto_profile_path = ""
+
+  # Allow projects that wish to stay on C++11 to override Chromium's default.
+  use_cxx11 = false
+
+  # Path to an AFDO profile to use while building with clang, if any. Empty
+  # implies none.
+  clang_sample_profile_path = ""
+
+  # Some configurations have default sample profiles. If this is true and
+  # clang_sample_profile_path is empty, we'll fall back to the default.
+  #
+  # We currently only have default profiles for Chromium in-tree, so we disable
+  # this by default for all downstream projects, since these profiles are likely
+  # nonsensical for said projects.
+  clang_use_default_sample_profile =
+      chrome_pgo_phase == 0 && build_with_chromium && is_official_build &&
+      (is_android || chromeos_is_browser_only || is_chromecast)
+
+  # This configuration is used to select a default profile in Chrome OS based on
+  # the microarchitectures we are using. This is only used if
+  # clang_use_default_sample_profile is true and clang_sample_profile_path is
+  # empty.
+  chromeos_afdo_platform = "atom"
+
+  # Emit debug information for profiling while building with clang.
+  clang_emit_debug_info_for_profiling = false
+
+  # Turn this on to have the compiler output extra timing information.
+  compiler_timing = false
+
+  # Turn this on to use the ghash feature of lld for faster debug links on
+  # Windows.
+  # http://blog.llvm.org/2018/01/improving-link-time-on-windows-with.html
+  use_ghash = true
+
+  # Whether to enable ThinLTO optimizations. Turning ThinLTO optimizations on
+  # can substantially increase link time and binary size, but they generally
+  # also make binaries a fair bit faster.
+  #
+  # TODO(gbiv): We disable optimizations by default on most platforms because
+  # the space overhead is too great. We should use some mixture of profiles and
+  # optimization settings to better tune the size increase.
+  thin_lto_enable_optimizations =
+      (is_chromeos_ash || is_android || is_win || is_linux) && is_official_build
+
+  # Initialize all local variables with a pattern. This flag will fill
+  # uninitialized floating-point types (and 32-bit pointers) with 0xFF and the
+  # rest with 0xAA. This makes behavior of uninitialized memory bugs consistent,
+  # recognizable in the debugger, and crashes on memory accesses through
+  # uninitialized pointers.
+  #
+  # TODO(crbug.com/1131993): Enabling this when 'is_android' is true breaks
+  # content_shell_test_apk on both ARM and x86.
+  #
+  # TODO(crbug.com/977230): Enabling this when 'use_xcode_clang' is true may
+  # call an old clang that doesn't support auto-init.
+  init_stack_vars = !is_android && !use_xcode_clang
+
+  # This argument controls whether to enable text section splitting in the
+  # final binary. When enabled, the separated text sections with the prefixes
+  # '.text.hot', '.text.unlikely', '.text.startup' and '.text.exit' will not be
+  # merged into the '.text' section. This allows us to identify the hot code
+  # section ('.text.hot') in the binary, which may be mlocked or mapped to huge
+  # pages to reduce TLB misses, which improves CPU usage.
+  # The gold linker by default has text section splitting enabled.
+  use_text_section_splitting = false
+
+  # Token limits may not be accurate for build configs not covered by the CQ,
+  # so only enable them by default for mainstream build configs.
+  enable_wmax_tokens =
+      !is_official_build &&
+      ((is_mac && target_cpu == "x64" && !use_system_xcode) ||
+       (is_linux && !is_chromeos && target_cpu == "x64") ||
+       (is_win && target_cpu == "x86") || (is_win && target_cpu == "x64") ||
+       (is_android && target_cpu == "arm") ||
+       (is_android && target_cpu == "arm64"))
+
+  # Turn off the --call-graph-profile-sort flag for lld by default. Enable
+  # selectively for targets where it's beneficial.
+  enable_call_graph_profile_sort = chrome_pgo_phase == 2
+
+  # Enable DWARF v5.
+  use_dwarf5 = false
+
+  # Override this to put full paths to PDBs in Windows PE files. This helps
+  # windbg and Windows Performance Analyzer with finding the PDBs in some local-
+  # build scenarios. This is never needed for bots or official builds. Because
+  # this puts the output directory in the DLLs/EXEs it breaks build determinism.
+  # Bugs have been reported to the windbg/WPA teams and this workaround will be
+  # removed when they are fixed.
+  use_full_pdb_paths = false
+
+  # Enable -H, which prints the include tree during compilation.
+  # For use by tools/clang/scripts/analyze_includes.py
+  show_includes = false
+}
+
+declare_args() {
+  # C++11 may not be an option if Android test infrastructure is used.
+  use_cxx11_on_android = use_cxx11
+}
+
+declare_args() {
+  # Set to true to use icf, Identical Code Folding.
+  #
+  # icf=all is broken in older golds, see
+  # https://sourceware.org/bugzilla/show_bug.cgi?id=17704
+  # chromeos binutils has been patched with the fix, so always use icf there.
+  # The bug only affects x86 and x64, so we can still use ICF when targeting
+  # other architectures.
+  #
+  # lld doesn't have the bug.
+  use_icf =
+      (is_posix || is_fuchsia) && !is_debug && !using_sanitizer &&
+      !use_clang_coverage && !(is_android && use_order_profiling) &&
+      (use_lld || (use_gold && (is_chromeos_ash || !(current_cpu == "x86" ||
+                                                     current_cpu == "x64"))))
+}
+
+if (is_android || (is_chromeos_ash && is_chromeos_device)) {
+  # Set the path to use orderfile for linking Chrome
+  # Note that this is for using only one orderfile for linking
+  # the Chrome binary/library.
+  declare_args() {
+    chrome_orderfile_path = ""
+
+    if (defined(default_chrome_orderfile)) {
+      # Allow downstream tools to set orderfile path with
+      # another variable.
+      chrome_orderfile_path = default_chrome_orderfile
+    } else if (is_chromeos_ash && is_chromeos_device) {
+      chrome_orderfile_path = "//chromeos/profiles/chromeos.orderfile.txt"
+    }
+  }
+}
+
+assert(!(llvm_force_head_revision && use_goma),
+       "can't use goma with trunk clang")
+assert(!(llvm_force_head_revision && use_rbe), "can't use rbe with trunk clang")
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
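As the comment notes, third-party code can drop this config and supply its own include paths; a hypothetical target:

```gn
# Hypothetical vendored target with self-contained include paths.
source_set("vendored_zlib") {
  sources = [ "src/inflate.c" ]
  configs -= [ "//build/config/compiler:default_include_dirs" ]
  include_dirs = [ "include" ]
}
```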
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+  asmflags = []
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objc = []
+  cflags_objcc = []
+  ldflags = []
+  defines = []
+  configs = []
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:compiler" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:compiler" ]
+  } else if (is_linux || is_chromeos) {
+    configs += [ "//build/config/linux:compiler" ]
+    if (is_chromeos_ash) {
+      configs += [ "//build/config/chromeos:compiler" ]
+    }
+  } else if (is_nacl) {
+    configs += [ "//build/config/nacl:compiler" ]
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:compiler" ]
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:compiler" ]
+  } else if (is_fuchsia) {
+    configs += [ "//build/config/fuchsia:compiler" ]
+  } else if (current_os == "aix") {
+    configs += [ "//build/config/aix:compiler" ]
+  }
+
+  configs += [
+    # See the definitions below.
+    ":clang_revision",
+    ":compiler_cpu_abi",
+    ":compiler_codegen",
+    ":compiler_deterministic",
+  ]
+
+  # Here we enable -fno-delete-null-pointer-checks, which makes various nullptr
+  # operations (e.g. dereferencing) into defined behavior. This avoids deletion
+  # of some security-critical code: see https://crbug.com/1139129.
+  # Nacl does not support the flag. And, we still want UBSAN to catch undefined
+  # behavior related to nullptrs, so do not add this flag if UBSAN is enabled.
+  # GCC seems to have some bugs compiling constexpr code when this is defined,
+  # so only enable it if using_clang. See: https://gcc.gnu.org/PR97913
+  # TODO(mpdenton): remove is_clang once GCC bug is fixed.
+  if (!is_nacl && !is_ubsan && is_clang) {
+    cflags += [ "-fno-delete-null-pointer-checks" ]
+  }
+
+  # Don't emit the GCC version ident directives, they just end up in the
+  # .comment section or debug info taking up binary size, and makes comparing
+  # .o files built with different compiler versions harder.
+  if (!is_win || is_clang) {
+    cflags += [ "-fno-ident" ]
+  }
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common compiler and linker configuration.
+  if (!is_win) {
+    # Common POSIX compiler flags setup.
+    # --------------------------------
+    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
+
+    # Stack protection.
+    if (is_mac) {
+      # The strong variant of the stack protector significantly increases
+      # binary size, so only enable it in debug mode.
+      if (is_debug) {
+        cflags += [ "-fstack-protector-strong" ]
+      } else {
+        cflags += [ "-fstack-protector" ]
+      }
+    } else if ((is_posix && !is_chromeos_ash && !is_nacl) || is_fuchsia) {
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it.
+      # See also https://crbug.com/533294
+      cflags += [ "--param=ssp-buffer-size=4" ]
+
+      # The x86 toolchain currently has problems with stack-protector.
+      if (is_android && current_cpu == "x86") {
+        cflags += [ "-fno-stack-protector" ]
+      } else if (current_os != "aix") {
+        # Not available on aix.
+        cflags += [ "-fstack-protector" ]
+      }
+    }
+
+    if (use_lld) {
+      ldflags += [ "-fuse-ld=lld" ]
+    }
+
+    # Linker warnings.
+    if (fatal_linker_warnings && !is_apple && current_os != "aix") {
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+    if (fatal_linker_warnings && is_apple) {
+      ldflags += [ "-Wl,-fatal_warnings" ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    cflags_cc += [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+  }
+
+  # Non-Mac Posix and Fuchsia compiler flags setup.
+  # -----------------------------------
+  if ((is_posix && !is_apple) || is_fuchsia) {
+    if (enable_profiling) {
+      if (!is_debug) {
+        cflags += [ "-g" ]
+
+        if (enable_full_stack_frames_for_profiling) {
+          cflags += [
+            "-fno-inline",
+            "-fno-optimize-sibling-calls",
+          ]
+        }
+      }
+    }
+
+    # Explicitly pass --build-id to ld. Compilers used to always pass this
+    # implicitly but don't any more (in particular clang when built without
+    # ENABLE_LINKER_BUILD_ID=ON).
+    if (is_official_build) {
+      # The sha1 build id has lower risk of collision but is more expensive to
+      # compute, so only use it in the official build to avoid slowing down
+      # links.
+      ldflags += [ "-Wl,--build-id=sha1" ]
+    } else if (current_os != "aix") {
+      ldflags += [ "-Wl,--build-id" ]
+    }
+
+    if (!is_android) {
+      defines += [
+        # _FILE_OFFSET_BITS=64 should not be set on Android in order to maintain
+        # the behavior of the Android NDK from earlier versions.
+        # See https://android-developers.googleblog.com/2017/09/introducing-android-native-development.html
+        "_FILE_OFFSET_BITS=64",
+        "_LARGEFILE_SOURCE",
+        "_LARGEFILE64_SOURCE",
+      ]
+    }
+
+    if (!is_nacl) {
+      if (exclude_unwind_tables) {
+        cflags += [
+          "-fno-unwind-tables",
+          "-fno-asynchronous-unwind-tables",
+        ]
+        defines += [ "NO_UNWIND_TABLES" ]
+      } else {
+        cflags += [ "-funwind-tables" ]
+      }
+    }
+  }
+
+  # Linux/Android/Fuchsia common flags setup.
+  # ---------------------------------
+  if (is_linux || is_chromeos || is_android || is_fuchsia) {
+    asmflags += [ "-fPIC" ]
+    cflags += [ "-fPIC" ]
+    ldflags += [ "-fPIC" ]
+
+    if (!is_clang) {
+      # Use pipes for communicating between sub-processes. Faster.
+      # (This flag doesn't do anything with Clang.)
+      cflags += [ "-pipe" ]
+    }
+
+    ldflags += [
+      "-Wl,-z,noexecstack",
+      "-Wl,-z,relro",
+    ]
+
+    if (!is_component_build) {
+      ldflags += [ "-Wl,-z,now" ]
+    }
+
+    # Compiler instrumentation can introduce dependencies in DSOs to symbols in
+    # the executable they are loaded into, so they are unresolved at link-time.
+    if (!using_sanitizer) {
+      ldflags += [
+        "-Wl,-z,defs",
+        "-Wl,--as-needed",
+      ]
+    }
+  }
+
+  # Linker flags setup: lld and gold specifics.
+  # -------------------------------------------
+  if ((is_posix || is_fuchsia) && use_lld) {
+    if (current_cpu == "arm64") {
+      # Reduce the page size from 65536 in order to reduce binary size slightly
+      # by shrinking the alignment gap between segments. This also causes all
+      # segments to be mapped adjacently, which breakpad relies on.
+      ldflags += [ "-Wl,-z,max-page-size=4096" ]
+    }
+  } else if (use_gold) {
+    ldflags += [ "-fuse-ld=gold" ]
+    if (!is_android) {
+      # On Android, this isn't needed.  gcc in the NDK knows to look next to
+      # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed
+      # above.
+      if (gold_path != "") {
+        ldflags += [ "-B$gold_path" ]
+      }
+
+      ldflags += [
+        # Experimentation found that using four linking threads
+        # saved ~20% of link time.
+        # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+        # Only apply this to the target linker, since the host
+        # linker might not be gold, but isn't used much anyway.
+        "-Wl,--threads",
+        "-Wl,--thread-count=4",
+      ]
+    }
+
+    # TODO(thestig): Make this flag work with GN.
+    #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
+    #  ldflags += [
+    #    "-Wl,--detect-odr-violations",
+    #  ]
+    #}
+  }
+
+  if (use_icf && !is_apple) {
+    ldflags += [ "-Wl,--icf=all" ]
+  }
+
+  if (is_linux || is_chromeos) {
+    cflags += [ "-pthread" ]
+    # Do not use the -pthread ldflag here since it becomes a no-op
+    # when using -nodefaultlibs, which would cause an unused argument
+    # error.  "-lpthread" is added in //build/config:default_libs.
+  }
+
+  # Clang-specific compiler flags setup.
+  # ------------------------------------
+  if (is_clang) {
+    cflags += [ "-fcolor-diagnostics" ]
+
+    # Enable -fmerge-all-constants. This used to be the default in clang
+    # for over a decade. It makes clang non-conforming, but is fairly safe
+    # in practice and saves some binary size. We might want to consider
+    # disabling this (https://bugs.llvm.org/show_bug.cgi?id=18538#c13),
+    # but for now it looks like our build might rely on it
+    # (https://crbug.com/829795).
+    cflags += [ "-fmerge-all-constants" ]
+  }
+
+  if (use_lld) {
+    # TODO(thakis): Make the driver pass --color-diagnostics to the linker
+    # if -fcolor-diagnostics is passed to it, and pass -fcolor-diagnostics
+    # in ldflags instead.
+    if (is_win) {
+      # On Windows, we call the linker directly, instead of calling it through
+      # the driver.
+      ldflags += [ "--color-diagnostics" ]
+    } else {
+      ldflags += [ "-Wl,--color-diagnostics" ]
+    }
+  }
+
+  # Enable text section splitting only on linux when using lld for now. Other
+  # platforms can be added later if needed.
+  if ((is_linux || is_chromeos) && use_lld && use_text_section_splitting) {
+    ldflags += [ "-Wl,-z,keep-text-section-prefix" ]
+  }
+
+  if (is_clang && !is_nacl && !use_xcode_clang) {
+    cflags += [ "-fcrash-diagnostics-dir=" + clang_diagnostic_dir ]
+
+    # TODO(hans): Remove this once Clang generates better optimized debug info
+    # by default. https://crbug.com/765793
+    cflags += [
+      "-mllvm",
+      "-instcombine-lower-dbg-declare=0",
+    ]
+    if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+      if (is_win) {
+        ldflags += [ "-mllvm:-instcombine-lower-dbg-declare=0" ]
+      } else {
+        ldflags += [ "-Wl,-mllvm,-instcombine-lower-dbg-declare=0" ]
+      }
+    }
+  }
+
+  # C11/C++11 compiler flags setup.
+  # ---------------------------
+  if (is_linux || is_chromeos || is_android || (is_nacl && is_clang) ||
+      current_os == "aix") {
+    if (target_os == "android") {
+      cxx11_override = use_cxx11_on_android
+    } else {
+      cxx11_override = use_cxx11
+    }
+
+    if (is_clang) {
+      standard_prefix = "c"
+
+      # Since we build with -std=c* and not -std=gnu*, _GNU_SOURCE will not be
+      # defined by the compiler.  However, lots of code relies on the
+      # non-standard features that _GNU_SOURCE enables, so define it manually.
+      defines += [ "_GNU_SOURCE" ]
+
+      if (is_nacl) {
+        # Undefine __STRICT_ANSI__ to get non-standard features which would
+        # otherwise not be enabled by NaCl's sysroots.
+        cflags += [ "-U__STRICT_ANSI__" ]
+      }
+    } else {
+      # Gcc does not support ##__VA_ARGS__ when in standards-conforming mode,
+      # but we use this feature in several places in Chromium.
+      # TODO(thomasanderson): Replace usages of ##__VA_ARGS__ with the
+      # standard-compliant __VA_OPT__ added by C++20, and switch the gcc build
+      # to -std=c*.
+      standard_prefix = "gnu"
+    }
+
+    cflags_c += [ "-std=${standard_prefix}11" ]
+    if (cxx11_override) {
+      # Override Chromium's default for projects that wish to stay on C++11.
+      cflags_cc += [ "-std=${standard_prefix}++11" ]
+    } else {
+      cflags_cc += [ "-std=${standard_prefix}++14" ]
+    }
+  } else if (!is_win && !is_nacl) {
+    if (target_os == "android") {
+      cxx11_override = use_cxx11_on_android
+    } else {
+      cxx11_override = use_cxx11
+    }
+
+    # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu11/gnu++11
+    # or c11/c++11; we technically don't need this toolchain any more, but there
+    # are still a few buildbots using it, so until those are turned off
+    # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above.
+    cflags_c += [ "-std=c11" ]
+    if (cxx11_override) {
+      cflags_cc += [ "-std=c++11" ]
+    } else {
+      cflags_cc += [ "-std=c++14" ]
+    }
+  }
+
+  # C++17 removes trigraph support, so preemptively disable trigraphs. This is
+  # especially useful given the collision with ecmascript's logical assignment
+  # operators: https://github.com/tc39/proposal-logical-assignment
+  if (is_clang) {
+    # clang-cl disables trigraphs by default
+    if (!is_win) {
+      # The gnu variants of C++11 and C++14 already disable trigraph support,
+      # but when building with clang, we use -std=c++11 / -std=c++14, which
+      # enables trigraph support: override that here.
+      cflags_cc += [ "-fno-trigraphs" ]
+    }
+
+    # Don't warn that trigraphs are ignored, since trigraphs are disabled
+    # anyway.
+    cflags_cc += [ "-Wno-trigraphs" ]
+  }
+
+  if (is_mac) {
+    # The system libc++ on Mac doesn't have aligned allocation in C++17.
+    defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ]
+    cflags_cc += [ "-stdlib=libc++" ]
+    ldflags += [ "-stdlib=libc++" ]
+  }
+
+  # Add flags for link-time optimization. These flags enable
+  # optimizations/transformations that require whole-program visibility at link
+  # time, so they need to be applied to all translation units, and we may end up
+  # with miscompiles if only part of the program is compiled with LTO flags. For
+  # that reason, we cannot allow targets to enable or disable these flags, for
+  # example by disabling the optimize configuration.
+  # TODO(pcc): Make this conditional on is_official_build rather than on gn
+  # flags for specific features.
+  if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+    assert(use_lld, "LTO is only supported with lld")
+
+    cflags += [
+      "-flto=thin",
+      "-fsplit-lto-unit",
+    ]
+
+    # Limit the size of the ThinLTO cache to the lesser of 10% of
+    # available disk space, 40GB and 100000 files.
+    cache_policy = "cache_size=10%:cache_size_bytes=40g:cache_size_files=100000"
+
+    # TODO(gbiv): We ideally shouldn't need to specify this; ThinLTO
+    # should be able to better manage binary size increases on its own.
+    import_instr_limit = 5
+
+    if (is_win) {
+      ldflags += [
+        "/opt:lldltojobs=all",
+        "-mllvm:-import-instr-limit=$import_instr_limit",
+        "/lldltocache:" +
+            rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+        "/lldltocachepolicy:$cache_policy",
+      ]
+    } else {
+      ldflags += [ "-flto=thin" ]
+
+      # Enabling ThinLTO on Chrome OS too, in an effort to reduce the memory
+      # usage in crbug.com/1038040. Note this will increase build time in
+      # Chrome OS.
+
+      # In ThinLTO builds, we run at most one link process at a time,
+      # and let it use all cores.
+      # TODO(thakis): Check if '=0' (that is, number of cores, instead
+      # of "all" which means number of hardware threads) is faster.
+      ldflags += [ "-Wl,--thinlto-jobs=all" ]
+
+      ldflags += [
+        "-Wl,--thinlto-cache-dir=" +
+            rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+        "-Wl,--thinlto-cache-policy,$cache_policy",
+      ]
+
+      if (is_chromeos_ash) {
+        # Not much performance difference was noted between the default (100)
+        # and these. ARM was originally set lower than x86 to keep the size
+        # bloat of ThinLTO to <10%, but that's potentially no longer true.
+        # FIXME(inglorion): maybe tune these?
+        if (target_cpu == "arm" || target_cpu == "arm64") {
+          import_instr_limit = 20
+        } else {
+          import_instr_limit = 30
+        }
+      }
+
+      ldflags += [ "-Wl,-mllvm,-import-instr-limit=$import_instr_limit" ]
+    }
+
+    # Whole-program devirtualization increases android libchrome.so size
+    # by ~100kb on arm32 and reduces it by ~108kb on arm64 instead.
+    # Tracked by llvm bug: https://bugs.llvm.org/show_bug.cgi?id=48245
+    if (!is_android || current_cpu == "arm64") {
+      cflags += [ "-fwhole-program-vtables" ]
+      if (!is_win) {
+        ldflags += [ "-fwhole-program-vtables" ]
+      }
+    }
+
+    # This flag causes LTO to create an .ARM.attributes section with the correct
+    # architecture. This is necessary because LLD will refuse to link a program
+    # unless the architecture revision in .ARM.attributes is sufficiently new.
+    # TODO(pcc): The contents of .ARM.attributes should be based on the
+    # -march flag passed at compile time (see llvm.org/pr36291).
+    if (current_cpu == "arm") {
+      ldflags += [ "-march=$arm_arch" ]
+    }
+  }
+
+  if (compiler_timing) {
+    if (is_clang && !is_nacl) {
+      cflags += [ "-ftime-trace" ]
+    } else if (is_win) {
+      cflags += [
+        # "Documented" here:
+        # http://aras-p.info/blog/2017/10/23/Best-unknown-MSVC-flag-d2cgsummary/
+        "/d2cgsummary",
+      ]
+    }
+  }
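+
+  # A usage sketch, assuming compiler_timing is a gn arg declared for this
+  # build (it gates the flags above):
+  #
+  #   gn gen out/Default --args="compiler_timing=true"
+  #
+  # With clang this emits a per-translation-unit .json time trace next to
+  # each object file; with MSVC it prints a codegen summary per file.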
+
+  # Pass flag to LLD so Android builds can allow debuggerd to properly symbolize
+  # stack crashes (http://crbug.com/919499).
+  if (use_lld && is_android) {
+    ldflags += [ "-Wl,--no-rosegment" ]
+  }
+
+  # LLD does call-graph-sorted binary layout by default when profile data is
+  # present. On Android this increases binary size due to more thunks for long
+  # jumps. Turn it off by default and enable selectively for targets where it's
+  # beneficial.
+  if (use_lld && !enable_call_graph_profile_sort) {
+    if (is_win) {
+      ldflags += [ "/call-graph-profile-sort:no" ]
+    } else if (!is_apple) {
+      # TODO(thakis): Once LLD's Mach-O port basically works, implement call
+      # graph profile sorting for it, add an opt-out flag, and pass it here.
+      ldflags += [ "-Wl,--no-call-graph-profile-sort" ]
+    }
+  }
+
+  if (is_clang && !is_nacl && show_includes) {
+    assert(!is_win, "show_includes is not supported on Windows")
+    cflags += [
+      "-H",
+      "-Xclang",
+      "-show-skipped-includes",
+    ]
+  }
+
+  # This flag enforces that member pointer base types are complete. It helps
+  # prevent us from running into problems in the Microsoft C++ ABI (see
+  # https://crbug.com/847724).
+  # TODO(crbug/1052397): Remove is_chromeos_lacros once lacros-chrome switches
+  # to target_os="chromeos".
+  if (is_clang && !is_nacl && target_os != "chromeos" && !use_xcode_clang &&
+      !is_chromeos_lacros && (is_win || use_custom_libcxx)) {
+    cflags += [ "-fcomplete-member-pointers" ]
+  }
+
+  # Pass the same C/C++ flags to the objective C/C++ compiler.
+  cflags_objc += cflags_c
+  cflags_objcc += cflags_cc
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler. The Windows assembler takes different types of flags
+  # so only do so for posix platforms.
+  if (is_posix || is_fuchsia) {
+    asmflags += cflags
+    asmflags += cflags_c
+  }
+}
+
+# The BUILDCONFIG file sets this config on targets by default, which means when
+# building with ThinLTO, no optimization is performed in the link step.
+config("thinlto_optimize_default") {
+  if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+    lto_opt_level = 0
+
+    if (is_win) {
+      ldflags = [ "/opt:lldlto=" + lto_opt_level ]
+    } else {
+      ldflags = [ "-Wl,--lto-O" + lto_opt_level ]
+    }
+  }
+}
+
+# Use this to enable optimization in the ThinLTO link step for select targets
+# when thin_lto_enable_optimizations is set by doing:
+#
+#   configs -= [ "//build/config/compiler:thinlto_optimize_default" ]
+#   configs += [ "//build/config/compiler:thinlto_optimize_max" ]
+#
+# Since it makes linking significantly slower and more resource intensive, only
+# use it on important targets such as the main browser executable or dll.
+config("thinlto_optimize_max") {
+  if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+    if (thin_lto_enable_optimizations) {
+      lto_opt_level = 2
+    } else {
+      lto_opt_level = 0
+    }
+
+    if (is_win) {
+      ldflags = [ "/opt:lldlto=" + lto_opt_level ]
+    } else {
+      ldflags = [ "-Wl,--lto-O" + lto_opt_level ]
+    }
+  }
+}
+
+# This provides the basic options to select the target CPU and ABI.
+# It is factored out of "compiler" so that special cases can use this
+# without using everything that "compiler" brings in.  Options that
+# tweak code generation for a particular CPU do not belong here!
+# See "compiler_codegen", below.
+config("compiler_cpu_abi") {
+  cflags = []
+  ldflags = []
+  defines = []
+
+  if ((is_posix && !is_apple) || is_fuchsia) {
+    # CPU architecture. We may or may not be doing a cross compile now, so for
+    # simplicity we always explicitly set the architecture.
+    if (current_cpu == "x64") {
+      cflags += [
+        "-m64",
+        "-march=$x64_arch",
+        "-msse3",
+      ]
+      ldflags += [ "-m64" ]
+    } else if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+      ldflags += [ "-m32" ]
+      if (!is_nacl) {
+        cflags += [
+          "-mfpmath=sse",
+          "-msse3",
+        ]
+      }
+    } else if (current_cpu == "arm") {
+      if (is_clang && !is_android && !is_nacl) {
+        cflags += [ "--target=arm-linux-gnueabihf" ]
+        ldflags += [ "--target=arm-linux-gnueabihf" ]
+      }
+      if (!is_nacl) {
+        cflags += [
+          "-march=$arm_arch",
+          "-mfloat-abi=$arm_float_abi",
+        ]
+      }
+      if (arm_tune != "") {
+        cflags += [ "-mtune=$arm_tune" ]
+      }
+    } else if (current_cpu == "arm64") {
+      if (is_clang && !is_android && !is_nacl && !is_fuchsia) {
+        cflags += [ "--target=aarch64-linux-gnu" ]
+        ldflags += [ "--target=aarch64-linux-gnu" ]
+      }
+    } else if (current_cpu == "mipsel" && !is_nacl) {
+      ldflags += [ "-Wl,--hash-style=sysv" ]
+      if (custom_toolchain == "") {
+        if (is_clang) {
+          if (is_android) {
+            cflags += [ "--target=mipsel-linux-android" ]
+            ldflags += [ "--target=mipsel-linux-android" ]
+          } else {
+            cflags += [ "--target=mipsel-linux-gnu" ]
+            ldflags += [ "--target=mipsel-linux-gnu" ]
+          }
+        } else {
+          cflags += [ "-EL" ]
+          ldflags += [ "-EL" ]
+        }
+      }
+
+      if (mips_arch_variant == "r6") {
+        cflags += [ "-mno-odd-spreg" ]
+        ldflags += [ "-mips32r6" ]
+        if (is_clang) {
+          cflags += [
+            "-march=mipsel",
+            "-mcpu=mips32r6",
+          ]
+        } else {
+          cflags += [
+            "-mips32r6",
+            "-Wa,-mips32r6",
+          ]
+          if (is_android) {
+            ldflags += [ "-Wl,-melf32ltsmip" ]
+          }
+        }
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        ldflags += [ "-mips32r2" ]
+        if (is_clang) {
+          cflags += [
+            "-march=mipsel",
+            "-mcpu=mips32r2",
+          ]
+        } else {
+          cflags += [
+            "-mips32r2",
+            "-Wa,-mips32r2",
+          ]
+          if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+            cflags += [ "-m$mips_fpu_mode" ]
+          }
+        }
+      } else if (mips_arch_variant == "r1") {
+        ldflags += [ "-mips32" ]
+        if (is_clang) {
+          cflags += [
+            "-march=mipsel",
+            "-mcpu=mips32",
+          ]
+        } else {
+          cflags += [
+            "-mips32",
+            "-Wa,-mips32",
+          ]
+        }
+      } else if (mips_arch_variant == "loongson3") {
+        defines += [ "_MIPS_ARCH_LOONGSON" ]
+        cflags += [
+          "-march=loongson3a",
+          "-mno-branch-likely",
+          "-Wa,-march=loongson3a",
+        ]
+      }
+
+      if (mips_dsp_rev == 1) {
+        cflags += [ "-mdsp" ]
+      } else if (mips_dsp_rev == 2) {
+        cflags += [ "-mdspr2" ]
+      }
+
+      cflags += [ "-m${mips_float_abi}-float" ]
+    } else if (current_cpu == "mips" && !is_nacl) {
+      ldflags += [ "-Wl,--hash-style=sysv" ]
+      if (custom_toolchain == "") {
+        if (is_clang) {
+          cflags += [ "--target=mips-linux-gnu" ]
+          ldflags += [ "--target=mips-linux-gnu" ]
+        } else {
+          cflags += [ "-EB" ]
+          ldflags += [ "-EB" ]
+        }
+      }
+
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips32r6",
+          "-Wa,-mips32r6",
+        ]
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips32r2",
+          "-Wa,-mips32r2",
+        ]
+        if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+          cflags += [ "-m$mips_fpu_mode" ]
+        }
+      } else if (mips_arch_variant == "r1") {
+        cflags += [
+          "-mips32",
+          "-Wa,-mips32",
+        ]
+      }
+
+      if (mips_dsp_rev == 1) {
+        cflags += [ "-mdsp" ]
+      } else if (mips_dsp_rev == 2) {
+        cflags += [ "-mdspr2" ]
+      }
+
+      cflags += [ "-m${mips_float_abi}-float" ]
+    } else if (current_cpu == "mips64el") {
+      cflags += [ "-D__SANE_USERSPACE_TYPES__" ]
+      ldflags += [ "-Wl,--hash-style=sysv" ]
+      if (custom_toolchain == "") {
+        if (is_clang) {
+          if (is_android) {
+            cflags += [ "--target=mips64el-linux-android" ]
+            ldflags += [ "--target=mips64el-linux-android" ]
+          } else {
+            cflags += [ "--target=mips64el-linux-gnuabi64" ]
+            ldflags += [ "--target=mips64el-linux-gnuabi64" ]
+          }
+        } else {
+          cflags += [
+            "-EL",
+            "-mabi=64",
+          ]
+          ldflags += [
+            "-EL",
+            "-mabi=64",
+          ]
+        }
+      }
+
+      if (mips_arch_variant == "r6") {
+        if (is_clang) {
+          cflags += [
+            "-march=mips64el",
+            "-mcpu=mips64r6",
+          ]
+        } else {
+          cflags += [
+            "-mips64r6",
+            "-Wa,-mips64r6",
+          ]
+          ldflags += [ "-mips64r6" ]
+        }
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        ldflags += [ "-mips64r2" ]
+        if (is_clang) {
+          cflags += [
+            "-march=mips64el",
+            "-mcpu=mips64r2",
+          ]
+        } else {
+          cflags += [
+            "-mips64r2",
+            "-Wa,-mips64r2",
+          ]
+        }
+      } else if (mips_arch_variant == "loongson3") {
+        defines += [ "_MIPS_ARCH_LOONGSON" ]
+        cflags += [
+          "-march=loongson3a",
+          "-mno-branch-likely",
+          "-Wa,-march=loongson3a",
+        ]
+      }
+    } else if (current_cpu == "mips64") {
+      ldflags += [ "-Wl,--hash-style=sysv" ]
+      if (custom_toolchain == "") {
+        if (is_clang) {
+          cflags += [ "--target=mips64-linux-gnuabi64" ]
+          ldflags += [ "--target=mips64-linux-gnuabi64" ]
+        } else {
+          cflags += [
+            "-EB",
+            "-mabi=64",
+          ]
+          ldflags += [
+            "-EB",
+            "-mabi=64",
+          ]
+        }
+      }
+
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips64r6",
+          "-Wa,-mips64r6",
+        ]
+        ldflags += [ "-mips64r6" ]
+
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips64r2",
+          "-Wa,-mips64r2",
+        ]
+        ldflags += [ "-mips64r2" ]
+      }
+    } else if (current_cpu == "pnacl" && is_nacl_nonsfi) {
+      if (target_cpu == "x86" || target_cpu == "x64") {
+        cflags += [
+          "-arch",
+          "x86-32-nonsfi",
+          "--pnacl-bias=x86-32-nonsfi",
+          "--target=i686-unknown-nacl",
+        ]
+        ldflags += [
+          "-arch",
+          "x86-32-nonsfi",
+          "--target=i686-unknown-nacl",
+        ]
+      } else if (target_cpu == "arm") {
+        cflags += [
+          "-arch",
+          "arm-nonsfi",
+          "-mfloat-abi=hard",
+          "--pnacl-bias=arm-nonsfi",
+          "--target=armv7-unknown-nacl-gnueabihf",
+        ]
+        ldflags += [
+          "-arch",
+          "arm-nonsfi",
+          "--target=armv7-unknown-nacl-gnueabihf",
+        ]
+      }
+    } else if (current_cpu == "ppc64") {
+      if (current_os == "aix") {
+        cflags += [ "-maix64" ]
+        ldflags += [ "-maix64" ]
+      } else {
+        cflags += [ "-m64" ]
+        ldflags += [ "-m64" ]
+      }
+    } else if (current_cpu == "s390x") {
+      cflags += [ "-m64" ]
+      ldflags += [ "-m64" ]
+    }
+  }
+
+  asmflags = cflags
+}
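+
+# As a sketch of the "special cases" mentioned above (the target name is
+# hypothetical, and this assumes BUILDCONFIG applies ":compiler" by default):
+#
+#   source_set("early_boot_stub") {
+#     sources = [ "stub.cc" ]
+#     configs -= [ "//build/config/compiler:compiler" ]
+#     configs += [ "//build/config/compiler:compiler_cpu_abi" ]
+#   }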
+
+# This provides options to tweak code generation that are necessary
+# for particular Chromium code or for working around particular
+# compiler bugs (or the combination of the two).
+config("compiler_codegen") {
+  configs = []
+  cflags = []
+  ldflags = []
+
+  if (is_nacl) {
+    configs += [ "//build/config/nacl:compiler_codegen" ]
+  }
+
+  if (current_cpu == "arm64" && is_android) {
+    # On arm64 disable outlining for Android. See crbug.com/931297 for more
+    # information.
+    cflags += [ "-mno-outline" ]
+
+    # This can be removed once https://bugs.llvm.org/show_bug.cgi?id=40348
+    # has been resolved, and -mno-outline is obeyed by the linker during
+    # ThinLTO.
+    ldflags += [ "-Wl,-mllvm,-enable-machine-outliner=never" ]
+  }
+
+  asmflags = cflags
+}
+
+# This provides options that make the build deterministic, so that the same
+# revision produces the same output, independent of the name of the build
+# directory and of the computer the build is done on.
+# The relative path from build dir to source dir makes it into the build
+# outputs, so it's recommended that you use a build dir two levels deep
+# (e.g. "out/Release") so that you get the same "../.." path as all the bots
+# in your build outputs.
+config("compiler_deterministic") {
+  cflags = []
+  ldflags = []
+
+  # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for
+  # deterministic build.  See https://crbug.com/314403
+  if (!is_official_build) {
+    if (is_win && !is_clang) {
+      cflags += [
+        "/wd4117",  # Trying to define or undefine a predefined macro.
+        "/D__DATE__=",
+        "/D__TIME__=",
+        "/D__TIMESTAMP__=",
+      ]
+    } else {
+      cflags += [
+        "-Wno-builtin-macro-redefined",
+        "-D__DATE__=",
+        "-D__TIME__=",
+        "-D__TIMESTAMP__=",
+      ]
+    }
+  }
+
+  # Makes builds independent of absolute file path.
+  if (is_clang && strip_absolute_paths_from_debug_symbols) {
+    # If a debug option is given, clang includes $cwd in the debug info by
+    # default. For such builds, this flag generates reproducible obj files
+    # even when the same files are built with the same compile flags in
+    # different build directories like "out/feature_a" and "out/feature_b".
+    # Other paths are already given as relative, so there is no need to
+    # normalize them.
+    cflags += [
+      "-Xclang",
+      "-fdebug-compilation-dir",
+      "-Xclang",
+      ".",
+    ]
+    if (!is_win) {
+      # We don't use clang -cc1as on Windows (yet? https://crbug.com/762167)
+      asmflags = [ "-Wa,-fdebug-compilation-dir,." ]
+    }
+
+    if (is_win && use_lld) {
+      if (symbol_level == 2 || (is_clang && using_sanitizer)) {
+        # Absolutize source file paths for PDB. Pass the real build directory
+        # if the pdb contains source-level debug information and if linker
+        # reproducibility is not critical.
+        ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ]
+      } else {
+        # Use a fake fixed base directory for paths in the pdb to make the pdb
+        # output fully deterministic and independent of the build directory.
+        ldflags += [ "/PDBSourcePath:o:\fake\prefix" ]
+      }
+    }
+  }
+
+  # Tells the compiler not to use absolute paths when passing the default
+  # paths to the tools it invokes. We don't want this because we don't
+  # really need it and it can mess up the goma cache entries.
+  if (is_clang && !is_nacl) {
+    cflags += [ "-no-canonical-prefixes" ]
+  }
+}
+
+config("clang_revision") {
+  if (!is_starboard && is_clang && clang_base_path == default_clang_base_path) {
+    update_args = [
+      "--print-revision",
+      "--verify-version=$clang_version",
+    ]
+    if (llvm_force_head_revision) {
+      update_args += [ "--llvm-force-head-revision" ]
+    }
+    clang_revision = exec_script("//tools/clang/scripts/update.py",
+                                 update_args,
+                                 "trim string")
+
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (Defines are passed via the command line,
+    # and build systems rebuild things when their command line changes.)
+    # Nothing should ever read this define.
+    defines = [ "CR_CLANG_REVISION=\"$clang_revision\"" ]
+  }
+}
+
+config("compiler_arm_fpu") {
+  if (current_cpu == "arm" && !is_ios && !is_nacl) {
+    cflags = [ "-mfpu=$arm_fpu" ]
+    if (!arm_use_thumb) {
+      cflags += [ "-marm" ]
+    }
+    asmflags = cflags
+  }
+}
+
+config("compiler_arm_thumb") {
+  if (current_cpu == "arm" && arm_use_thumb && is_posix &&
+      !(is_apple || is_nacl)) {
+    cflags = [ "-mthumb" ]
+  }
+}
+
+config("compiler_arm") {
+  if (current_cpu == "arm" && (is_chromeos_ash || is_chromeos_lacros)) {
+    # arm is normally the default mode for clang, but on chromeos a wrapper
+    # is used to pass -mthumb, which therefore changes the default.
+    cflags = [ "-marm" ]
+  }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider
+# whether a target might choose to use a different runtime library (ignore for
+# a moment whether this is possible or reasonable on your system). If such a
+# target would want to change or remove your option, put it in the
+# runtime_library config. If a target wants the option regardless, put it in
+# the compiler config.
+
+config("runtime_library") {
+  configs = []
+
+  # The order of this config is important: it must appear before
+  # android:runtime_library.  This is to ensure libc++ appears before
+  # libandroid_support in the -isystem include order.  Otherwise, there will be
+  # build errors related to symbols declared in math.h.
+  if (use_custom_libcxx) {
+    configs += [ "//build/config/c++:runtime_library" ]
+  }
+
+  # TODO(crbug.com/830987): Come up with a better name for the POSIX + Fuchsia
+  # configuration.
+  if (is_posix || is_fuchsia) {
+    configs += [ "//build/config/posix:runtime_library" ]
+  }
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:runtime_library" ]
+  } else if (is_linux || is_chromeos) {
+    configs += [ "//build/config/linux:runtime_library" ]
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:runtime_library" ]
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:runtime_library" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:runtime_library" ]
+  }
+
+  if (is_component_build) {
+    defines = [ "COMPONENT_BUILD" ]
+  }
+}
+
+# default_warnings ------------------------------------------------------------
+#
+# Collects all warning flags that are used by default.  This is used as a
+# subconfig of both chromium_code and no_chromium_code.  This way these
+# flags are guaranteed to appear on the compile command line after -Wall.
+config("default_warnings") {
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  ldflags = []
+
+  if (is_win) {
+    if (treat_warnings_as_errors) {
+      cflags += [ "/WX" ]
+    }
+    if (fatal_linker_warnings) {
+      ldflags = [ "/WX" ]
+    }
+
+    cflags += [
+      # Warnings permanently disabled:
+
+      # C4091: 'typedef ': ignored on left of 'X' when no variable is
+      #                    declared.
+      # This happens in a number of Windows headers. Dumb.
+      "/wd4091",
+
+      # C4127: conditional expression is constant
+      # This warning can in theory catch dead code and other problems, but
+      # triggers in far too many desirable cases where the conditional
+      # expression is either set by macros or corresponds to some legitimate
+      # compile-time constant expression (due to constant template args,
+      # conditionals comparing the sizes of different types, etc.).  Some of
+      # these can be worked around, but it's not worth it.
+      "/wd4127",
+
+      # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+      #        used by clients of class 'type2'
+      # This is necessary for the shared library build.
+      "/wd4251",
+
+      # C4275:  non dll-interface class used as base for dll-interface class
+      # This points out a potential (but rare) problem with referencing static
+      # fields of a non-exported base, through the base's non-exported inline
+      # functions, or directly. The warning is subtle enough that people just
+      # suppressed it when they saw it, so it's not worth it.
+      "/wd4275",
+
+      # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
+      # TODO(brucedawson): fix warnings, crbug.com/554200
+      "/wd4312",
+
+      # C4324 warns when padding is added to fulfill alignas requirements,
+      # but can trigger in benign cases that are difficult to individually
+      # suppress.
+      "/wd4324",
+
+      # C4351: new behavior: elements of array 'array' will be default
+      #        initialized
+      # This is a silly "warning" that basically just alerts you that the
+      # compiler is going to actually follow the language spec like it's
+      # supposed to, instead of not following it like old buggy versions did.
+      # There's absolutely no reason to turn this on.
+      "/wd4351",
+
+      # C4355: 'this': used in base member initializer list
+      # It's commonly useful to pass |this| to objects in a class' initializer
+      # list.  While this warning can catch real bugs, most of the time the
+      # constructors in question don't attempt to call methods on the passed-in
+      # pointer (until later), and annotating every legit usage of this is
+      # simply more hassle than the warning is worth.
+      "/wd4355",
+
+      # C4503: 'identifier': decorated name length exceeded, name was
+      #        truncated
+      # This only means that some long error messages might have truncated
+      # identifiers in the presence of lots of templates.  It has no effect on
+      # program correctness and there's no real reason to waste time trying to
+      # prevent it.
+      "/wd4503",
+
+      # Warning C4589 says: "Constructor of abstract class ignores
+      # initializer for virtual base class." Disable this warning because it
+      # is flaky in VS 2015 RTM. It triggers on compiler generated
+      # copy-constructors in some cases.
+      "/wd4589",
+
+      # C4611: interaction between 'function' and C++ object destruction is
+      #        non-portable
+      # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+      # suggests using exceptions instead of setjmp/longjmp for C++, but
+      # Chromium code compiles without exception support.  We therefore have to
+      # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+      # have to turn off this warning (and be careful about how object
+      # destruction happens in such cases).
+      "/wd4611",
+
+      # Warnings to evaluate and possibly fix/reenable later:
+
+      "/wd4100",  # Unreferenced formal function parameter.
+      "/wd4121",  # Alignment of a member was sensitive to packing.
+      "/wd4244",  # Conversion: possible loss of data.
+      "/wd4505",  # Unreferenced local function has been removed.
+      "/wd4510",  # Default constructor could not be generated.
+      "/wd4512",  # Assignment operator could not be generated.
+      "/wd4610",  # Class can never be instantiated, constructor required.
+      "/wd4838",  # Narrowing conversion. Doesn't seem to be very useful.
+      "/wd4995",  # 'X': name was marked as #pragma deprecated
+      "/wd4996",  # Deprecated function warning.
+
+      # These are variable shadowing warnings that are new in VS2015. We
+      # should work through these at some point -- they may be removed from
+      # the RTM release in the /W4 set.
+      "/wd4456",
+      "/wd4457",
+      "/wd4458",
+      "/wd4459",
+
+      # All of our compilers support the extensions below.
+      "/wd4200",  # nonstandard extension used: zero-sized array in struct/union
+      "/wd4201",  # nonstandard extension used: nameless struct/union
+      "/wd4204",  # nonstandard extension used : non-constant aggregate
+                  # initializer
+
+      "/wd4221",  # nonstandard extension used : 'identifier' : cannot be
+                  # initialized using address of automatic variable
+
+      # http://crbug.com/588506 - Conversion suppressions waiting on Clang
+      # -Wconversion.
+      "/wd4245",  # 'conversion' : conversion from 'type1' to 'type2',
+                  # signed/unsigned mismatch
+
+      "/wd4267",  # 'var' : conversion from 'size_t' to 'type', possible loss of
+                  # data
+
+      "/wd4305",  # 'identifier' : truncation from 'type1' to 'type2'
+      "/wd4389",  # 'operator' : signed/unsigned mismatch
+
+      "/wd4702",  # unreachable code
+
+      # http://crbug.com/848979 - MSVC is more conservative than Clang with
+      # regards to variables initialized and consumed in different branches.
+      "/wd4701",  # Potentially uninitialized local variable 'name' used
+      "/wd4703",  # Potentially uninitialized local pointer variable 'name' used
+
+      # http://crbug.com/848979 - Remaining Clang permitted warnings.
+      "/wd4661",  # 'identifier' : no suitable definition provided for explicit
+                  # template instantiation request
+
+      "/wd4706",  # assignment within conditional expression
+                  # MSVC is stricter and requires a boolean expression.
+
+      "/wd4715",  # 'function' : not all control paths return a value'
+                  # MSVC does not analyze switch (enum) for completeness.
+    ]
+
+    cflags_cc += [
+      # Allow "noexcept" annotations even though we compile with exceptions
+      # disabled.
+      "/wd4577",
+    ]
+
+    if (current_cpu == "x86") {
+      cflags += [
+        # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to
+        # 4267. Example: short TruncTest(size_t x) { return x; }
+        # Since we disable 4244 we need to disable 4267 during migration.
+        # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+        "/wd4267",
+      ]
+    }
+  } else {
+    if (is_apple && !is_nacl) {
+      # When compiling Objective-C, warns if a method is used whose
+      # availability is newer than the deployment target.
+      cflags += [ "-Wunguarded-availability" ]
+    }
+
+    if (is_ios) {
+      # When compiling Objective-C, warns if a selector named via @selector has
+      # not been defined in any visible interface.
+      cflags += [ "-Wundeclared-selector" ]
+    }
+
+    # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+    # warning).
+    if (current_cpu == "arm" && !is_clang) {
+      cflags += [ "-Wno-psabi" ]
+    }
+
+    if (!is_clang) {
+      cflags_cc += [
+        # See comment for -Wno-c++11-narrowing.
+        "-Wno-narrowing",
+      ]
+
+      # -Wno-class-memaccess warns about hash table and vector in blink.
+      # But the violation is intentional.
+      if (!is_nacl) {
+        cflags_cc += [ "-Wno-class-memaccess" ]
+      }
+
+      # -Wunused-local-typedefs is broken in gcc,
+      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63872
+      cflags += [ "-Wno-unused-local-typedefs" ]
+
+      # Don't warn about "maybe" uninitialized. Clang doesn't include this
+      # in -Wall but gcc does, and it gives false positives.
+      cflags += [ "-Wno-maybe-uninitialized" ]
+      cflags += [ "-Wno-deprecated-declarations" ]
+
+      # -Wcomment gives too many false positives when a backslash-ended
+      # comment line is followed by a new line of comments.
+      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61638
+      cflags += [ "-Wno-comments" ]
+
+      # -Wpacked-not-aligned complains about all generated
+      # mojom-shared-internal.h files.
+      cflags += [ "-Wno-packed-not-aligned" ]
+    }
+  }
+
+  # Common Clang and GCC warning setup.
+  if (!is_win || is_clang) {
+    cflags += [
+      # Disables.
+      "-Wno-missing-field-initializers",  # "struct foo f = {0};"
+      "-Wno-unused-parameter",  # Unused function parameters.
+    ]
+  }
+
+  if (is_clang) {
+    cflags += [
+      # TODO(thakis): Consider -Wloop-analysis (turns on
+      # -Wrange-loop-analysis too).
+
+      # This warns on using ints as initializers for floats in
+      # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+      # which happens in several places in chrome code. Not sure if
+      # this is worth fixing.
+      "-Wno-c++11-narrowing",
+
+      # TODO(thakis): This used to be implied by -Wno-unused-function,
+      # which we no longer use. Check if it makes sense to remove
+      # this as well. http://crbug.com/316352
+      "-Wno-unneeded-internal-declaration",
+    ]
+
+    # use_xcode_clang only refers to the iOS toolchain; host binaries always
+    # use Chromium's clang.
+    if (!is_nacl) {
+      cflags += [
+        # TODO(thakis): https://crbug.com/604888
+        "-Wno-undefined-var-template",
+      ]
+
+      if (is_win) {
+        # TODO(thakis): https://crbug.com/617318
+        # Currently goma cannot handle case sensitivity on Windows well.
+        cflags += [ "-Wno-nonportable-include-path" ]
+      }
+
+      if (current_toolchain == host_toolchain || !use_xcode_clang) {
+        # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not
+        # recognize.
+        cflags += [
+          # Ignore warnings about MSVC optimization pragmas.
+          # TODO(thakis): Only for no_chromium_code? http://crbug.com/912662
+          "-Wno-ignored-pragma-optimize",
+        ]
+        if (!is_starboard) {
+          cflags += [
+            # An ABI compat warning we don't care about, https://crbug.com/1102157
+            # TODO(thakis): Push this to the (few) targets that need it,
+            # instead of having a global flag.
+            "-Wno-psabi",
+
+            # TODO(https://crbug.com/989932): Evaluate and possibly enable.
+            "-Wno-implicit-int-float-conversion",
+
+            # TODO(https://crbug.com/999886): Clean up, enable.
+            "-Wno-final-dtor-non-final-class",
+
+            # TODO(https://crbug.com/1016945) Clean up, enable.
+            "-Wno-builtin-assume-aligned-alignment",
+
+            # TODO(https://crbug.com/1028110): Evaluate and possibly enable.
+            "-Wno-deprecated-copy",
+
+            # TODO(https://crbug.com/1050281): Clean up, enable.
+            "-Wno-non-c-typedef-for-linkage",
+          ]
+        }
+
+        cflags_c += [
+          # TODO(https://crbug.com/995993): Clean up and enable.
+          "-Wno-implicit-fallthrough",
+        ]
+
+        if (!is_starboard) {
+          if (enable_wmax_tokens) {
+            cflags += [ "-Wmax-tokens" ]
+          } else {
+            # TODO(https://crbug.com/1049569): Remove after Clang 87b235db.
+            cflags += [ "-Wno-max-tokens" ]
+          }
+        }
+      }
+    }
+  }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+  if (is_win) {
+    cflags = [ "/W4" ]  # Warning level 4.
+
+    if (is_clang) {
+      # Opt in to additional [[nodiscard]] on standard library methods.
+      defines = [ "_HAS_NODISCARD" ]
+    }
+  } else {
+    cflags = [ "-Wall" ]
+    if (treat_warnings_as_errors) {
+      cflags += [ "-Werror" ]
+
+      # The compiler driver can sometimes (rarely) emit warnings before calling
+      # the actual linker.  Make sure these warnings are treated as errors as
+      # well.
+      ldflags = [ "-Werror" ]
+    }
+    if (is_clang) {
+      # Enable extra warnings for chromium_code when we control the compiler.
+      cflags += [ "-Wextra" ]
+    }
+
+    # In Chromium code, we define __STDC_foo_MACROS in order to get the
+    # C99 macros on Mac and Linux.
+    defines = [
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+    ]
+
+    if (!is_debug && !using_sanitizer && current_cpu != "s390x" &&
+        current_cpu != "s390" && current_cpu != "ppc64" &&
+        current_cpu != "mips" && current_cpu != "mips64") {
+      # Non-chromium code is not guaranteed to compile cleanly with
+      # _FORTIFY_SOURCE. Also, a fortified build may fail when optimizations
+      # are disabled, so only do that for Release builds.
+      defines += [ "_FORTIFY_SOURCE=2" ]
+    }
+
+    if (is_mac) {
+      cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
+      cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
+    }
+
+    if (is_ios) {
+      cflags_objc = [ "-Wimplicit-retain-self" ]
+      cflags_objcc = cflags_objc
+    }
+  }
+
+  if (is_clang) {
+    cflags += [
+      # Warn on missing break statements at the end of switch cases.
+      # For intentional fallthrough, use FALLTHROUGH; from
+      # base/compiler_specific.h
+      "-Wimplicit-fallthrough",
+    ]
+
+    # TODO(thakis): Enable this more often, https://crbug.com/346399
+    # use_libfuzzer: https://crbug.com/1063180
+    if (!is_starboard && !is_nacl && !use_libfuzzer) {
+      cflags += [ "-Wunreachable-code" ]
+    }
+
+    # Thread safety analysis is broken under nacl: https://crbug.com/982423.
+    if (!is_nacl) {
+      cflags += [
+        # Thread safety analysis. See base/thread_annotations.h and
+        # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html
+        "-Wthread-safety",
+      ]
+    }
+
+    # TODO(thakis): Enable this for more platforms, https://crbug.com/926235
+    # ChromeOS: http://crbug.com/940863
+    # Chromecast: http://crbug.com/942554
+    has_dchecks = is_debug || dcheck_always_on
+    if (!has_dchecks && is_chromeos_ash && is_chrome_branded) {
+      # Temporarily disable -Wextra-semi for Chrome on Chrome OS.
+    } else if (is_chromecast && chromecast_branding != "public") {
+      # Temporarily disable -Wextra-semi for Chromecast.
+    } else {
+      cflags += [ "-Wextra-semi" ]
+    }
+  }
+
+  configs = [ ":default_warnings" ]
+}
+
+config("no_chromium_code") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+
+  if (is_win) {
+    cflags += [
+      "/W3",  # Warning level 3.
+      "/wd4800",  # Disable warning when forcing value to bool.
+      "/wd4267",  # TODO(jschuh): size_t to int.
+      "/wd4996",  # Deprecated function warning.
+    ]
+    defines += [
+      "_CRT_NONSTDC_NO_WARNINGS",
+      "_CRT_NONSTDC_NO_DEPRECATE",
+    ]
+  } else {
+    # GCC may emit unsuppressible warnings so don't add -Werror for no chromium
+    # code. crbug.com/589724
+    if (treat_warnings_as_errors && is_clang) {
+      cflags += [ "-Werror" ]
+      ldflags = [ "-Werror" ]
+    }
+    if (is_clang && !is_nacl) {
+      # TODO(thakis): Remove !is_nacl once
+      # https://codereview.webrtc.org/1552863002/ made its way into chromium.
+      cflags += [ "-Wall" ]
+    }
+  }
+
+  if (is_clang) {
+    cflags += [
+      # Lots of third-party libraries have unused variables. Instead of
+      # suppressing them individually, we just blanket suppress them here.
+      "-Wno-unused-variable",
+    ]
+    if (!is_starboard && !is_nacl &&
+        (current_toolchain == host_toolchain || !use_xcode_clang)) {
+      cflags += [
+        # TODO(https://crbug.com/1202159): Clean up and enable.
+        "-Wno-misleading-indentation",
+      ]
+    }
+  }
+
+  configs = [ ":default_warnings" ]
+}
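+
+# A minimal usage sketch (the target name is hypothetical): targets wrapping
+# third-party code typically swap the default warning config like so:
+#
+#   source_set("bundled_library") {
+#     sources = [ "vendor/lib.cc" ]
+#     configs -= [ "//build/config/compiler:chromium_code" ]
+#     configs += [ "//build/config/compiler:no_chromium_code" ]
+#   }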
+
+# noshadowing -----------------------------------------------------------------
+#
+# Allows turning -Wshadow on.
+
+config("noshadowing") {
+  # This flag has to be disabled for nacl because the nacl compiler is too
+  # strict about shadowing.
+  if (is_clang && !is_nacl) {
+    cflags = [ "-Wshadow" ]
+  }
+}
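+
+# For example (hypothetical usage), a target can opt into shadowing
+# diagnostics with:
+#
+#   configs += [ "//build/config/compiler:noshadowing" ]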
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR" ]
+  } else {
+    cflags_cc = [ "-frtti" ]
+  }
+}
+
+config("no_rtti") {
+  # Some sanitizer configs may require RTTI to be left enabled globally
+  if (!use_rtti) {
+    if (is_win) {
+      cflags_cc = [ "/GR-" ]
+    } else {
+      cflags_cc = [ "-fno-rtti" ]
+      cflags_objcc = cflags_cc
+    }
+  }
+}
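+
+# A usage sketch, assuming no_rtti is among the default configs set by
+# BUILDCONFIG: a target that needs typeid or dynamic_cast can swap them:
+#
+#   configs -= [ "//build/config/compiler:no_rtti" ]
+#   configs += [ "//build/config/compiler:rtti" ]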
+
+# export_dynamic ---------------------------------------------------------------
+#
+# Ensures all exported symbols are added to the dynamic symbol table.  This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries.  Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+  # TODO(crbug.com/1052397): Revisit after target_os flip is completed.
+  if (is_linux || is_chromeos_lacros || export_libcxxabi_from_executables) {
+    ldflags = [ "-rdynamic" ]
+  }
+}
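+
+# Usage sketch (hypothetical target): an executable that loads shared
+# libraries which must see its memory-related symbols would add:
+#
+#   configs += [ "//build/config/compiler:export_dynamic" ]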
+
+# thin_archive -----------------------------------------------------------------
+#
+# Enables thin archives on posix, and on windows when the lld linker is used.
+# Regular archives directly include the object files used to generate it.
+# Thin archives merely reference the object files.
+# This makes building them faster since it requires less disk IO, but is
+# inappropriate if you wish to redistribute your static library.
+# This config is added to the global config, so thin archives should already be
+# enabled.  If you want to make a distributable static library, you need to do 2
+# things:
+# 1. Set complete_static_lib so that all dependencies of the library make it
+#    into the library. See `gn help complete_static_lib` for details.
+# 2. Remove the thin_archive config, so that the .a file actually contains all
+#    .o files, instead of just references to .o files in the build directory.
+#    (A sketch of both steps follows the config below.)
+config("thin_archive") {
+  # The macOS and iOS default linker ld64 does not support reading thin
+  # archives.
+  if ((is_posix && !is_nacl && (!is_apple || use_lld)) || is_fuchsia) {
+    arflags = [ "-T" ]
+  } else if (is_win && use_lld) {
+    arflags = [ "/llvmlibthin" ]
+  }
+}
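+
+# A sketch of a distributable static library per the two steps above (the
+# target name is hypothetical):
+#
+#   static_library("distributable") {
+#     sources = [ "lib.cc" ]
+#     complete_static_lib = true
+#     configs -= [ "//build/config/compiler:thin_archive" ]
+#   }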
+
+# exceptions -------------------------------------------------------------------
+#
+# Allows turning Exceptions on or off.
+# Note: exceptions are disallowed in Google code.
+
+config("exceptions") {
+  if (is_win) {
+    # Enables exceptions in the STL.
+    if (!use_custom_libcxx) {
+      defines = [ "_HAS_EXCEPTIONS=1" ]
+    }
+    cflags_cc = [ "/EHsc" ]
+  } else {
+    cflags_cc = [ "-fexceptions" ]
+    cflags_objcc = cflags_cc
+  }
+}
+
+config("no_exceptions") {
+  if (is_win) {
+    # Disables exceptions in the STL.
+    # libc++ uses the __has_feature macro to control whether to use exceptions,
+    # so defining this macro is unnecessary. Defining _HAS_EXCEPTIONS to 0 also
+    # breaks libc++ because it depends on MSVC headers that only provide certain
+    # declarations if _HAS_EXCEPTIONS is 1. Those MSVC headers do not use
+    # exceptions, despite being conditional on _HAS_EXCEPTIONS.
+    if (!use_custom_libcxx) {
+      defines = [ "_HAS_EXCEPTIONS=0" ]
+    }
+  } else {
+    cflags_cc = [ "-fno-exceptions" ]
+    cflags_objcc = cflags_cc
+  }
+}
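+
+# Usage sketch, assuming no_exceptions is applied by default: a target that
+# must build with exceptions (e.g. third-party code that throws) can swap:
+#
+#   configs -= [ "//build/config/compiler:no_exceptions" ]
+#   configs += [ "//build/config/compiler:exceptions" ]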
+
+# Warnings ---------------------------------------------------------------------
+
+# Generate a warning for code that might emit a static initializer.
+# See: //docs/static_initializers.md
+# See: https://groups.google.com/a/chromium.org/d/topic/chromium-dev/B9Q5KTD7iCo/discussion
+config("wglobal_constructors") {
+  if (is_clang) {
+    cflags = [ "-Wglobal-constructors" ]
+  }
+}
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blocklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+  if (is_clang) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
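+
+# For example, a target opting into both static-initializer diagnostics
+# (hypothetical usage):
+#
+#   configs += [
+#     "//build/config/compiler:wglobal_constructors",
+#     "//build/config/compiler:wexit_time_destructors",
+#   ]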
+
+# On Windows compiling on x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# This config is just an alias to no_shorten_64_warnings and will
+# suppress a superset of warning 4267 and any 64-bit -> 32-bit implicit
+# conversions. Having both for a time means not having to go through and
+# update all references to no_size_t_to_int_warning throughout the codebase
+# atomically.
+#
+# Any new warning suppressions should use the no_shorten_64_warnings
+# config below and not this.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+  configs = [ ":no_shorten_64_warnings" ]
+}
+
+# As part of re-enabling -Wconversion (see issue 588506) some code
+# will continue to generate warnings.
+# The first warning to be enabled will be -Wshorten-64-to-32.
+#
+# Code that currently generates warnings for this can include this
+# config to disable them.
+config("no_shorten_64_warnings") {
+  if (current_cpu == "x64" || current_cpu == "arm64") {
+    if (is_clang) {
+      cflags = [ "-Wno-shorten-64-to-32" ]
+    } else {
+      if (is_win) {
+        # MSVC does not have an explicit warning equivalent to
+        # -Wshorten-64-to-32 but 4267 warns for size_t -> int
+        # on 64-bit builds, so is the closest.
+        cflags = [ "/wd4267" ]
+      }
+    }
+  }
+}
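+
+# Usage sketch for a target with known, not-yet-fixed 64-to-32 truncations
+# (hypothetical usage):
+#
+#   configs += [ "//build/config/compiler:no_shorten_64_warnings" ]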
+
+# Some code presumes that pointers to structures/objects are compatible
+# regardless of whether what they point to is already known to be valid.
+# gcc 4.9 and earlier had no way of suppressing this warning without
+# suppressing the rest of them.  Here we centralize the identification of
+# the gcc 4.9 toolchains.
+config("no_incompatible_pointer_warnings") {
+  cflags = []
+  if (is_clang) {
+    cflags += [ "-Wno-incompatible-pointer-types" ]
+  } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+    cflags += [ "-w" ]
+  } else if (is_chromeos_ash && current_cpu == "arm") {
+    cflags += [ "-w" ]
+  }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_optimization" config on targets by
+# default. It will be equivalent to either "optimize" (release) or
+# "no_optimize" (debug) optimization configs.
+#
+# You can override the optimization level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_optimization" ]
+#   configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/Ob2",  # Both explicit and auto inlining.
+    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
+    "/Zc:inline",  # Remove unreferenced COMDAT (faster links).
+  ]
+  if (!is_asan) {
+    common_optimize_on_cflags += [
+      # Put data in separate COMDATs. This allows the linker
+      # to put bit-identical constants at the same address even if
+      # they're unrelated constants, which saves binary size.
+      # This optimization can't be used when ASan is enabled because
+      # it is not compatible with the ASan ODR checker.
+      "/Gw",
+    ]
+  }
+  common_optimize_on_ldflags = []
+
+  # /OPT:ICF is not desirable in Debug builds, since code-folding can result in
+  # misleading symbols in stack traces.
+  if (!is_debug && !is_component_build) {
+    common_optimize_on_ldflags += [ "/OPT:ICF" ]  # Redundant COMDAT folding.
+  }
+
+  if (is_official_build) {
+    common_optimize_on_ldflags += [ "/OPT:REF" ]  # Remove unreferenced data.
+    # TODO(thakis): Add LTO/PGO clang flags eventually, https://crbug.com/598772
+  }
+} else {
+  common_optimize_on_cflags = []
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      common_optimize_on_ldflags += [
+        # Warn in case of text relocations.
+        "-Wl,--warn-shared-textrel",
+      ]
+    }
+  }
+
+  if (is_apple) {
+    common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+
+    if (is_official_build) {
+      common_optimize_on_ldflags += [
+        "-Wl,-no_data_in_code_info",
+        "-Wl,-no_function_starts",
+      ]
+    }
+  } else if (current_os != "aix") {
+    # Non-Mac Posix flags.
+    # Aix does not support these.
+
+    common_optimize_on_cflags += [
+      # Put data and code in their own sections, so that unused symbols
+      # can be removed at link time with --gc-sections.
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      # -O2 enables string tail merge optimization in gold and lld.
+      "-Wl,-O2",
+      "-Wl,--gc-sections",
+    ]
+  }
+}
+
+config("default_stack_frames") {
+  if (is_posix || is_fuchsia) {
+    if (enable_frame_pointers) {
+      cflags = [ "-fno-omit-frame-pointer" ]
+
+      # Omit frame pointers for leaf functions on x86; otherwise building
+      # libyuv triggers clang register allocator issues, see llvm.org/PR15798 /
+      # crbug.com/233709
+      if (is_clang && current_cpu == "x86" && !is_apple) {
+        cflags += [ "-momit-leaf-frame-pointer" ]
+      }
+    } else {
+      cflags = [ "-fomit-frame-pointer" ]
+    }
+  }
+  # On Windows, the flag to enable frame pointers, "/Oy-", must always come
+  # after the optimization flag [e.g. "/O2"]. The optimization flag is set by
+  # one of the "optimize" configs; see the rest of this file. The order in
+  # which cflags are applied is well-defined by the GN spec, and there is no
+  # way to ensure that cflags set by "default_stack_frames" are applied after
+  # those set by an "optimize" config. Similarly, there is no way to propagate
+  # state from this config into the "optimize" configs. So we always apply
+  # "/Oy-" in the definition of common_optimize_on_cflags, even though this
+  # may not be correct.
+}
+
+# Default "optimization on" config.
+config("optimize") {
+  if (is_win) {
+    if (chrome_pgo_phase != 2) {
+      # Favor size over speed, /O1 must be before the common flags.
+      # /O1 implies /Os and /GF.
+      cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+    } else {
+      # PGO requires all translation units to be compiled with /O2. The actual
+      # optimization level will be decided based on the profiling data.
+      cflags = [ "/O2" ] + common_optimize_on_cflags + [ "/Oi" ]
+    }
+  } else if (optimize_for_size && !is_nacl) {
+    # Favor size over speed.
+    # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
+    # guard above.
+    if (is_clang) {
+      cflags = [ "-Oz" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-Os" ] + common_optimize_on_cflags
+    }
+  } else if (is_chromeos_ash) {
+    # TODO(gbiv): This is partially favoring size over speed. CrOS exclusively
+    # uses clang, and -Os in clang is more of a size-conscious -O2 than "size at
+    # any cost" (AKA -Oz). It'd be nice to:
+    # - Make `optimize_for_size` apply to all platforms where we're optimizing
+    #   for size by default (so, also Windows)
+    # - Investigate -Oz here, maybe just for ARM?
+    cflags = [ "-Os" ] + common_optimize_on_cflags
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+  if (is_win) {
+    cflags = [
+      "/Od",  # Disable optimization.
+      "/Ob0",  # Disable all inlining (on by default).
+      "/GF",  # Enable string pooling (off by default).
+    ]
+
+    if (target_cpu == "arm64") {
+      # Disable omitting frame pointers for no_optimize build because stack
+      # traces on Windows ARM64 rely on it.
+      cflags += [ "/Oy-" ]
+    }
+  } else if (is_android && !android_full_debug) {
+    # On Android, even when optimization is disabled, we apply some
+    # optimizations that barely affect debugging, to keep binary size down.
+    if (is_clang) {
+      cflags = [ "-Oz" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-Os" ] + common_optimize_on_cflags
+    }
+
+    if (!is_component_build) {
+      # Required for library partitions. Without this all symbols just end up
+      # in the base partition.
+      ldflags = [ "-Wl,--gc-sections" ]
+    }
+  } else if (is_fuchsia) {
+    # On Fuchsia, we optimize for size here to reduce the size of debug build
+    # packages so they can be run in a KVM. See crbug.com/910243 for details.
+    cflags = [ "-Og" ]
+  } else {
+    cflags = [ "-O0" ]
+    ldflags = []
+  }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # Various components do:
+    #   if (!is_debug) {
+    #     configs -= [ "//build/config/compiler:default_optimization" ]
+    #     configs += [ "//build/config/compiler:optimize_max" ]
+    #   }
+    # So this config has to have the selection logic just like
+    # "default_optimization", below.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else {
+    ldflags = common_optimize_on_ldflags
+    if (is_win) {
+      # Favor speed over size, /O2 must be before the common flags.
+      # /O2 implies /Ot, /Oi, and /GF.
+      cflags = [ "/O2" ] + common_optimize_on_cflags
+    } else if (optimize_for_fuzzing) {
+      cflags = [ "-O1" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-O2" ] + common_optimize_on_cflags
+    }
+  }
+}
+
+# This config can be used to override the default settings for per-component
+# and whole-program optimization, optimizing the particular target for speed
+# instead of code size. This config is exactly the same as "optimize_max"
+# except that we use -O3 instead of -O2 on non-win, non-IRT platforms.
+#
+# TODO(crbug.com/621335) - rework how all of these configs are related
+# so that we don't need this disclaimer.
+config("optimize_speed") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # Various components do:
+    #   if (!is_debug) {
+    #     configs -= [ "//build/config/compiler:default_optimization" ]
+    #     configs += [ "//build/config/compiler:optimize_max" ]
+    #   }
+    # So this config has to have the selection logic just like
+    # "default_optimization", below.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else {
+    ldflags = common_optimize_on_ldflags
+    if (is_win) {
+      # Favor speed over size, /O2 must be before the common flags.
+      # /O2 implies /Ot, /Oi, and /GF.
+      cflags = [ "/O2" ] + common_optimize_on_cflags
+    } else if (optimize_for_fuzzing) {
+      cflags = [ "-O1" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-O3" ] + common_optimize_on_cflags
+    }
+  }
+}
+
+config("optimize_fuzzing") {
+  cflags = [ "-O1" ] + common_optimize_on_cflags
+  ldflags = common_optimize_on_ldflags
+  visibility = [ ":default_optimization" ]
+}
+
+# The default optimization applied to all targets. This will be equivalent to
+# either "optimize" or "no_optimize", depending on the build flags.
+config("default_optimization") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # It gets optimized the same way regardless of the type of build.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else if (is_debug) {
+    configs = [ ":no_optimize" ]
+  } else if (optimize_for_fuzzing) {
+    assert(!is_win, "Fuzzing optimize level not supported on Windows")
+
+    # Coverage build is quite slow. Using "optimize_for_fuzzing" makes it even
+    # slower as it uses "-O1" instead of "-O3". Prevent that from happening.
+    assert(!use_clang_coverage,
+           "optimize_for_fuzzing=true should not be used with " +
+               "use_clang_coverage=true.")
+    configs = [ ":optimize_fuzzing" ]
+  } else {
+    configs = [ ":optimize" ]
+  }
+}
+
+_clang_sample_profile = ""
+if (is_clang && is_a_target_toolchain) {
+  if (clang_sample_profile_path != "") {
+    _clang_sample_profile = clang_sample_profile_path
+  } else if (clang_use_default_sample_profile) {
+    assert(build_with_chromium,
+           "Our default profiles currently only apply to Chromium")
+    assert(is_android || is_chromeos_lacros || is_chromeos_ash || is_chromecast,
+           "The current platform has no default profile")
+    if (is_android || is_chromecast) {
+      _clang_sample_profile = "//chrome/android/profiles/afdo.prof"
+    } else {
+      assert(chromeos_afdo_platform == "atom" ||
+                 chromeos_afdo_platform == "bigcore",
+             "Only atom and bigcore are valid Chrome OS profiles.")
+      _clang_sample_profile =
+          "//chromeos/profiles/${chromeos_afdo_platform}.afdo.prof"
+    }
+  }
+}
+
+# Clang offers a way to assert that AFDO profiles are accurate, which causes it
+# to optimize functions not represented in a profile more aggressively for size.
+# This config can be toggled in cases where shaving off binary size hurts
+# performance too much.
+config("afdo_optimize_size") {
+  if (_clang_sample_profile != "" && sample_profile_is_accurate) {
+    cflags = [ "-fprofile-sample-accurate" ]
+  }
+}
+
+# GCC and clang support a form of profile-guided optimization called AFDO.
+# There are some targeted places that AFDO regresses (and an icky interaction
+# between //base/allocator:tcmalloc and AFDO on GCC), so we provide a separate
+# config to allow AFDO to be disabled per-target.
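+#
+# To opt a target out, use the removal pattern established elsewhere in this
+# file (a sketch; it assumes the target picks up this config by default from
+# BUILDCONFIG):
+#
+#   configs -= [ "//build/config/compiler:afdo" ]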
+config("afdo") {
+  if (is_clang) {
+    cflags = []
+    if (clang_emit_debug_info_for_profiling) {
+      # Add the following flags to generate debug info for profiling.
+      cflags += [ "-gline-tables-only" ]
+      if (!is_nacl) {
+        cflags += [ "-fdebug-info-for-profiling" ]
+      }
+    }
+    if (_clang_sample_profile != "") {
+      assert(chrome_pgo_phase == 0, "AFDO can't be used in PGO builds")
+      rebased_clang_sample_profile =
+          rebase_path(_clang_sample_profile, root_build_dir)
+      cflags += [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ]
+      inputs = [ _clang_sample_profile ]
+    }
+  } else if (auto_profile_path != "" && is_a_target_toolchain) {
+    cflags = [ "-fauto-profile=${auto_profile_path}" ]
+    inputs = [ auto_profile_path ]
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one of the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_symbols" ]
+#   configs += [ "//build/config/compiler:symbols" ]
+
+# A helper config that all configs passing /DEBUG to the linker should
+# include as sub-config.
+config("win_pdbaltpath") {
+  visibility = [
+    ":minimal_symbols",
+    ":symbols",
+  ]
+
+  # /DEBUG causes the linker to generate a pdb file, and to write the absolute
+  # path to it in the executable file it generates.  This flag turns that
+  # absolute path into just the basename of the pdb file, which helps with
+  # build reproducibility. Debuggers look for pdb files next to executables,
+  # so there's minimal downside to always using this. However, post-mortem
+  # debugging of Chromium crash dumps and ETW tracing can be complicated by this
+  # switch so an option to omit it is important.
+  if (!use_full_pdb_paths) {
+    ldflags = [ "/pdbaltpath:%_PDB%" ]
+  }
+}
+
+# Full symbols.
+config("symbols") {
+  if (is_win) {
+    if (is_clang) {
+      cflags = [ "/Z7" ]  # Debug information in the .obj files.
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+
+    if (is_clang && use_lld && use_ghash) {
+      cflags += [ "-gcodeview-ghash" ]
+      ldflags = [ "/DEBUG:GHASH" ]
+    } else {
+      ldflags = [ "/DEBUG" ]
+    }
+
+    # All configs using /DEBUG should include this:
+    configs = [ ":win_pdbaltpath" ]
+
+    # TODO(crbug.com/1138553): Re-enable constructor homing on windows after
+    # libc++ fix is in.
+  } else {
+    cflags = []
+    if (is_mac && enable_dsyms) {
+      # If generating dSYMs, specify -fno-standalone-debug. This was
+      # originally specified for https://crbug.com/479841 because dsymutil
+      # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
+      # version 7 also produces debug data that is incompatible with Breakpad
+      # dump_syms, so this is still required (https://crbug.com/622406).
+      cflags += [ "-fno-standalone-debug" ]
+    } else if (is_mac && !use_dwarf5) {
+      # clang defaults to DWARF2 on macOS unless mac_deployment_target is
+      # at least 10.11.
+      # TODO(thakis): Remove this once mac_deployment_target is 10.11.
+      cflags += [ "-gdwarf-4" ]
+    }
+
+    if (use_dwarf5 && !is_nacl) {
+      cflags += [ "-gdwarf-5" ]
+    }
+
+    # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+    # elsewhere in this file), so they can't have build-dir-independent output.
+    # Disable symbols for nacl object files to get deterministic,
+    # build-directory-independent output. pnacl and nacl-clang do support that
+    # flag, so we can use -g2 for pnacl and nacl-clang compiles.
+    # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are
+    # is_nacl && is_clang.
+    if (!is_nacl || is_clang) {
+      cflags += [ "-g2" ]
+    }
+
+    # TODO(https://crbug.com/1050118): Investigate missing debug info on mac.
+    if (is_clang && !is_nacl && !use_xcode_clang && !is_apple) {
+      cflags += [
+        "-Xclang",
+        "-debug-info-kind=constructor",
+      ]
+    }
+
+    if (is_apple) {
+      swiftflags = [ "-g" ]
+    }
+
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+    asmflags = cflags
+    ldflags = []
+
+    # Split debug info with all thinlto builds except nacl and apple.
+    # thinlto requires -gsplit-dwarf in ldflags.
+    if (use_debug_fission && use_thin_lto && !is_nacl && !is_apple) {
+      ldflags += [ "-gsplit-dwarf" ]
+    }
+
+    # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
+    # currently we get "warning:
+    # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
+    # DWARF info may be corrupt; offsets in a range list entry are in different
+    # sections" there.  Maybe just a bug in nacl_switch_32.S.
+    if (!is_apple && !is_nacl && current_cpu != "x86" &&
+        (use_gold || use_lld)) {
+      if (is_clang) {
+        # This flag enables the GNU-format pubnames and pubtypes sections,
+        # which lld needs in order to generate a correct GDB index.
+        # TODO(pcc): Try to make lld understand non-GNU-format pubnames
+        # sections (llvm.org/PR34820).
+        cflags += [ "-ggnu-pubnames" ]
+      }
+      ldflags += [ "-Wl,--gdb-index" ]
+    }
+  }
+}
+
+# Minimal symbols.
+# This config guarantees that symbols are kept for the stack traces that are
+# shown to users when a crash happens in unittests running on buildbots.
+config("minimal_symbols") {
+  if (is_win) {
+    # Functions, files, and line tables only.
+    cflags = []
+    ldflags = [ "/DEBUG" ]
+
+    # All configs using /DEBUG should include this:
+    configs = [ ":win_pdbaltpath" ]
+
+    # Enable line tables for clang. MSVC doesn't have an equivalent option.
+    if (is_clang) {
+      # -gline-tables-only is the same as -g1, but clang-cl only exposes the
+      # former.
+      cflags += [ "-gline-tables-only" ]
+    }
+  } else {
+    cflags = []
+    if (is_mac && !use_dwarf5) {
+      # clang defaults to DWARF2 on macOS unless mac_deployment_target is
+      # at least 10.11.
+      # TODO(thakis): Remove this once mac_deployment_target is 10.11.
+      cflags += [ "-gdwarf-4" ]
+    }
+
+    if (use_dwarf5 && !is_nacl) {
+      cflags += [ "-gdwarf-5" ]
+    }
+
+    # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+    # elsewhere in this file), so they can't have build-dir-independent output.
+    # Disable symbols for nacl object files to get deterministic,
+    # build-directory-independent output. pnacl and nacl-clang do support that
+    # flag, so we can use -g1 for pnacl and nacl-clang compiles.
+    # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are
+    # is_nacl && is_clang.
+    if (!is_nacl || is_clang) {
+      cflags += [ "-g1" ]
+    }
+    ldflags = []
+    if (is_android && is_clang) {
+      # Android defaults to symbol_level=1 builds in production builds
+      # (https://crbug.com/648948), but clang, unlike gcc, doesn't emit
+      # DW_AT_linkage_name in -g1 builds. -fdebug-info-for-profiling enables
+      # that (and a bunch of other things we don't need), so that we get
+      # qualified names in stacks.
+      # TODO(thakis): Consider making clang emit DW_AT_linkage_name in -g1 mode;
+      #               failing that consider doing this on non-Android too.
+      cflags += [ "-fdebug-info-for-profiling" ]
+    }
+
+    # Note: debug_fission is a no-op with symbol_level=1, since all -g1 debug
+    # info stays in the executable.
+
+    asmflags = cflags
+  }
+}
+
+# This configuration contains function names only. That is, the compiler is
+# told to not generate debug information and the linker then just puts function
+# names in the final debug information.
+config("no_symbols") {
+  if (is_win) {
+    ldflags = [ "/DEBUG" ]
+
+    # All configs using /DEBUG should include this:
+    configs = [ ":win_pdbaltpath" ]
+  } else {
+    cflags = [ "-g0" ]
+    asmflags = cflags
+  }
+}
+
+# Default symbols.
+config("default_symbols") {
+  if (symbol_level == 0) {
+    configs = [ ":no_symbols" ]
+  } else if (symbol_level == 1) {
+    configs = [ ":minimal_symbols" ]
+  } else if (symbol_level == 2) {
+    configs = [ ":symbols" ]
+  } else {
+    assert(false)
+  }
+
+  # This config is removed by base unittests apk.
+  if (is_android && is_clang && strip_debug_info) {
+    configs += [ ":strip_debug" ]
+  }
+}
+
+config("strip_debug") {
+  if (!defined(ldflags)) {
+    ldflags = []
+  }
+  ldflags += [ "-Wl,--strip-debug" ]
+}
+
+if (is_apple) {
+  # On Mac and iOS, this enables support for ARC (automatic ref-counting).
+  # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html.
+  config("enable_arc") {
+    common_flags = [ "-fobjc-arc" ]
+    cflags_objc = common_flags
+    cflags_objcc = common_flags
+  }
+}
+
+if (is_chromeos_ash && is_chromeos_device) {
+  # This config is intended to be temporary, to facilitate the transition to
+  # using an orderfile in Chrome OS. Once orderfile use becomes the default in
+  # Chrome OS, this config should no longer be needed.
+  config("use_orderfile_for_hugepage") {
+    if (chrome_orderfile_path != "") {
+      defines = [ "CHROMEOS_ORDERFILE_USE" ]
+    }
+  }
+}
+
+if (is_android || (is_chromeos_ash && is_chromeos_device)) {
+  # Use orderfile for linking Chrome on Android and Chrome OS.
+  # This config enables using an orderfile for linking in LLD.
+  # TODO: Consider using call graph sort instead, at least on Android.
+  config("chrome_orderfile_config") {
+    if (chrome_orderfile_path != "" && !enable_call_graph_profile_sort) {
+      assert(use_lld)
+      _rebased_orderfile = rebase_path(chrome_orderfile_path, root_build_dir)
+      ldflags = [
+        "-Wl,--symbol-ordering-file",
+        "-Wl,$_rebased_orderfile",
+        "-Wl,--no-warn-symbol-ordering",
+      ]
+      inputs = [ chrome_orderfile_path ]
+    }
+  }
+}
+
+# Initialize all variables on the stack if needed.
+config("default_init_stack_vars") {
+  cflags = []
+  if (init_stack_vars && is_clang && !is_nacl && !using_sanitizer) {
+    cflags += [ "-ftrivial-auto-var-init=pattern" ]
+  }
+}
+
+buildflag_header("compiler_buildflags") {
+  header = "compiler_buildflags.h"
+
+  flags = [ "CLANG_PGO=$chrome_pgo_phase" ]
+}
+
+config("cet_shadow_stack") {
+  if (enable_cet_shadow_stack && is_win) {
+    assert(target_cpu == "x64")
+    ldflags = [ "/CETCOMPAT" ]
+  }
+}
diff --git a/src/build/config/compiler/compiler.gni b/src/build/config/compiler/compiler.gni
new file mode 100644
index 0000000..c848dd7
--- /dev/null
+++ b/src/build/config/compiler/compiler.gni
@@ -0,0 +1,310 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (is_android) {
+  import("//build/config/android/abi.gni")
+}
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  import("//build/config/arm.gni")
+}
+
+if (is_apple) {
+  import("//build/config/apple/symbols.gni")
+}
+
+declare_args() {
+  # Default to warnings as errors for the default workflow, where we catch
+  # warnings with known toolchains. Allow overriding this, e.g. for Chromium
+  # builds on Linux that could use a different version of the compiler.
+  # With GCC, warnings in non-Chromium code are never treated as errors.
+  treat_warnings_as_errors = true
+
+  # How many symbols to include in the build. This affects the performance of
+  # the build since the symbols are large and dealing with them is slow.
+  #   2 means regular build with symbols.
+  #   1 means minimal symbols, usually enough for backtraces only. Symbols with
+  # internal linkage (static functions or those in anonymous namespaces) may not
+  # appear when using this level.
+  #   0 means no symbols.
+  #   -1 means auto-set according to debug/release and platform.
+  symbol_level = -1
+
+  # Android-only: Strip the debug info of libraries within lib.unstripped to
+  # reduce size. As long as symbol_level > 0, this will still allow stacks to be
+  # symbolized.
+  strip_debug_info = false
+
+  # Compile in such a way as to enable profiling of the generated code. For
+  # example, don't omit the frame pointer and leave in symbols.
+  enable_profiling = false
+
+  # use_debug_fission: whether to use split DWARF debug info
+  # files. This can reduce link time significantly, but is incompatible
+  # with some utilities such as icecc and ccache. Requires gold and
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  #
+  # This is a placeholder value indicating that the code below should set
+  # the default.  This is necessary to delay the evaluation of the default
+  # value expression until after its input values such as use_gold have
+  # been set, e.g. by a toolchain_args() block.
+  use_debug_fission = "default"
+
+  # Enables support for ThinLTO, which links 3x-10x faster than full LTO. See
+  # also http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+  # Use it by default on official-optimized android and Chrome OS builds, but
+  # not ARC or linux-chromeos since it's been seen to not play nicely with
+  # Chrome's clang. crbug.com/1033839
+  use_thin_lto =
+      is_cfi ||
+      (is_official_build && chrome_pgo_phase != 1 &&
+       (is_linux || is_win || (is_android && target_os != "chromeos") ||
+        ((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device)))
+
+  # If true, use Goma for ThinLTO code generation where applicable.
+  use_goma_thin_lto = false
+
+  # Whether we're using a sample profile collected on an architecture different
+  # than the one we're compiling for.
+  #
+  # It's currently not possible to collect AFDO profiles on anything but
+  # x86{,_64}.
+  using_mismatched_sample_profile = current_cpu != "x64" && current_cpu != "x86"
+
+  # Whether an error should be raised on attempts to make debug builds with
+  # is_component_build=false. Very large debug symbols can have unwanted side
+  # effects, so this is enforced by default for Chromium.
+  forbid_non_component_debug_builds = build_with_chromium
+
+  # Exclude unwind tables by default for official builds as unwinding can be
+  # done from stack dumps produced by Crashpad at a later time "offline" in the
+  # crash server. Since this increases binary size, we don't recommend including
+  # them in shipping builds.
+  # For unofficial (e.g. development) builds and non-Chrome branded (e.g. Cronet
+  # which doesn't use Crashpad, crbug.com/479283) builds it's useful to be able
+  # to unwind at runtime.
+  exclude_unwind_tables = is_official_build
+
+  # Where to redirect clang crash diagnoses
+  clang_diagnostic_dir =
+      rebase_path("//tools/clang/crashreports", root_build_dir)
+
+  # Mark binaries as compatible with the Shadow Stack feature of Control-flow
+  # Enforcement Technology (CET). If the Windows version and hardware support
+  # the feature and it is enabled by the OS, additional validation of return
+  # addresses is performed as a mitigation against return-oriented programming
+  # (ROP).
+  # https://chromium.googlesource.com/chromium/src/+/master/docs/design/sandbox.md#cet-shadow-stack
+  enable_cet_shadow_stack = target_cpu == "x64"
+}
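+
+# For illustration, a developer overriding some of the args above might put
+# the following in args.gn (values are arbitrary examples, not
+# recommendations):
+#
+#   treat_warnings_as_errors = false
+#   symbol_level = 1
+#   enable_profiling = true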
+
+assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO")
+
+# If true, optimize for size. Does not affect Windows builds.
+# Linux & Mac favor speed over size.
+# TODO(brettw) it's weird that Mac and desktop Linux are different. We should
+# explore favoring size over speed in this case as well.
+optimize_for_size = is_android || is_chromecast || is_fuchsia || is_ios
+
+declare_args() {
+  # Whether we should consider the profile we're using to be accurate. Accurate
+  # profiles have the benefit of (potentially substantial) binary size
+  # reductions, by instructing the compiler to optimize cold and uncovered
+  # functions heavily for size. This often comes at the cost of performance.
+  sample_profile_is_accurate = optimize_for_size
+}
+
+# Determine whether to enable or disable frame pointers, based on the platform
+# and build arguments.
+# TODO(crbug.com/1052397): Consider changing is_chromeos_ash to is_chromeos after
+# lacros-chrome switches to target_os="chromeos".
+if (is_chromeos_ash || is_chromeos_lacros) {
+  # ChromeOS generally prefers frame pointers, to support CWP.
+  # However, Clang does not currently generate usable frame pointers in ARM
+  # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them
+  # there to avoid the unnecessary overhead.
+  enable_frame_pointers = current_cpu != "arm"
+} else if (is_apple || is_linux || is_chromeos) {
+  enable_frame_pointers = true
+} else if (is_win) {
+  # 64-bit Windows ABI doesn't support frame pointers.
+  if (current_cpu == "x64") {
+    enable_frame_pointers = false
+  } else {
+    enable_frame_pointers = true
+  }
+} else if (is_android) {
+  enable_frame_pointers =
+      enable_profiling ||
+      # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
+      current_cpu == "arm64" ||
+      # For x86 Android, unwind tables are huge without frame pointers
+      # (crbug.com/762629). Enabling frame pointers grows the code size slightly
+      # but overall shrinks binaries considerably by avoiding huge unwind
+      # tables.
+      (current_cpu == "x86" && !exclude_unwind_tables && optimize_for_size) ||
+      using_sanitizer ||
+      # For caller-callee instrumentation version which needs frame pointers to
+      # get the caller address.
+      use_call_graph
+} else {
+  # Explicitly ask for frame pointers, otherwise:
+  # * Stacks may be missing for sanitizer and profiling builds.
+  # * Debug tcmalloc can crash (crbug.com/636489).
+  enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
+}
+
+# In general assume that if we have frame pointers then we can use them to
+# unwind the stack. However, this requires that they are enabled by default for
+# most translation units, that they are emitted correctly, and that the
+# compiler or platform provides a way to access them.
+can_unwind_with_frame_pointers = enable_frame_pointers
+if (current_cpu == "arm" && arm_use_thumb) {
+  # We cannot currently unwind ARM Thumb frame pointers correctly.
+  # See https://bugs.llvm.org/show_bug.cgi?id=18505
+  can_unwind_with_frame_pointers = false
+} else if (is_win) {
+  # Windows 32-bit does provide frame pointers, but the compiler does not
+  # provide intrinsics to access them, so we don't use them.
+  can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+# Unwinding with a CFI table is only possible in static library builds and is
+# required only when frame pointers are not enabled.
+can_unwind_with_cfi_table = is_android && !is_component_build &&
+                            !enable_frame_pointers && current_cpu == "arm"
+
+# Whether or not cfi table should be enabled on arm.
+# TODO(crbug.com/1090409): Replace can_unwind_with_cfi_table with this once
+# sampling profiler is enabled on android.
+enable_arm_cfi_table = is_android && !is_component_build && current_cpu == "arm"
+
+declare_args() {
+  # Set to true to use lld, the LLVM linker.
+  # Not supported for macOS (see docs/mac_lld.md), and not functional at all for
+  # iOS. But used for mac cross-compile on linux (may not work properly).
+  # The default linker everywhere else.
+  use_lld = is_clang && (!is_apple || host_os == "linux")
+}
+
+declare_args() {
+  # Whether to use the gold linker from binutils instead of lld or bfd.
+  use_gold = !use_lld && !(is_chromecast && is_linux &&
+                           (current_cpu == "arm" || current_cpu == "mipsel")) &&
+             (((is_linux || is_chromeos_lacros) &&
+               (current_cpu == "x64" || current_cpu == "x86" ||
+                current_cpu == "arm" || current_cpu == "arm64" ||
+                current_cpu == "mipsel" || current_cpu == "mips64el")) ||
+              (is_android && (current_cpu == "x86" || current_cpu == "x64" ||
+                              current_cpu == "arm" || current_cpu == "arm64")))
+}
+
+# Use relative paths for debug info. This is important to make the build
+# results independent of the checkout and build directory names, which
+# in turn is important for goma compile hit rate.
+# Setting this to true may make it harder to debug binaries on Linux, see
+# https://chromium.googlesource.com/chromium/src/+/master/docs/linux/debugging.md#Source-level-debug-with-fdebug_compilation_dir
+# It's not clear if the crash server will correctly handle dSYMs with relative
+# paths, so we disable this feature for official builds. The main benefit is
+# deterministic builds to reduce compile times, so this is less relevant for
+# official builders.
+strip_absolute_paths_from_debug_symbols_default =
+    # TODO(crbug.com/1010267): remove '!use_clang_coverage', coverage build has
+    # dependency to absolute path of source files.
+    !use_clang_coverage &&
+    (is_android || is_fuchsia || is_nacl || (is_win && use_lld) || is_linux ||
+     is_chromeos || (is_apple && !enable_dsyms))
+
+# If the platform uses stripped absolute paths by default, then we don't expose
+# it as a configuration option. If this is causing problems, please file a bug.
+if (strip_absolute_paths_from_debug_symbols_default) {
+  strip_absolute_paths_from_debug_symbols = true
+} else {
+  declare_args() {
+    strip_absolute_paths_from_debug_symbols = false
+  }
+}
+
+# If use_debug_fission wasn't manually set, compute an appropriate default.
+assert(
+    use_debug_fission == "default" || use_debug_fission || !use_debug_fission,
+    "Invalid use_debug_fission.")
+if (use_debug_fission == "default") {
+  use_debug_fission = is_debug && !is_android && !is_fuchsia && !is_apple &&
+                      !is_win && (use_gold || use_lld) && cc_wrapper == ""
+}
+
+# If it wasn't manually set, set to an appropriate default.
+assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
+if (symbol_level == -1) {
+  if (is_android && !is_component_build && !use_debug_fission) {
+    # Reduce symbol level when it will cause invalid elf files to be created
+    # (due to file size). https://crbug.com/648948.
+    symbol_level = 1
+  } else if (is_chromeos_device) {
+    # Use lower symbol level in Simple Chrome build for faster link time.
+    # For Simple Chrome, this should take precedence over is_official_build,
+    # turned on by --internal.
+    if ((target_cpu == "x64" || target_cpu == "x86") && !is_debug) {
+      # For release x86/x64 build, specify symbol_level=0 for faster link time.
+      # x86/x64 shows backtraces with symbol_level=0 (arm requires
+      # symbol_level=1).
+      symbol_level = 0
+    } else {
+      symbol_level = 1
+    }
+  } else if (using_sanitizer) {
+    # Sanitizers need line table info for stack traces. They don't need type
+    # info or variable info, so we can leave that out to speed up the build.
+    # Sanitizers also require symbols for filename suppressions to work.
+    symbol_level = 1
+  } else if ((!is_nacl && !is_linux && !is_chromeos && !is_fuchsia &&
+              current_os != "aix") || is_debug || is_official_build ||
+             is_chromecast) {
+    # Linux builds are slower when symbols are part of the target binary,
+    # whereas Mac and Windows keep them separate, so in Release Linux default
+    # them off, but keep them on for Official builds and Chromecast builds.
+    symbol_level = 2
+  } else {
+    symbol_level = 0
+  }
+}
+
+# Split dwarf works only for symbol_level == 2.
+use_debug_fission = use_debug_fission && symbol_level == 2
+
+# Non-component debug builds with symbol_level = 2 are an undesirable (very slow
+# build times) and unsupported (some test binaries will fail with > 4 GB PDBs)
+# combination. This is only checked when current_toolchain == default_toolchain
+# because the is_component_build flag is set to false in various components of
+# the build (like nacl) and we don't want to assert on those.
+# iOS does not support component builds so add an exception for this platform.
+if (forbid_non_component_debug_builds) {
+  assert(symbol_level != 2 || current_toolchain != default_toolchain ||
+             is_component_build || !is_debug || is_ios,
+         "Can't do non-component debug builds at symbol_level=2")
+}
+
+# Assert that the configuration isn't going to hit https://crbug.com/648948.
+# An exception is made when target_os == "chromeos" as we only use the Android
+# toolchain there to build relatively small binaries.
+assert(
+    ignore_elf32_limitations || !is_android || target_os == "chromeos" ||
+        is_component_build || symbol_level < 2 || use_debug_fission,
+    "Android 32-bit non-component builds without DWARF Fission cannot " +
+        "have symbol_level=2 due to 4GiB file size limit, see " +
+        "https://crbug.com/648948. " + "If you really want to try this out, " +
+        "set ignore_elf32_limitations=true.")
diff --git a/src/build/config/compiler/pgo/BUILD.gn b/src/build/config/compiler/pgo/BUILD.gn
new file mode 100644
index 0000000..3e8502e
--- /dev/null
+++ b/src/build/config/compiler/pgo/BUILD.gn
@@ -0,0 +1,100 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Configuration that enables PGO instrumentation.
+config("pgo_instrumentation_flags") {
+  visibility = [ ":default_pgo_flags" ]
+
+  # Only add flags when chrome_pgo_phase == 1, so that variables we would use
+  # are not required to be defined when we're not actually using PGO.
+  if (chrome_pgo_phase == 1 && is_clang && !is_nacl && is_a_target_toolchain) {
+    cflags = [ "-fprofile-generate" ]
+    if (!is_win) {
+      # Windows directly calls link.exe instead of the compiler driver when
+      # linking, and embeds the path to the profile runtime library as
+      # dependent library into each object file.
+      ldflags = [ "-fprofile-generate" ]
+    }
+  }
+}
+
+# Configuration that enables optimization using profile data.
+config("pgo_optimization_flags") {
+  visibility = [ ":default_pgo_flags" ]
+
+  # Only add flags when chrome_pgo_phase == 2, so that variables we would use
+  # are not required to be defined when we're not actually using PGO.
+  if (chrome_pgo_phase == 2 && is_clang && !is_nacl && is_a_target_toolchain) {
+    _pgo_target = ""
+
+    # These txt files are used by //tools/update_pgo_profiles.py to decide
+    # which profiles to use; add them as inputs so that the analyzer
+    # recognizes the dependencies.
+    inputs = []
+
+    if (is_win) {
+      if (target_cpu == "x64") {
+        _pgo_target = "win64"
+        inputs = [ "//chrome/build/win64.pgo.txt" ]
+      } else {
+        _pgo_target = "win32"
+        inputs = [ "//chrome/build/win32.pgo.txt" ]
+      }
+    } else if (is_mac) {
+      _pgo_target = "mac"
+      inputs = [ "//chrome/build/mac.pgo.txt" ]
+    } else if (is_linux || is_chromeos_lacros) {
+      _pgo_target = "linux"
+      inputs = [ "//chrome/build/linux.pgo.txt" ]
+    }
+
+    if (pgo_data_path == "" && _pgo_target != "") {
+      pgo_data_path = rebase_path(exec_script("//tools/update_pgo_profiles.py",
+                                              [
+                                                "--target",
+                                                _pgo_target,
+                                                "get_profile_path",
+                                              ],
+                                              "value"),
+                                  root_build_dir)
+    }
+    assert(pgo_data_path != "",
+           "Please set pgo_data_path to point at the profile data")
+    cflags = [
+      "-fprofile-instr-use=$pgo_data_path",
+
+      # It's possible to have some profile data legitimately missing,
+      # and at least some profile data always ends up being considered
+      # out of date, so make sure we don't error for those cases.
+      "-Wno-profile-instr-unprofiled",
+      "-Wno-profile-instr-out-of-date",
+
+      # Hashing conflicts result in a lot of warnings like this when doing
+      # a PGO build:
+      #   warning: foo.cc: Function control flow change detected (hash mismatch)
+      #   [-Wbackend-plugin]
+      # See https://crbug.com/978401
+      "-Wno-backend-plugin",
+    ]
+  }
+}
+
+# Applies flags necessary when profile-guided optimization is used.
+# Flags are only added if PGO is enabled, so that this config is safe to
+# include by default.
+config("default_pgo_flags") {
+  if (chrome_pgo_phase == 0) {
+    # Nothing. This config should be a no-op when chrome_pgo_phase == 0.
+  } else if (chrome_pgo_phase == 1) {
+    configs = [ ":pgo_instrumentation_flags" ]
+  } else if (chrome_pgo_phase == 2) {
+    configs = [ ":pgo_optimization_flags" ]
+  }
+}
diff --git a/src/build/config/compiler/pgo/pgo.gni b/src/build/config/compiler/pgo/pgo.gni
new file mode 100644
index 0000000..c053eb5
--- /dev/null
+++ b/src/build/config/compiler/pgo/pgo.gni
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+
+declare_args() {
+  # Specify the current PGO phase.
+  # Here's the different values that can be used:
+  #     0 : Means that PGO is turned off.
+  #     1 : Used during the PGI (instrumentation) phase.
+  #     2 : Used during the PGO (optimization) phase.
+  chrome_pgo_phase = 0
+  if (is_official_build &&
+      # TODO(crbug.com/1052397): Remove chromeos_is_browser_only once
+      # target_os switch for lacros-chrome is completed.
+      (is_win || is_mac ||
+       (is_linux && !chromeos_is_browser_only && !is_chromecast))) {
+    chrome_pgo_phase = 2
+  }
+
+  # When using chrome_pgo_phase = 2, read profile data from this path.
+  pgo_data_path = ""
+}
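+
+# For illustration, the two phases might be driven from args.gn like this (the
+# profile path is hypothetical):
+#
+#   Phase 1 (instrumentation):  chrome_pgo_phase = 1
+#   Phase 2 (optimization):     chrome_pgo_phase = 2
+#                               pgo_data_path = "//path/to/profile.profdata"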
diff --git a/src/build/config/compute_inputs_for_analyze.gni b/src/build/config/compute_inputs_for_analyze.gni
new file mode 100644
index 0000000..050ab70
--- /dev/null
+++ b/src/build/config/compute_inputs_for_analyze.gni
@@ -0,0 +1,14 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enable this flag when running "gn analyze".
+  #
+  # This causes some gn actions to compute inputs immediately (via exec_script)
+  # where they would normally compute them only when executed (and write them to
+  # a depfile).
+  #
+  # This flag will slow down GN, but is required for analyze to work properly.
+  compute_inputs_for_analyze = false
+}
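+
+# A sketch of the intended workflow (the output directory and file names are
+# hypothetical):
+#
+#   gn gen out/Analyze --args="compute_inputs_for_analyze=true"
+#   gn analyze out/Analyze analyze_input.json analyze_output.json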
diff --git a/src/build/config/coverage/BUILD.gn b/src/build/config/coverage/BUILD.gn
new file mode 100644
index 0000000..09c227d
--- /dev/null
+++ b/src/build/config/coverage/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+
+config("default_coverage") {
+  if (use_clang_coverage) {
+    ldflags = []
+    if (!is_win) {
+      # Windows directly calls link.exe instead of the compiler driver when
+      # linking, and embeds the path to the profile runtime library as
+      # dependent library into each object file.
+      ldflags += [ "-fprofile-instr-generate" ]
+    }
+
+    cflags = [
+      "-fprofile-instr-generate",
+      "-fcoverage-mapping",
+
+      # The following experimental flag removes unused header functions from
+      # the coverage mapping data embedded in the test binaries; the resulting
+      # reduction in binary size makes it possible to build Chrome's large
+      # unit test targets on macOS. Please refer to crbug.com/796290 for more
+      # details.
+      "-mllvm",
+      "-limited-coverage-experimental=true",
+    ]
+
+    if (is_linux || is_chromeos) {
+      # TODO(crbug.com/1194301): Remove this flag.
+      cflags += [ "-fno-use-cxa-atexit" ]
+    }
+  }
+}
diff --git a/src/build/config/coverage/coverage.gni b/src/build/config/coverage/coverage.gni
new file mode 100644
index 0000000..9586d8d
--- /dev/null
+++ b/src/build/config/coverage/coverage.gni
@@ -0,0 +1,33 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# There are two ways to enable code coverage instrumentation:
+# 1. When |use_clang_coverage| or |use_jacoco_coverage| is true and
+#    |coverage_instrumentation_input_file| is empty, all source files or
+#    Java class files are instrumented.
+# 2. When |use_clang_coverage| or |use_jacoco_coverage| is true and
+#    |coverage_instrumentation_input_file| is NOT empty and points to
+#    a text file on the file system, ONLY source files specified in the
+#    input file or Java class files related to source files are instrumented.
+declare_args() {
+  # Enable Clang's Source-based Code Coverage.
+  use_clang_coverage = false
+
+  # Enables JaCoCo Java code coverage.
+  use_jacoco_coverage = false
+
+  # The path to the coverage instrumentation input file should be a source
+  # root absolute path (e.g. //out/Release/coverage_instrumentation_input.txt).
+  # The file consists of multiple lines, each representing a path to a source
+  # file relative to the root build directory,
+  # e.g. ../../base/task/post_task.cc for build directory 'out/Release'.
+  #
+  # NOTE that this arg is a no-op if use_clang_coverage is false.
+  coverage_instrumentation_input_file = ""
+}
+
+assert(!use_clang_coverage || is_clang,
+       "Clang Source-based Code Coverage requires clang.")
diff --git a/src/build/config/crypto.gni b/src/build/config/crypto.gni
new file mode 100644
index 0000000..4d2c011
--- /dev/null
+++ b/src/build/config/crypto.gni
@@ -0,0 +1,15 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+# True if NSS is used for certificate handling.
+use_nss_certs = is_linux || is_chromeos
diff --git a/src/build/config/dcheck_always_on.gni b/src/build/config/dcheck_always_on.gni
new file mode 100644
index 0000000..e7d6a79
--- /dev/null
+++ b/src/build/config/dcheck_always_on.gni
@@ -0,0 +1,20 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only.
+  # DCHECKS can then be set as fatal/non-fatal via the DCheckIsFatal feature.
+  # See https://bit.ly/dcheck-albatross for details on how this is used.
+  dcheck_is_configurable = false
+}
+
+declare_args() {
+  # Set to true to enable dcheck in Release builds.
+  dcheck_always_on = dcheck_is_configurable
+}
+
+declare_args() {
+  # Set to false to disable EXPENSIVE_DCHECK()s.
+  enable_expensive_dchecks = is_debug || dcheck_always_on
+}
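+
+# For illustration, a release build with DCHECKs compiled in could be
+# generated with (the output directory name is hypothetical):
+#
+#   gn gen out/Release --args="is_debug=false dcheck_always_on=true"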
diff --git a/src/build/config/features.gni b/src/build/config/features.gni
new file mode 100644
index 0000000..62bf4bc
--- /dev/null
+++ b/src/build/config/features.gni
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+
+declare_args() {
+  # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4.
+  # We always build Google Chrome and Chromecast with proprietary codecs.
+  #
+  # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it
+  # out of //build will require using the build_overrides directory.
+  proprietary_codecs = is_chrome_branded || is_chromecast
+
+  # libudev usage. This currently only affects the content layer.
+  use_udev = (is_linux || is_chromeos) && !is_chromecast
+
+  use_dbus = (is_linux || is_chromeos) && !is_chromecast
+
+  use_gio = is_linux && !is_chromecast
+}
+#
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
diff --git a/src/build/config/freetype/BUILD.gn b/src/build/config/freetype/BUILD.gn
new file mode 100644
index 0000000..76cb025
--- /dev/null
+++ b/src/build/config/freetype/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+
+group("freetype") {
+  if (use_system_freetype) {
+    public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
+  } else {
+    public_deps = [ "//third_party:freetype_harfbuzz" ]
+  }
+}
diff --git a/src/build/config/freetype/freetype.gni b/src/build/config/freetype/freetype.gni
new file mode 100644
index 0000000..b4eced2
--- /dev/null
+++ b/src/build/config/freetype/freetype.gni
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink needs a recent, properly build-configured FreeType version to
+  # support OpenType variations and color emoji, and to avoid security bugs.
+  # By default we ship and link such a version as part of Chrome. For
+  # distributions that prefer to keep linking to the system version, FreeType
+  # must be newer than version 2.7.1 and have color bitmap support compiled
+  # in. WARNING: System FreeType configurations other than as described WILL
+  # INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+  use_system_freetype = false
+}
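+
+# For reference, opting in to the system library is a one-line args.gn change,
+# subject to the version constraints described above:
+#
+#   use_system_freetype = true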
diff --git a/src/build/config/fuchsia/BUILD.gn b/src/build/config/fuchsia/BUILD.gn
new file mode 100644
index 0000000..88922a1
--- /dev/null
+++ b/src/build/config/fuchsia/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+
+assert(is_fuchsia)
+assert(!is_posix)
+
+config("compiler") {
+  configs = [ "//third_party/fuchsia-sdk/sdk/build/config:compiler" ]
+
+  # TODO(https://crbug.com/706592): The stack defaults to 256k on Fuchsia (see
+  # https://fuchsia.googlesource.com/zircon/+/master/system/private/zircon/stack.h#9),
+  # but on other platforms it's much higher, so a variety of code assumes more
+  # will be available. Raise to 8M which matches e.g. macOS.
+  ldflags = [ "-Wl,-z,stack-size=0x800000" ]
+
+  # Allow the experimental relative C++ ABI vtables in Chromium-only builds,
+  # but not in Chromecast builds.
+  if (!is_chromecast) {
+    cflags_cc = [ "-fexperimental-relative-c++-abi-vtables" ]
+    ldflags += [ "-fexperimental-relative-c++-abi-vtables" ]
+  }
+}
+
+# Settings for executables.
+config("executable_config") {
+  ldflags = [ "-pie" ]
+}
diff --git a/src/build/config/fuchsia/DIR_METADATA b/src/build/config/fuchsia/DIR_METADATA
new file mode 100644
index 0000000..6d8f079
--- /dev/null
+++ b/src/build/config/fuchsia/DIR_METADATA
@@ -0,0 +1,7 @@
+monorail {
+  component: "Fuchsia"
+}
+
+team_email: "cr-fuchsia@chromium.org"
+
+os: FUCHSIA
diff --git a/src/build/config/fuchsia/add_DebugData_service.test-cmx b/src/build/config/fuchsia/add_DebugData_service.test-cmx
new file mode 100644
index 0000000..33fb6b0
--- /dev/null
+++ b/src/build/config/fuchsia/add_DebugData_service.test-cmx
@@ -0,0 +1,7 @@
+{
+  "sandbox": {
+    "services": [
+      "fuchsia.debugdata.DebugData"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/build_cmx_from_fragment.py b/src/build/config/fuchsia/build_cmx_from_fragment.py
new file mode 100644
index 0000000..ac7e349
--- /dev/null
+++ b/src/build/config/fuchsia/build_cmx_from_fragment.py
@@ -0,0 +1,49 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a complete CMX (v1) component manifest, from a program name and
+   manifest fragment file."""
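+#
+# Example invocation (all paths are hypothetical, for illustration only):
+#   python build_cmx_from_fragment.py \
+#       --cmx-fragment component.test-cmx \
+#       --cmx gen/component.cmx \
+#       --program bin/component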
+
+import argparse
+import json
+import sys
+
+
+def BuildCmxFromFragment(output_file, fragment_file, program_binary):
+  """Reads a CMX fragment specifying e.g. features & sandbox, and a program
+     binary's filename, and writes out the full CMX.
+
+     output_file: Build-relative filename at which to write the full CMX.
+     fragment_file: Build-relative filename of the CMX fragment to read from.
+     program_binary: Package-relative filename of the program binary.
+  """
+
+  # Read the fragment, merge in the program binary, then write the full CMX.
+  with open(fragment_file, 'r') as fragment:
+    component_manifest = json.load(fragment)
+  component_manifest.update({
+      'program': {
+          'binary': program_binary
+      },
+  })
+  with open(output_file, 'w') as component_manifest_file:
+    json.dump(component_manifest, component_manifest_file)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--cmx-fragment',
+      required=True,
+      help='Path to the CMX fragment to read from')
+  parser.add_argument(
+      '--cmx', required=True, help='Path to write the complete CMX file to')
+  parser.add_argument(
+      '--program',
+      required=True,
+      help='Package-relative path to the program binary')
+  args = parser.parse_args()
+
+  return BuildCmxFromFragment(args.cmx, args.cmx_fragment, args.program)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/config/fuchsia/build_symbol_archive.py b/src/build/config/fuchsia/build_symbol_archive.py
new file mode 100755
index 0000000..c763627
--- /dev/null
+++ b/src/build/config/fuchsia/build_symbol_archive.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a compressed archive of unstripped binaries cataloged by
+"ids.txt"."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tarfile
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('ids_txt', type=str, nargs=1,
+                      help='Path to ids.txt files.')
+  parser.add_argument('-o', '--output_tarball', nargs=1, type=str,
+                      help='Path to which the tarball will be written.')
+  parser.add_argument('--fuchsia-build-id-dir', type=str, required=True,
+                      help='Directory containing symbols for SDK prebuilts.')
+  args = parser.parse_args(args)
+
+  ids_txt = args.ids_txt[0]
+  build_ids_archive = tarfile.open(args.output_tarball[0], 'w:bz2')
+  for line in open(ids_txt, 'r'):
+    build_id, binary_path = line.strip().split(' ')
+
+    # Look for prebuilt symbols in the SDK first.
+    symbol_source_path = os.path.join(args.fuchsia_build_id_dir,
+                                      build_id[:2],
+                                      build_id[2:] + '.debug')
+    if not os.path.exists(symbol_source_path):
+      symbol_source_path = os.path.abspath(
+          os.path.join(os.path.dirname(ids_txt), binary_path))
+
+      if os.path.getsize(symbol_source_path) == 0:
+        # This is a prebuilt which wasn't accompanied by SDK symbols.
+        continue
+
+    # Exclude stripped binaries (indicated by their lack of symbol tables).
+    readelf_output = subprocess.check_output(
+        ['readelf', '-S', symbol_source_path], universal_newlines=True)
+    if '.symtab' not in readelf_output:
+      continue
+
+    # Archive the unstripped ELF binary, placing it in a hierarchy keyed to the
+    # GNU build ID. The binary resides in a directory whose name is the first
+    # two characters of the build ID, with the binary file itself named after
+    # the remaining characters of the build ID. So, a binary file with the build
+    # ID "deadbeef" would be located at the path 'de/adbeef.debug'.
+    build_ids_archive.add(symbol_source_path,
+                          '%s/%s.debug' % (build_id[:2], build_id[2:]))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/config/fuchsia/config.gni b/src/build/config/fuchsia/config.gni
new file mode 100644
index 0000000..8e9e2be
--- /dev/null
+++ b/src/build/config/fuchsia/config.gni
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+# Compute the AEMU path.
+aemu_root = "//third_party/aemu-${host_os}-${host_cpu}"
+
+# Compute the path to the arch-specific boot image directory.
+boot_image_root = "//third_party/fuchsia-sdk/images/${target_cpu}"
diff --git a/src/build/config/fuchsia/extend_fvm.py b/src/build/config/fuchsia/extend_fvm.py
new file mode 100644
index 0000000..44e5ee3
--- /dev/null
+++ b/src/build/config/fuchsia/extend_fvm.py
@@ -0,0 +1,26 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies a FVM file and extends it by a specified amount.
+
+Arg #1: path to 'fvm'.
+    #2: the path to the source fvm.blk.
+    #3: the path that the extended FVM file will be written to.
+    #4: the additional number of bytes to grow fvm.blk by."""
+
+import os
+import shutil
+import subprocess
+import sys
+
+def ExtendFVM(fvm_tool_path, src_path, dest_path, delta):
+  old_size = os.path.getsize(src_path)
+  new_size = old_size + int(delta)
+  shutil.copyfile(src_path, dest_path)
+  subprocess.check_call([fvm_tool_path, dest_path, 'extend', '--length',
+                         str(new_size)])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(ExtendFVM(*sys.argv[1:]))
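
A hedged usage sketch for the script above; all paths here are hypothetical and only illustrate the argument order documented in the docstring:

import subprocess

# Grow a copy of fvm.blk by 16 MiB. The 'fvm' tool ships with the
# Fuchsia SDK; these paths are illustrative, not taken from this diff.
subprocess.check_call([
    'python', 'build/config/fuchsia/extend_fvm.py',
    'third_party/fuchsia-sdk/sdk/tools/x64/fvm',  # arg #1: path to 'fvm'
    'out/fuchsia/fvm.blk',                        # arg #2: source image
    'out/fuchsia/fvm.extended.blk',               # arg #3: destination
    str(16 * 1024 * 1024),                        # arg #4: bytes to grow by
])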
diff --git a/src/build/config/fuchsia/generate_runner_scripts.gni b/src/build/config/fuchsia/generate_runner_scripts.gni
new file mode 100644
index 0000000..7fac16f
--- /dev/null
+++ b/src/build/config/fuchsia/generate_runner_scripts.gni
@@ -0,0 +1,269 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/fuchsia/config.gni")
+import("//build/config/fuchsia/package.gni")
+import("//build/config/gclient_args.gni")
+import("//build/config/sysroot.gni")
+import("//build/util/generate_wrapper.gni")
+
+declare_args() {
+  # Sets the Fuchsia Amber repository which will be used by default by the
+  # generated installation scripts. If not specified, then no default directory
+  # will be used.
+  default_fuchsia_build_dir_for_installation = ""
+
+  # CPU architecture of the host used to run the tests.
+  test_host_cpu = host_cpu
+
+  # Sets whether emulators need to be included in the test isolates
+  test_isolate_uses_emulator = true
+
+  # A list of additional Fuchsia boot images to include in the test isolates.
+  fuchsia_additional_boot_images = []
+}
+
+# Generates a script which deploys and optionally executes a package on a
+# device.
+#
+# Parameters:
+#   package: The package() target which will be run.
+#   package_name_override: Specifies the name of the generated package, if its
+#       name is different than the |package| target name. This value must match
+#       package_name_override in the |package| target.
+#   package_deps: An array of [package, package_name_override] array pairs
+#       which specify additional dependency packages to be installed
+#       prior to execution.
+#   runner_script: The runner script implementation to use, relative to
+#       "build/fuchsia". Defaults to "test_runner.py".
+#   install_only: If true, executing the script will only install the package
+#       on the device, but not run it.
+#   is_test_exe: If true, the generated script will run the command under
+#       test_env.py and add arguments expected to be passed to test exes.
+template("fuchsia_package_runner") {
+  forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "runner_script" ])
+
+  if (defined(invoker.package_name_override)) {
+    _pkg_shortname = invoker.package_name_override
+  } else {
+    _pkg_shortname = get_label_info(invoker.package, "name")
+  }
+
+  _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") +
+             "/" + _pkg_shortname
+  _package_path = "$_pkg_dir/${_pkg_shortname}.far"
+
+  generated_run_pkg_script_path = "$root_build_dir/bin/run_${_pkg_shortname}"
+  generated_install_pkg_script_path =
+      "$root_build_dir/bin/install_$_pkg_shortname"
+
+  _generate_runner_target = "${target_name}__generate_runner"
+  _generate_installer_target = "${target_name}__generate_installer"
+
+  # Generates a script which installs and runs a test.
+  generate_wrapper(_generate_runner_target) {
+    forward_variables_from(invoker, [ "target" ])
+
+    _is_test_exe = defined(invoker.is_test_exe) && invoker.is_test_exe
+
+    if (defined(runner_script)) {
+      _runner_script = runner_script
+    } else {
+      _runner_script = "//build/fuchsia/test_runner.py"
+    }
+
+    if (_is_test_exe) {
+      executable = "//testing/test_env.py"
+      executable_args =
+          [ "@WrappedPath(" + rebase_path(_runner_script, root_out_dir) + ")" ]
+      data = [
+        _runner_script,
+        "//.vpython",
+      ]
+      data_deps = [ "//testing:test_scripts_shared" ]
+    } else {
+      executable = rebase_path(_runner_script)
+      executable_args = []
+      data = []
+      data_deps = []
+    }
+
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+
+    wrapper_script = generated_run_pkg_script_path
+
+    data_deps += [
+      invoker.package,
+
+      # Runner scripts require access to "ids.txt" for symbolization, and to
+      # the "package" from which to get the name & version to deploy, which
+      # are outputs of the archive manifest generation action.
+      "${invoker.package}__archive-manifest",
+
+      # Runner scripts require access to "meta.far" from which to calculate the
+      # expected Merkle root for the package, to verify it has been cached.
+      "${invoker.package}__archive-metadata",
+    ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    # Declares the files that are needed for test execution on the
+    # swarming test client.
+    data += [
+      "//build/fuchsia/",
+      "//build/util/lib/",
+      "//third_party/fuchsia-sdk/sdk/.build-id/",
+      "//third_party/fuchsia-sdk/sdk/bin/fpave.sh",
+      "//third_party/fuchsia-sdk/sdk/bin/fuchsia-common.sh",
+      "//third_party/fuchsia-sdk/sdk/meta/manifest.json",
+    ]
+
+    # TODO(crbug.com/1137662): Remove checkout_fuchsia_for_arm64_host from
+    # gclient_gn_args in //DEPS as well as this condition when builders have
+    # test_host_cpu set correctly.
+    if (checkout_fuchsia_for_arm64_host) {
+      test_host_cpu = "arm64"
+    }
+
+    if (test_host_cpu == "x64") {
+      data_deps +=
+          [ "//build/config/clang:llvm-symbolizer_data($host_toolchain)" ]
+    }
+
+    data += [
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/device-finder",
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm",
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot",
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm",
+
+      # TODO(crbug.com/1162314) Remove "symbolize" when transition to
+      # "symbolizer" is complete.
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolize",
+
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer",
+      "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi",
+    ]
+
+    if (test_isolate_uses_emulator) {
+      data += [
+        "${boot_image_root}/qemu/qemu-kernel.kernel",
+        "${boot_image_root}/qemu/storage-full.blk",
+        "${boot_image_root}/qemu/zircon-a.zbi",
+        "//third_party/qemu-${host_os}-${test_host_cpu}/",
+      ]
+
+      # Include AEMU for x64 emulator hosts.
+      if (test_host_cpu == "x64") {
+        data += [ "${aemu_root}/" ]
+      }
+    }
+
+    foreach(fuchsia_additional_boot_image, fuchsia_additional_boot_images) {
+      data += [ "${fuchsia_additional_boot_image}/" ]
+    }
+
+    package_paths = [ rebase_path(_package_path, root_build_dir) ]
+    if (defined(invoker.package_deps)) {
+      foreach(package_dep, invoker.package_deps) {
+        package_dep_target = package_dep[0]
+        package_dep_name = package_dep[1]
+
+        data_deps += [
+          package_dep_target,
+          package_dep_target + "__archive-manifest",
+          package_dep_target + "__archive-metadata",
+        ]
+        package_dep_path = rebase_path(
+                get_label_info(package_dep_target, "target_gen_dir") + "/" +
+                    package_dep_name + "/" + package_dep_name + ".far",
+                root_build_dir)
+        package_paths += [ package_dep_path ]
+      }
+    }
+
+    foreach(package_path, package_paths) {
+      executable_args += [
+        "--package",
+        "@WrappedPath(${package_path})",
+      ]
+    }
+
+    executable_args += [
+      "--out-dir",
+      "@WrappedPath(.)",
+      "--target-cpu",
+      target_cpu,
+      "--package-name",
+      _pkg_shortname,
+    ]
+
+    if (defined(invoker.use_test_server) && invoker.use_test_server) {
+      executable_args += [ "--enable-test-server" ]
+    }
+
+    if (default_fuchsia_build_dir_for_installation != "") {
+      executable_args += [
+        "--fuchsia-out-dir",
+        default_fuchsia_build_dir_for_installation,
+      ]
+    }
+  }
+
+  # Produces a script which installs a package and its dependencies into the
+  # Amber repository of a pre-existing Fuchsia build directory.
+  generate_wrapper(_generate_installer_target) {
+    executable = rebase_path("//build/fuchsia/deploy_to_amber_repo.py")
+    wrapper_script = generated_install_pkg_script_path
+
+    data_deps = [ invoker.package ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    # Build a list of all packages to install, and pass the list to the runner
+    # script.
+    package_paths = [ rebase_path(_package_path, root_build_dir) ]
+    if (defined(invoker.package_deps)) {
+      foreach(package_dep, invoker.package_deps) {
+        package_dep_target = package_dep[0]
+        package_dep_name = package_dep[1]
+
+        data_deps += [ package_dep_target ]
+        package_dep_path = rebase_path(
+                get_label_info(package_dep_target, "target_gen_dir") + "/" +
+                    package_dep_name + "/" + package_dep_name + ".far",
+                root_build_dir)
+        package_paths += [ package_dep_path ]
+      }
+    }
+    executable_args = []
+    foreach(package_path, package_paths) {
+      executable_args += [
+        "--package",
+        "@WrappedPath(${package_path})",
+      ]
+    }
+
+    if (default_fuchsia_build_dir_for_installation != "") {
+      executable_args += [
+        "--fuchsia-out-dir",
+        default_fuchsia_build_dir_for_installation,
+      ]
+    }
+  }
+
+  group(target_name) {
+    deps = [ ":${_generate_installer_target}" ]
+
+    if (!defined(invoker.install_only) || invoker.install_only == false) {
+      deps += [ ":${_generate_runner_target}" ]
+    }
+  }
+}
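
Both generate_wrapper() targets above expand the accumulated package_paths list into repeated --package arguments via a foreach loop. A minimal Python sketch of that expansion, with hypothetical paths, shows the resulting argument vector:

package_paths = ['chrome/app.far', 'chrome/dep.far']  # hypothetical

executable_args = []
for package_path in package_paths:
  # Each .far becomes a "--package @WrappedPath(<path>)" argument pair.
  executable_args += ['--package', '@WrappedPath(%s)' % package_path]

print(executable_args)
# ['--package', '@WrappedPath(chrome/app.far)',
#  '--package', '@WrappedPath(chrome/dep.far)']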
diff --git a/src/build/config/fuchsia/gfx_tests.cmx b/src/build/config/fuchsia/gfx_tests.cmx
new file mode 100644
index 0000000..c02975d
--- /dev/null
+++ b/src/build/config/fuchsia/gfx_tests.cmx
@@ -0,0 +1,30 @@
+{
+  "sandbox": {
+    "features": [
+      "deprecated-ambient-replace-as-executable",
+      "isolated-persistent-storage",
+      "isolated-temp",
+      "vulkan"
+    ],
+    "dev": [
+      "null",
+      "zero"
+    ],
+    "services": [
+      "fuchsia.accessibility.semantics.SemanticsManager",
+      "fuchsia.device.NameProvider",
+      "fuchsia.fonts.Provider",
+      "fuchsia.intl.PropertyProvider",
+      "fuchsia.logger.LogSink",
+      "fuchsia.memorypressure.Provider",
+      "fuchsia.process.Launcher",
+      "fuchsia.sys.Environment",
+      "fuchsia.sys.Loader",
+      "fuchsia.sysmem.Allocator",
+      "fuchsia.tracing.provider.Registry",
+      "fuchsia.ui.policy.Presenter",
+      "fuchsia.ui.scenic.Scenic",
+      "fuchsia.vulkan.loader.Loader"
+    ]
+  }
+}
diff --git a/src/build/config/fuchsia/package.gni b/src/build/config/fuchsia/package.gni
new file mode 100644
index 0000000..ff6ffd0
--- /dev/null
+++ b/src/build/config/fuchsia/package.gni
@@ -0,0 +1,114 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/fuchsia-sdk/sdk/build/component.gni")
+import("//third_party/fuchsia-sdk/sdk/build/package.gni")
+
+# DEPRECATED: Use the Fuchsia SDK's fuchsia_component() and fuchsia_package()
+# templates directly, in new code.
+#
+# Creates a Fuchsia .far package file containing a Fuchsia component.
+#
+# Parameters are:
+# package_name_override: Specifies the name of the package to generate,
+#     if different than |target_name|.
+# binary: The executable target which should be launched.
+# manifest: A path to the manifest that will be used.
+#     "testonly" targets default to using
+#     //build/config/fuchsia/tests-with-exec.cmx.
+#     Non-test targets must explicitly specify a |manifest|.
+# additional_manifests: Manifest files that should be included in the package's
+#     /meta directory. This allows packaging more than one component per
+#     package. These manifest files must specify the program/binary to run,
+#     which is not required for the main manifest file, where this entry is
+#     added during the build.
+# component_name_override: If set, specifies the name of the component.
+#     By default, the component name is the same as the package name.
+# deps: Additional targets to build and include in the package (optional).
+#
+# TODO(https://crbug.com/1050703): Migrate consumers to GN SDK equivalents.
+template("cr_fuchsia_package") {
+  assert(defined(invoker.binary))
+
+  if (defined(invoker.package_name_override)) {
+    _package_name = invoker.package_name_override
+  } else {
+    _package_name = invoker.target_name
+  }
+
+  _package_contents = [ invoker.binary ]
+  if (defined(invoker.deps)) {
+    _package_contents += invoker.deps
+  }
+
+  _component_cmx_target = target_name + "__cr-component-cmx"
+  _component_target = target_name + "__cr-component"
+  _package_components = [ ":${_component_target}" ]
+  _component_manifest = "${target_gen_dir}/${target_name}.cmx"
+
+  # Process the CMX fragment in |manifest| to get a full manifest.
+  action(_component_cmx_target) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+
+    script = "//build/config/fuchsia/build_cmx_from_fragment.py"
+
+    inputs = [ invoker.manifest ]
+    outputs = [ _component_manifest ]
+
+    args = [
+      "--cmx-fragment",
+      rebase_path(invoker.manifest),
+      "--cmx",
+      rebase_path(_component_manifest),
+      "--program",
+      get_label_info(invoker.binary, "name"),
+    ]
+  }
+
+  # Declare the primary component for this package.
+  fuchsia_component(_component_target) {
+    forward_variables_from(invoker, [ "testonly" ])
+
+    deps = [ ":${_component_cmx_target}" ]
+    data_deps = _package_contents
+    manifest = _component_manifest
+
+    if (defined(invoker.component_name_override)) {
+      manifest_output_name = "${invoker.component_name_override}"
+    } else {
+      manifest_output_name = "${_package_name}"
+    }
+  }
+
+  # Bundle manifests providing additional entrypoints into the package.
+  if (defined(invoker.additional_manifests)) {
+    foreach(filename, invoker.additional_manifests) {
+      _additional_component_target =
+          target_name + "_" + get_path_info(filename, "name")
+      _package_components += [ ":${_additional_component_target}" ]
+      fuchsia_component(_additional_component_target) {
+        forward_variables_from(invoker, [ "testonly" ])
+        data_deps = _package_contents
+        manifest = filename
+
+        # Depend upon the invoker's |deps|, in case they include a dependency
+        # responsible for generating this additional component's manifest file.
+        deps = _package_contents
+      }
+    }
+  }
+
+  fuchsia_package(target_name) {
+    forward_variables_from(invoker, [ "testonly" ])
+    package_name = _package_name
+    if (defined(invoker.excluded_files)) {
+      excluded_files = invoker.excluded_files
+    }
+    deps = _package_components
+  }
+}
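
The build_cmx_from_fragment.py script invoked by the action above is not included in this part of the diff. As a rough sketch only, assuming the fragment is plain JSON and the script merely fills in the program/binary entry passed via --program (the 'bin/' prefix is a further assumption about package layout), the merge could look like this:

import json

def build_cmx_from_fragment(fragment_path, cmx_path, program):
  # Assumption: the fragment is valid JSON lacking only the "program"
  # entry, which names the binary the component should launch.
  with open(fragment_path, 'r') as f:
    manifest = json.load(f)
  manifest['program'] = {'binary': 'bin/' + program}
  with open(cmx_path, 'w') as f:
    json.dump(manifest, f, indent=2)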
diff --git a/src/build/config/fuchsia/rules.gni b/src/build/config/fuchsia/rules.gni
new file mode 100644
index 0000000..689e130
--- /dev/null
+++ b/src/build/config/fuchsia/rules.gni
@@ -0,0 +1,5 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/generate_runner_scripts.gni")
diff --git a/src/build/config/fuchsia/sizes.gni b/src/build/config/fuchsia/sizes.gni
new file mode 100644
index 0000000..20a5bf8
--- /dev/null
+++ b/src/build/config/fuchsia/sizes.gni
@@ -0,0 +1,46 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/generate_wrapper.gni")
+
+template("compute_fuchsia_package_sizes") {
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                           ])
+    testonly = true
+    executable = "//build/fuchsia/binary_sizes.py"
+    wrapper_script = "$root_out_dir/bin/run_${target_name}"
+
+    assert(target_cpu == "arm64" || target_cpu == "x64",
+           "target_cpu must be arm64 or x64")
+
+    if (!defined(data)) {
+      data = []
+    }
+
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+
+    # Declares the files that are needed for test execution on the
+    # swarming test client.
+    data += [
+      "//build/fuchsia/",
+      "//fuchsia/release/size_tests/",
+      "//third_party/fuchsia-sdk/sdk/arch/",
+      "//third_party/fuchsia-sdk/sdk/tools/${target_cpu}/",
+    ]
+
+    executable_args = [
+      "--output-directory",
+      "@WrappedPath(.)",
+    ]
+    if (defined(invoker.executable_args)) {
+      executable_args += invoker.executable_args
+    }
+  }
+}
diff --git a/src/build/config/fuchsia/symbol_archive.gni b/src/build/config/fuchsia/symbol_archive.gni
new file mode 100644
index 0000000..9dcb53c
--- /dev/null
+++ b/src/build/config/fuchsia/symbol_archive.gni
@@ -0,0 +1,47 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+# Creates a tarball of unstripped binaries, structured according to the
+# ".build_ids" convention used by the symbolizer and GNU GDB.
+#
+# Parameters:
+#   deps: Must all be cr_fuchsia_package() or fuchsia_package() targets.
+#   ids_txt: The "ids.txt" file which lists the relative paths to unstripped
+#            executables and libraries, along with their build IDs.
+#   archive_name: The path to the compressed tarball that will be generated.
+template("symbol_archive") {
+  assert(!is_debug)
+
+  action(target_name) {
+    _ids_txt = invoker.ids_txt
+    _build_ids = invoker.archive_name
+
+    script = "//build/config/fuchsia/build_symbol_archive.py"
+
+    inputs = [ _ids_txt ]
+
+    outputs = [ _build_ids ]
+
+    # For each package in |deps| it is necessary to additionally depend upon
+    # the corresponding archive-manifest target, which is what creates the
+    # ids.txt file.
+    deps = []
+    foreach(package, invoker.deps) {
+      deps += [
+        package,
+        package + "__archive-manifest",
+      ]
+    }
+
+    args = [
+      rebase_path(_ids_txt),
+      "-o",
+      rebase_path(_build_ids),
+      "--fuchsia-build-id-dir",
+      rebase_path("//third_party/fuchsia-sdk/sdk/.build-id"),
+    ]
+  }
+}
diff --git a/src/build/config/fuchsia/test/README.md b/src/build/config/fuchsia/test/README.md
new file mode 100644
index 0000000..c5f1762
--- /dev/null
+++ b/src/build/config/fuchsia/test/README.md
@@ -0,0 +1,58 @@
+## CMX Fragments
+
+This directory contains the cmx fragments that are required for running
+Fuchsia tests hermetically. Tests start from `minimum_capabilities.test-cmx`
+and add additional capabilities as necessary by providing the
+`additional_manifest_fragments` argument. Some fragments are explained in detail
+below:
+
+### General Purpose Fragments
+
+#### font_capabilities.test-cmx
+For tests that test fonts by providing `fuchsia.fonts.Provider`.
+
+#### jit_capabilities.test-cmx
+Required by tests that execute JavaScript. Should only be required in a small
+number of tests.
+
+#### minimum_capabilities.test-cmx
+Capabilities required by anything that uses `//base/test`, used as the base
+fragment for all test suites.
+
+#### read_debug_data.test-cmx
+Required by tests that need access to their debug directory. Should only be
+required in a small number of tests.
+
+#### test_logger_capabilities.test-cmx
+For tests that test logging functionality by providing `fuchsia.logger.Log`.
+
+### WebEngine Fragments
+The following fragments are specific to WebEngine functionality, as documented
+at
+https://fuchsia.dev/reference/fidl/fuchsia.web#CreateContextParams and
+https://fuchsia.dev/reference/fidl/fuchsia.web#ContextFeatureFlags.
+Any test-specific exceptions are documented for each file.
+
+#### audio_capabilities.test-cmx
+Corresponds to the `AUDIO` flag. Required for enabling audio input and output.
+
+#### network_capabilities.test-cmx
+Corresponds to the `NETWORK` flag. Required for enabling network access. Note
+that access to the root SSL certificates is not needed if ContextProvider is
+used to launch the `Context`. The `fuchsia.device.NameProvider` dependency comes
+from fdio.
+
+#### present_view_capabilities.test-cmx
+Services that are needed to render web content in a Scenic view and present it.
+Most services are required per the FIDL documentation.
+`fuchsia.ui.policy.Presenter` is additionally required by tests that create
+views.
+
+#### vulkan_capabilities.test-cmx
+Corresponds to the `VULKAN` flag. Required for enabling GPU-accelerated
+rendering of the web content.
+
+#### web_engine_required_capabilities.test-cmx
+Contains services that need to be present when creating a
+`fuchsia.web.Context`. Note that the `fuchsia.scheduler.ProfileProvider` service
+is only used in tests that encounter memory pressure code.
diff --git a/src/build/config/fuchsia/test/access_test_data_dir.test-cmx b/src/build/config/fuchsia/test/access_test_data_dir.test-cmx
new file mode 100644
index 0000000..5757778
--- /dev/null
+++ b/src/build/config/fuchsia/test/access_test_data_dir.test-cmx
@@ -0,0 +1,7 @@
+{
+  "sandbox": {
+    "features": [
+      "isolated-cache-storage"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/audio_capabilities.test-cmx b/src/build/config/fuchsia/test/audio_capabilities.test-cmx
new file mode 100644
index 0000000..2e2013f
--- /dev/null
+++ b/src/build/config/fuchsia/test/audio_capabilities.test-cmx
@@ -0,0 +1,18 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.mediacodec.CodecFactory": "fuchsia-pkg://fuchsia.com/codec_factory#meta/codec_factory.cmx"
+      },
+      "system-services": [
+        "fuchsia.media.Audio"
+      ]
+    }
+  },
+  "sandbox": {
+    "services": [
+      "fuchsia.media.Audio",
+      "fuchsia.mediacodec.CodecFactory"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/font_capabilities.test-cmx b/src/build/config/fuchsia/test/font_capabilities.test-cmx
new file mode 100644
index 0000000..4c8661b
--- /dev/null
+++ b/src/build/config/fuchsia/test/font_capabilities.test-cmx
@@ -0,0 +1,14 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.fonts.Provider": "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cmx",
+      }
+    }
+  },
+  "sandbox": {
+    "services": [
+      "fuchsia.fonts.Provider"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/jit_capabilities.test-cmx b/src/build/config/fuchsia/test/jit_capabilities.test-cmx
new file mode 100644
index 0000000..ff70e25
--- /dev/null
+++ b/src/build/config/fuchsia/test/jit_capabilities.test-cmx
@@ -0,0 +1,7 @@
+{
+  "sandbox": {
+    "features": [
+      "deprecated-ambient-replace-as-executable"
+    ]
+  }
+}
diff --git a/src/build/config/fuchsia/test/minimum_capabilities.test-cmx b/src/build/config/fuchsia/test/minimum_capabilities.test-cmx
new file mode 100644
index 0000000..a1d469d
--- /dev/null
+++ b/src/build/config/fuchsia/test/minimum_capabilities.test-cmx
@@ -0,0 +1,29 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.intl.PropertyProvider": "fuchsia-pkg://fuchsia.com/intl_property_manager#meta/intl_property_manager.cmx"
+      },
+      "system-services": [
+        "fuchsia.boot.ReadOnlyLog"
+      ]
+    }
+  },
+  "sandbox": {
+    "dev": [
+      "null",
+      "zero"
+    ],
+    "features": [
+      "isolated-persistent-storage",
+      "isolated-temp"
+    ],
+    "services": [
+      "fuchsia.intl.PropertyProvider",
+      "fuchsia.logger.LogSink",
+      "fuchsia.process.Launcher",
+      "fuchsia.sys.Launcher",
+      "fuchsia.sys.Loader"
+    ]
+  }
+}
diff --git a/src/build/config/fuchsia/test/network_capabilities.test-cmx b/src/build/config/fuchsia/test/network_capabilities.test-cmx
new file mode 100644
index 0000000..54b9e41
--- /dev/null
+++ b/src/build/config/fuchsia/test/network_capabilities.test-cmx
@@ -0,0 +1,25 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.net.NameLookup": "fuchsia-pkg://fuchsia.com/dns-resolver#meta/dns-resolver.cmx",
+        "fuchsia.net.interfaces.State": "fuchsia-pkg://fuchsia.com/netstack#meta/netstack.cmx",
+        "fuchsia.posix.socket.Provider": "fuchsia-pkg://fuchsia.com/netstack#meta/netstack.cmx"
+      },
+      "system-services": [
+        "fuchsia.device.NameProvider"
+      ]
+    }
+  },
+  "sandbox": {
+    "features": [
+      "root-ssl-certificates"
+    ],
+    "services": [
+      "fuchsia.device.NameProvider",
+      "fuchsia.net.NameLookup",
+      "fuchsia.net.interfaces.State",
+      "fuchsia.posix.socket.Provider"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/present_view_capabilities.test-cmx b/src/build/config/fuchsia/test/present_view_capabilities.test-cmx
new file mode 100644
index 0000000..201c8b2
--- /dev/null
+++ b/src/build/config/fuchsia/test/present_view_capabilities.test-cmx
@@ -0,0 +1,24 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.accessibility.semantics.SemanticsManager": "fuchsia-pkg://fuchsia.com/a11y-manager#meta/a11y-manager.cmx",
+        "fuchsia.ui.input3.Keyboard": "fuchsia-pkg://fuchsia.com/ime_service#meta/ime_service.cmx",
+      },
+      "system-services": [
+        "fuchsia.sysmem.Allocator",
+        "fuchsia.ui.policy.Presenter",
+        "fuchsia.ui.scenic.Scenic"
+      ]
+    }
+  },
+  "sandbox": {
+    "services": [
+      "fuchsia.accessibility.semantics.SemanticsManager",
+      "fuchsia.sysmem.Allocator",
+      "fuchsia.ui.input3.Keyboard",
+      "fuchsia.ui.policy.Presenter",
+      "fuchsia.ui.scenic.Scenic"
+    ]
+  }
+}
diff --git a/src/build/config/fuchsia/test/read_debug_data.test-cmx b/src/build/config/fuchsia/test/read_debug_data.test-cmx
new file mode 100644
index 0000000..b0c95b0
--- /dev/null
+++ b/src/build/config/fuchsia/test/read_debug_data.test-cmx
@@ -0,0 +1,7 @@
+{
+  "sandbox": {
+    "features": [
+      "hub"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/test_logger_capabilities.test-cmx b/src/build/config/fuchsia/test/test_logger_capabilities.test-cmx
new file mode 100644
index 0000000..68b2a67
--- /dev/null
+++ b/src/build/config/fuchsia/test/test_logger_capabilities.test-cmx
@@ -0,0 +1,7 @@
+{
+  "sandbox": {
+    "services": [
+      "fuchsia.logger.Log"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/vulkan_capabilities.test-cmx b/src/build/config/fuchsia/test/vulkan_capabilities.test-cmx
new file mode 100644
index 0000000..0436ffd
--- /dev/null
+++ b/src/build/config/fuchsia/test/vulkan_capabilities.test-cmx
@@ -0,0 +1,19 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "system-services": [
+        "fuchsia.sysmem.Allocator",
+        "fuchsia.vulkan.loader.Loader"
+      ]
+    }
+  },
+  "sandbox": {
+    "features": [
+      "vulkan"
+    ],
+    "services": [
+      "fuchsia.sysmem.Allocator",
+      "fuchsia.vulkan.loader.Loader"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx b/src/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx
new file mode 100644
index 0000000..4cb61fe
--- /dev/null
+++ b/src/build/config/fuchsia/test/web_engine_required_capabilities.test-cmx
@@ -0,0 +1,25 @@
+{
+  "facets": {
+    "fuchsia.test": {
+      "injected-services": {
+        "fuchsia.fonts.Provider": "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cmx",
+        "fuchsia.memorypressure.Provider": "fuchsia-pkg://fuchsia.com/memory_monitor#meta/memory_monitor.cmx",
+        "fuchsia.web.ContextProvider": "fuchsia-pkg://fuchsia.com/web_engine#meta/context_provider.cmx",
+      },
+      "system-services": [
+        "fuchsia.device.NameProvider",
+        "fuchsia.scheduler.ProfileProvider",
+        "fuchsia.sysmem.Allocator"
+      ]
+    }
+  },
+  "sandbox": {
+    "services": [
+      "fuchsia.device.NameProvider",
+      "fuchsia.fonts.Provider",
+      "fuchsia.memorypressure.Provider",
+      "fuchsia.sysmem.Allocator",
+      "fuchsia.web.ContextProvider"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/build/config/gcc/BUILD.gn b/src/build/config/gcc/BUILD.gn
new file mode 100644
index 0000000..154b259
--- /dev/null
+++ b/src/build/config/gcc/BUILD.gn
@@ -0,0 +1,116 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # When non-empty, overrides the target rpath value. This allows a user to
+  # make a Chromium build where binaries and shared libraries are meant to be
+  # installed into separate directories, like /usr/bin/chromium and
+  # /usr/lib/chromium for instance. It is useful when a build system that
+  # generates a whole target root filesystem (like Yocto) is used on top of gn,
+  # especially when cross-compiling.
+  # Note: this gn arg is similar to the gyp target_rpath generator flag.
+  gcc_target_rpath = ""
+  ldso_path = ""
+}
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  cflags = [ "-fvisibility=hidden" ]
+
+  # Visibility attribute is not supported on AIX.
+  if (current_os != "aix") {
+    cflags_cc = [ "-fvisibility-inlines-hidden" ]
+    cflags_objcc = cflags_cc
+  }
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# which would otherwise error out (e.g. -fsanitize=cfi-unrelated-cast).
+config("symbol_visibility_default") {
+  cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_config.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+#  if ((is_linux || is_chromeos) && !is_component_build) {
+#    configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+#  }
+config("rpath_for_built_shared_libraries") {
+  if (!is_android) {
+    # Note: Android doesn't support rpath.
+    if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
+      ldflags = [
+        # Want to pass "\$". GN will re-escape as required for ninja.
+        "-Wl,-rpath=\$ORIGIN",
+      ]
+    } else {
+      ldflags = [ "-Wl,-rpath=${gcc_target_rpath}" ]
+    }
+    if (current_toolchain == default_toolchain && ldso_path != "") {
+      ldflags += [ "-Wl,--dynamic-linker=${ldso_path}" ]
+    }
+  }
+}
+
+if (is_component_build && !is_android) {
+  # See the rpath_for... config above for why this is necessary for component
+  # builds.
+  executable_and_shared_library_configs_ =
+      [ ":rpath_for_built_shared_libraries" ]
+} else {
+  executable_and_shared_library_configs_ = []
+}
+
+# Settings for executables.
+config("executable_config") {
+  configs = executable_and_shared_library_configs_
+  ldflags = [ "-pie" ]
+  if (is_android) {
+    ldflags += [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  }
+
+  if (!is_android && current_os != "aix") {
+    ldflags += [
+      # TODO(GYP): Do we need a check on the binutils version here?
+      #
+      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+      # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+      "-Wl,--disable-new-dtags",
+    ]
+  }
+}
+
+# Settings for shared libraries.
+config("shared_library_config") {
+  configs = executable_and_shared_library_configs_
+}
diff --git a/src/build/config/get_host_byteorder.py b/src/build/config/get_host_byteorder.py
new file mode 100755
index 0000000..fc01d85
--- /dev/null
+++ b/src/build/config/get_host_byteorder.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Get Byteorder of host architecture"""
+
+from __future__ import print_function
+
+import sys
+
+print(sys.byteorder)
diff --git a/src/build/config/host_byteorder.gni b/src/build/config/host_byteorder.gni
new file mode 100644
index 0000000..48a1a7f
--- /dev/null
+++ b/src/build/config/host_byteorder.gni
@@ -0,0 +1,27 @@
+# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "host_byteorder" variable.
+# Note that this is currently used only for building v8.
+# The chromium code generally assumes little-endianness.
+declare_args() {
+  host_byteorder = "undefined"
+}
+
+# Detect host byteorder
+# ppc64 can be either BE or LE
+if (host_cpu == "ppc64") {
+  if (current_os == "aix") {
+    host_byteorder = "big"
+  } else {
+    # Only use the script when absolutely necessary
+    host_byteorder =
+        exec_script("//build/config/get_host_byteorder.py", [], "trim string")
+  }
+} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" ||
+           host_cpu == "mips" || host_cpu == "mips64") {
+  host_byteorder = "big"
+} else {
+  host_byteorder = "little"
+}
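
For readers more comfortable with Python than GN, the branching above maps directly onto the following sketch; sys.byteorder only gives a meaningful answer when the snippet runs on the ppc64 host being probed, which is why the .gni shells out to get_host_byteorder.py in that case:

import sys

def host_byteorder(host_cpu, current_os):
  if host_cpu == 'ppc64':
    if current_os == 'aix':
      return 'big'
    # What exec_script'ing get_host_byteorder.py reports on this host.
    return sys.byteorder
  if host_cpu in ('ppc', 's390', 's390x', 'mips', 'mips64'):
    return 'big'
  return 'little'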
diff --git a/src/build/config/ios/BUILD.gn b/src/build/config/ios/BUILD.gn
new file mode 100644
index 0000000..c4cd317
--- /dev/null
+++ b/src/build/config/ios/BUILD.gn
@@ -0,0 +1,264 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # Enabling this option makes clang compile to an intermediate
+  # representation ("bitcode"), and not to native code. This is preferred
+  # when including WebRTC in the apps that will be sent to Apple's App Store
+  # and mandatory for the apps that run on watchOS or tvOS.
+  # The option only works when building with Xcode (use_xcode_clang = true).
+  # Mimicking how Xcode handles it, the production builds (is_debug = false)
+  # get real bitcode sections added, while the debug builds (is_debug = true)
+  # only get bitcode-section "markers" added in them.
+  # NOTE: This option is ignored when building versions for the iOS simulator,
+  # where a part of libvpx is compiled from the assembly code written using
+  # Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
+  # That is not a limitation for now as Xcode mandates the presence of bitcode
+  # only when building bitcode-enabled projects for real devices (ARM CPUs).
+  enable_ios_bitcode = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    triplet_cpu = "x86_64"
+  } else if (current_cpu == "x86") {
+    triplet_cpu = "i386"
+  } else if (current_cpu == "arm" || current_cpu == "armv7") {
+    triplet_cpu = "armv7"
+  } else if (current_cpu == "arm64") {
+    triplet_cpu = "arm64"
+  } else {
+    assert(false, "unsupported cpu: $current_cpu")
+  }
+
+  # Environment.
+  if (target_environment == "simulator") {
+    triplet_environment = "-simulator"
+  } else if (target_environment == "device") {
+    triplet_environment = ""
+  } else if (target_environment == "catalyst") {
+    triplet_environment = "-macabi"
+  } else {
+    assert(false, "unsupported environment: $target_environment")
+  }
+
+  # OS.
+  triplet_os = "apple-ios"
+
+  # Set target.
+  common_flags = [
+    "-target",
+    "$triplet_cpu-$triplet_os$ios_deployment_target$triplet_environment",
+  ]
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build systems rebuild things
+  # when their command line changes). Nothing should ever read this define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  swiftflags = common_flags
+
+  swiftflags += [
+    "-swift-version",
+    "5",
+  ]
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective-C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  ldflags = common_flags
+}
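
The triplet assembly above boils down to a simple string concatenation. A Python sketch of the same computation (the deployment target value is illustrative):

def ios_target_triple(current_cpu, target_environment, deployment_target):
  cpu_map = {'x64': 'x86_64', 'x86': 'i386', 'arm': 'armv7',
             'armv7': 'armv7', 'arm64': 'arm64'}
  env_map = {'simulator': '-simulator', 'device': '', 'catalyst': '-macabi'}
  return '%s-apple-ios%s%s' % (cpu_map[current_cpu], deployment_target,
                               env_map[target_environment])

# e.g. an arm64 device build targeting iOS 14.0:
assert ios_target_triple('arm64', 'device', '14.0') == 'arm64-apple-ios14.0'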
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is iOS-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # The variable ios_sdk_path is relative to root_build_dir when using Goma RBE
+  # and system Xcode (since Goma RBE only supports paths relative to source).
+  # Rebase the value in that case since gn does not convert paths in compiler
+  # flags (since it is not aware they are paths).
+  _sdk_root = ios_sdk_path
+  if (use_system_xcode && use_goma) {
+    _sdk_root = rebase_path(ios_sdk_path, root_build_dir)
+  }
+
+  common_flags = [
+    "-isysroot",
+    _sdk_root,
+  ]
+  swiftflags = [
+    "-sdk",
+    _sdk_root,
+  ]
+
+  if (target_environment == "catalyst") {
+    common_flags += [
+      "-isystem",
+      "$_sdk_root/System/iOSSupport/usr/include",
+      "-iframework",
+      "$_sdk_root/System/iOSSupport/System/Library/Frameworks",
+    ]
+  }
+
+  if (use_xcode_clang && enable_ios_bitcode && target_environment == "device") {
+    if (is_debug) {
+      common_flags += [ "-fembed-bitcode-marker" ]
+    } else {
+      common_flags += [ "-fembed-bitcode" ]
+    }
+  }
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+}
+
+config("ios_executable_flags") {
+  ldflags = []
+
+  # On "catalyst", the bundle structure is different (uses the same structure
+  # as a regular macOS app), so an additional -rpath is required.
+  if (target_environment == "catalyst") {
+    ldflags += [ "-Wl,-rpath,@loader_path/../Frameworks" ]
+  }
+
+  ldflags += [ "-Wl,-rpath,@executable_path/Frameworks" ]
+}
+
+config("ios_extension_executable_flags") {
+  configs = default_executable_configs
+
+  ldflags = [
+    "-e",
+    "_NSExtensionMain",
+    "-fapplication-extension",
+  ]
+
+  # On "catalyst", the bundle structure is different (uses the same structure
+  # as a regular macOS app), so an additional -rpath is required.
+  if (target_environment == "catalyst") {
+    ldflags += [ "-Wl,-rpath,@loader_path/../../../../Frameworks" ]
+  }
+
+  ldflags += [ "-Wl,-rpath,@executable_path/../../Frameworks" ]
+}
+
+config("ios_dynamic_flags") {
+  ldflags = [
+    # Always load Objective-C categories and class.
+    "-Wl,-ObjC",
+
+    # Uses version 2 of Objective-C ABI.
+    "-Wl,-objc_abi_version,2",
+  ]
+
+  # The path to the Swift compatibility libraries (required to run code built
+# with version N of the SDK on older versions of the OS) is relative to the
+  # toolchains directory and changes with the environment.
+  _swift_compatibility_libs_dir_prefix = "$ios_toolchains_path/usr/lib/swift"
+  if (target_environment == "simulator") {
+    _swift_compatibility_libs_dir =
+        "$_swift_compatibility_libs_dir_prefix/iphonesimulator"
+  } else if (target_environment == "device") {
+    _swift_compatibility_libs_dir =
+        "$_swift_compatibility_libs_dir_prefix/iphoneos"
+  } else if (target_environment == "catalyst") {
+    _swift_compatibility_libs_dir =
+        "$_swift_compatibility_libs_dir_prefix/maccatalyst"
+  }
+
+  lib_dirs = [
+    "$ios_sdk_path/usr/lib/swift",
+    _swift_compatibility_libs_dir,
+  ]
+}
+
+config("ios_shared_library_flags") {
+  ldflags = [
+    "-Wl,-rpath,@executable_path/Frameworks",
+    "-Wl,-rpath,@loader_path/Frameworks",
+  ]
+}
+
+config("disable_implicit_retain_self_warning") {
+  cflags_objc = [ "-Wno-implicit-retain-self" ]
+  cflags_objcc = cflags_objc
+}
+
+config("xctest_config") {
+  framework_dirs = [ "$ios_sdk_platform_path/Developer/Library/Frameworks" ]
+
+  frameworks = [
+    "Foundation.framework",
+    "XCTest.framework",
+  ]
+}
+
+group("xctest") {
+  public_configs = [ ":xctest_config" ]
+}
+
+_xctrunner_path =
+    "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app"
+
+# When building with Goma RBE, $ios_sdk_platform_path corresponds to a symlink
+# below $root_build_dir that points to the real SDK to use. Because the files
+# are below $root_build_dir, it is not possible to list them as a target input
+# without gn complaining (as it can't find a target creating those files).
+#
+# The symlinks are created by //build/config/apple/sdk_info.py script invoked
+# via exec_script() from //build/config/{ios/ios_sdk.gni,mac/mac_sdk.gni}.
+# As the invocation is done by exec_script, there is no target that can list
+# those files as output.
+#
+# To work around this, add a target that pretends to create those files
+# (but does nothing). See https://crbug.com/1061487 for why this is needed.
+if (use_system_xcode && use_goma) {
+  action("copy_xctrunner_app") {
+    testonly = true
+    script = "//build/noop.py"
+    outputs = [
+      "$_xctrunner_path/Info.plist",
+      "$_xctrunner_path/PkgInfo",
+      "$_xctrunner_path/XCTRunner",
+    ]
+  }
+}
+
+# When creating the test runner for an XCUITest, the arm64e slice of the binary
+# must be removed (at least until the app ships with arm64e slice which is not
+# yet supported by Apple).
+action("xctest_runner_without_arm64e") {
+  testonly = true
+  script = "//build/config/ios/strip_arm64e.py"
+  sources = [ "$_xctrunner_path/XCTRunner" ]
+  outputs = [ "$target_out_dir/XCTRunner" ]
+  args = [
+    "--output",
+    rebase_path(outputs[0], root_build_dir),
+    "--input",
+    rebase_path(sources[0], root_build_dir),
+    "--xcode-version",
+    xcode_version,
+  ]
+
+  if (use_system_xcode && use_goma) {
+    deps = [ ":copy_xctrunner_app" ]
+  }
+}
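
strip_arm64e.py itself is not part of this diff. Assuming it thins the fat binary with lipo, a minimal sketch could look like the following; a real implementation would first check `lipo -archs`, since -remove fails when the slice is absent:

import subprocess

def strip_arm64e(input_path, output_path):
  # lipo -remove drops a single architecture slice from a fat binary.
  subprocess.check_call(
      ['lipo', input_path, '-remove', 'arm64e', '-output', output_path])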
diff --git a/src/build/config/ios/BuildInfo.plist b/src/build/config/ios/BuildInfo.plist
new file mode 100644
index 0000000..3595e5a
--- /dev/null
+++ b/src/build/config/ios/BuildInfo.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>BuildMachineOSBuild</key>
+  <string>${BUILD_MACHINE_OS_BUILD}</string>
+  <key>CFBundleSupportedPlatforms</key>
+  <array>
+    <string>${IOS_SUPPORTED_PLATFORM}</string>
+  </array>
+  <key>DTCompiler</key>
+  <string>${GCC_VERSION}</string>
+  <key>DTPlatformName</key>
+  <string>${IOS_PLATFORM_NAME}</string>
+  <key>DTPlatformVersion</key>
+  <string>${IOS_PLATFORM_VERSION}</string>
+  <key>DTPlatformBuild</key>
+  <string>${IOS_PLATFORM_BUILD}</string>
+  <key>DTSDKBuild</key>
+  <string>${IOS_SDK_BUILD}</string>
+  <key>DTSDKName</key>
+  <string>${IOS_SDK_NAME}</string>
+  <key>MinimumOSVersion</key>
+  <string>${IOS_DEPLOYMENT_TARGET}</string>
+  <key>DTXcode</key>
+  <string>${XCODE_VERSION}</string>
+  <key>DTXcodeBuild</key>
+  <string>${XCODE_BUILD}</string>
+  <key>UIDeviceFamily</key>
+  <array>
+    <integer>1</integer>
+    <integer>2</integer>
+  </array>
+</dict>
+</plist>
diff --git a/src/build/config/ios/Host-Info.plist b/src/build/config/ios/Host-Info.plist
new file mode 100644
index 0000000..9f6f5de
--- /dev/null
+++ b/src/build/config/ios/Host-Info.plist
@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>en</string>
+	<key>CFBundleDisplayName</key>
+	<string>${PRODUCT_NAME}</string>
+	<key>CFBundleExecutable</key>
+	<string>${EXECUTABLE_NAME}</string>
+	<key>CFBundleIdentifier</key>
+	<string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>${PRODUCT_NAME}</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1.0</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>NSAppTransportSecurity</key>
+	<dict>
+		<key>NSAllowsArbitraryLoads</key>
+		<true/>
+	</dict>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UILaunchImages</key>
+	<array>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{320, 480}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{320, 568}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{375, 667}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{414, 736}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Landscape</string>
+			<key>UILaunchImageSize</key>
+			<string>{414, 736}</string>
+		</dict>
+	</array>
+	<key>UILaunchImages~ipad</key>
+	<array>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{768, 1024}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Landscape</string>
+			<key>UILaunchImageSize</key>
+			<string>{768, 1024}</string>
+		</dict>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationPortraitUpsideDown</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+</dict>
+</plist>
diff --git a/src/build/config/ios/Module-Info.plist b/src/build/config/ios/Module-Info.plist
new file mode 100644
index 0000000..d1bf77f
--- /dev/null
+++ b/src/build/config/ios/Module-Info.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>en</string>
+  <key>CFBundleExecutable</key>
+  <string>${EXECUTABLE_NAME}</string>
+  <key>CFBundleIdentifier</key>
+  <string>${IOS_BUNDLE_ID_PREFIX}.${MODULE_BUNDLE_ID:rfc1034identifier}</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundlePackageType</key>
+  <string>BNDL</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1</string>
+  <key>NSPrincipalClass</key>
+  <string>${XCTEST_BUNDLE_PRINCIPAL_CLASS}</string>
+</dict>
+</plist>
diff --git a/src/build/config/ios/asset_catalog.gni b/src/build/config/ios/asset_catalog.gni
new file mode 100644
index 0000000..84dd92c
--- /dev/null
+++ b/src/build/config/ios/asset_catalog.gni
@@ -0,0 +1,150 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+
+# This template declares a bundle_data target that references an asset
+# catalog so that it is compiled to the asset catalog of the generated
+# bundle.
+#
+# The create_bundle target requires that all asset catalogs are part of an
+# .xcasset bundle. This requirement comes from actool that only receives
+# the path to the .xcasset bundle directory and not to the individual
+# .imageset directories.
+#
+# The requirement is a bit problematic as it prevents compiling only a
+# subset of the asset catalogs that are contained in a .xcassets. This template
+# fixes that by instead copying the content of the asset catalog to a temporary
+# .xcassets directory (below $root_out_dir) and defining a bundle_data
+# target that refers to those copies (this is efficient as the "copy" is
+# implemented by hardlinking if possible on macOS).
+#
+# Since the create_bundle target will only refer to the .xcassets directory,
+# an additional "action" target that runs a dummy script is defined. It
+# does nothing but pretend to generate the .xcassets directory (while
+# it is really created as a side-effect of the "copy" step). This allows
+# working around the check in "gn" that all inputs below $root_out_dir have
+# to be outputs of another target with a public dependency path.
+#
+# This template also ensures that the files are only copied once when the
+# build targets multiple architectures at the same time (aka "fat build").
+#
+# Arguments
+#
+#     sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
+#     asset_type:
+#       required, string, type of the asset catalog, that is the extension
+#       of the directory containing the images and the Contents.json file.
+#
+template("asset_catalog") {
+  assert(defined(invoker.sources) && invoker.sources != [],
+         "sources must be defined and not empty for $target_name")
+
+  assert(defined(invoker.asset_type) && invoker.asset_type != "",
+         "asset_type must be defined and not empty for $target_name")
+
+  if (is_fat_secondary_toolchain) {
+    group(target_name) {
+      public_deps = [ ":$target_name($primary_fat_toolchain_name)" ]
+    }
+  } else {
+    _copy_target_name = target_name + "__copy"
+    _data_target_name = target_name
+
+    _sources = invoker.sources
+    _outputs = []
+
+    # The compilation of resources into Assets.car is enabled automatically
+    # by the "create_bundle" target if any of the "bundle_data" sources's
+    # path is in a .xcassets directory and matches one of the know asset
+    # catalog type.
+    _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
+    _output_dir = "$_xcassets_dir/" +
+                  get_path_info(get_path_info(_sources[0], "dir"), "file")
+
+    foreach(_source, invoker.sources) {
+      _dir = get_path_info(_source, "dir")
+      _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
+
+      assert(get_path_info(_dir, "extension") == invoker.asset_type,
+             "$_source dirname must have .${invoker.asset_type} extension")
+    }
+
+    action(_copy_target_name) {
+      # Forward "deps", "public_deps" and "testonly" in case some of the
+      # source files are generated.
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "testonly",
+                             ])
+
+      script = "//build/config/ios/hardlink.py"
+
+      visibility = [ ":$_data_target_name" ]
+      sources = _sources
+      outputs = _outputs + [ _xcassets_dir ]
+
+      args = [
+        rebase_path(get_path_info(_sources[0], "dir"), root_build_dir),
+        rebase_path(_output_dir, root_build_dir),
+      ]
+    }
+
+    bundle_data(_data_target_name) {
+      forward_variables_from(invoker,
+                             "*",
+                             [
+                               "deps",
+                               "outputs",
+                               "public_deps",
+                               "sources",
+                             ])
+
+      sources = _outputs
+      outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+      public_deps = [ ":$_copy_target_name" ]
+    }
+  }
+}
+
+# These templates are specialisations of the asset_catalog template for known
+# asset catalog types (appiconset, colorset, imageset, launchimage).
+#
+# Arguments
+#
+#     sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
+template("appiconset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "appiconset"
+  }
+}
+template("colorset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "colorset"
+  }
+}
+template("imageset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "imageset"
+  }
+}
+template("launchimage") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "launchimage"
+  }
+}
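
The nested get_path_info() calls above compute, for each source file, a destination under the freshly created .xcassets directory. A Python sketch of the same path computation, with hypothetical inputs:

import posixpath

def xcassets_output(source, target_gen_dir, target_name):
  # "$target_gen_dir/${target_name}.xcassets/<catalog dir>/<file name>"
  catalog_dir = posixpath.basename(posixpath.dirname(source))
  return posixpath.join(target_gen_dir, target_name + '.xcassets',
                        catalog_dir, posixpath.basename(source))

assert xcassets_output(
    'ui/icons/app.imageset/Contents.json', 'gen/ui', 'icons'
) == 'gen/ui/icons.xcassets/app.imageset/Contents.json'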
diff --git a/src/build/config/ios/codesign.py b/src/build/config/ios/codesign.py
new file mode 100644
index 0000000..15d25a7
--- /dev/null
+++ b/src/build/config/ios/codesign.py
@@ -0,0 +1,691 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import codecs
+import datetime
+import fnmatch
+import glob
+import json
+import os
+import plistlib
+import shutil
+import subprocess
+import sys
+import tempfile
+
+if sys.version_info.major < 3:
+  basestring_compat = basestring
+else:
+  basestring_compat = str
+
+
+def GetProvisioningProfilesDir():
+  """Returns the location of the installed mobile provisioning profiles.
+
+  Returns:
+    The path to the directory containing the installed mobile provisioning
+    profiles as a string.
+  """
+  return os.path.join(
+      os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+
+
+def ReadPlistFromString(plist_bytes):
+  """Parse property list from given |plist_bytes|.
+
+    Args:
+      plist_bytes: contents of property list to load. Must be bytes in python 3.
+
+    Returns:
+      The contents of property list as a python object.
+    """
+  if sys.version_info.major == 2:
+    return plistlib.readPlistFromString(plist_bytes)
+  else:
+    return plistlib.loads(plist_bytes)
+
+
+def LoadPlistFile(plist_path):
+  """Loads property list file at |plist_path|.
+
+  Args:
+    plist_path: path to the property list file to load.
+
+  Returns:
+    The content of the property list file as a python object.
+  """
+  if sys.version_info.major == 2:
+    return plistlib.readPlistFromString(
+        subprocess.check_output(
+            ['xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
+  else:
+    with open(plist_path, 'rb') as fp:
+      return plistlib.load(fp)
+
+
+def CreateSymlink(value, location):
+  """Creates a symlink to |value| at |location|, replacing any existing file."""
+  # Use lexists() so that an existing but dangling symlink is also removed.
+  if os.path.lexists(location):
+    os.unlink(location)
+  os.symlink(value, location)
+
+
+class Bundle(object):
+  """Wraps a bundle."""
+
+  def __init__(self, bundle_path, platform):
+    """Initializes the Bundle object with data from bundle Info.plist file."""
+    self._path = bundle_path
+    self._kind = Bundle.Kind(platform, os.path.splitext(bundle_path)[-1])
+    self._data = None
+
+  def Load(self):
+    self._data = LoadPlistFile(self.info_plist_path)
+
+  @staticmethod
+  def Kind(platform, extension):
+    if platform == 'iphonesimulator' or platform == 'iphoneos':
+      return 'ios'
+    if platform == 'macosx':
+      if extension == '.framework':
+        return 'mac_framework'
+      return 'mac'
+    raise ValueError('unknown bundle type %s for %s' % (extension, platform))
+
+  @property
+  def kind(self):
+    return self._kind
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def contents_dir(self):
+    if self._kind == 'mac':
+      return os.path.join(self.path, 'Contents')
+    if self._kind == 'mac_framework':
+      return os.path.join(self.path, 'Versions/A')
+    return self.path
+
+  @property
+  def executable_dir(self):
+    if self._kind == 'mac':
+      return os.path.join(self.contents_dir, 'MacOS')
+    return self.contents_dir
+
+  @property
+  def resources_dir(self):
+    if self._kind == 'mac' or self._kind == 'mac_framework':
+      return os.path.join(self.contents_dir, 'Resources')
+    return self.path
+
+  @property
+  def info_plist_path(self):
+    if self._kind == 'mac_framework':
+      return os.path.join(self.resources_dir, 'Info.plist')
+    return os.path.join(self.contents_dir, 'Info.plist')
+
+  @property
+  def signature_dir(self):
+    return os.path.join(self.contents_dir, '_CodeSignature')
+
+  @property
+  def identifier(self):
+    return self._data['CFBundleIdentifier']
+
+  @property
+  def binary_name(self):
+    return self._data['CFBundleExecutable']
+
+  @property
+  def binary_path(self):
+    return os.path.join(self.executable_dir, self.binary_name)
+
+  def Validate(self, expected_mappings):
+    """Checks that keys in the bundle have the expected value.
+
+    Args:
+      expected_mappings: a dictionary of string to object, each mapping will
+      be looked up in the bundle data to check it has the same value (missing
+      values will be ignored)
+
+    Returns:
+      A dictionary of the keys whose values differ between expected_mappings
+      and the content of the bundle (i.e. errors) so that the caller can
+      format the error message. The dictionary will be empty if there are no
+      errors.
+    """
+    errors = {}
+    for key, expected_value in expected_mappings.items():
+      if key in self._data:
+        value = self._data[key]
+        if value != expected_value:
+          errors[key] = (value, expected_value)
+    return errors
+
+
+class ProvisioningProfile(object):
+  """Wraps a mobile provisioning profile file."""
+
+  def __init__(self, provisioning_profile_path):
+    """Initializes the ProvisioningProfile with data from profile file."""
+    self._path = provisioning_profile_path
+    self._data = ReadPlistFromString(
+        subprocess.check_output([
+            'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA', '-i',
+            provisioning_profile_path
+        ]))
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def team_identifier(self):
+    return self._data.get('TeamIdentifier', [''])[0]
+
+  @property
+  def name(self):
+    return self._data.get('Name', '')
+
+  @property
+  def application_identifier_pattern(self):
+    return self._data.get('Entitlements', {}).get('application-identifier', '')
+
+  @property
+  def application_identifier_prefix(self):
+    return self._data.get('ApplicationIdentifierPrefix', [''])[0]
+
+  @property
+  def entitlements(self):
+    return self._data.get('Entitlements', {})
+
+  @property
+  def expiration_date(self):
+    return self._data.get('ExpirationDate', datetime.datetime.now())
+
+  def ValidToSignBundle(self, bundle_identifier):
+    """Checks whether the provisioning profile can sign bundle_identifier.
+
+    Args:
+      bundle_identifier: the identifier of the bundle that needs to be signed.
+
+    Returns:
+      True if the mobile provisioning profile can be used to sign a bundle
+      with the corresponding bundle_identifier, False otherwise.
+    """
+    return fnmatch.fnmatch(
+        '%s.%s' % (self.application_identifier_prefix, bundle_identifier),
+        self.application_identifier_pattern)
+
+  def Install(self, installation_path):
+    """Copies mobile provisioning profile info to |installation_path|."""
+    shutil.copy2(self.path, installation_path)
+
+
+class Entitlements(object):
+  """Wraps an Entitlement plist file."""
+
+  def __init__(self, entitlements_path):
+    """Initializes Entitlements object from entitlement file."""
+    self._path = entitlements_path
+    self._data = LoadPlistFile(self._path)
+
+  @property
+  def path(self):
+    return self._path
+
+  def ExpandVariables(self, substitutions):
+    self._data = self._ExpandVariables(self._data, substitutions)
+
+  def _ExpandVariables(self, data, substitutions):
+    if isinstance(data, basestring_compat):
+      for key, substitution in substitutions.items():
+        data = data.replace('$(%s)' % (key,), substitution)
+      return data
+
+    if isinstance(data, dict):
+      for key, value in data.items():
+        data[key] = self._ExpandVariables(value, substitutions)
+      return data
+
+    if isinstance(data, list):
+      for i, value in enumerate(data):
+        data[i] = self._ExpandVariables(value, substitutions)
+
+    return data
+
+  def LoadDefaults(self, defaults):
+    for key, value in defaults.items():
+      if key not in self._data:
+        self._data[key] = value
+
+  def WriteTo(self, target_path):
+    with open(target_path, 'wb') as fp:
+      if sys.version_info.major == 2:
+        plistlib.writePlist(self._data, fp)
+      else:
+        plistlib.dump(self._data, fp)
+
+
+def FindProvisioningProfile(bundle_identifier, required):
+  """Finds mobile provisioning profile to use to sign bundle.
+
+  Args:
+    bundle_identifier: the identifier of the bundle to sign.
+    required: whether to terminate the script with an error if no matching
+        provisioning profile is found.
+
+  Returns:
+    The ProvisioningProfile object that can be used to sign the Bundle
+    object or None if no matching provisioning profile was found.
+  """
+  provisioning_profile_paths = glob.glob(
+      os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
+
+  # Iterate over all installed mobile provisioning profiles and filter those
+  # that can be used to sign the bundle, ignoring expired ones.
+  now = datetime.datetime.now()
+  valid_provisioning_profiles = []
+  one_hour = datetime.timedelta(0, 3600)
+  for provisioning_profile_path in provisioning_profile_paths:
+    provisioning_profile = ProvisioningProfile(provisioning_profile_path)
+    if provisioning_profile.expiration_date - now < one_hour:
+      sys.stderr.write(
+          'Warning: ignoring expired provisioning profile: %s.\n' %
+          provisioning_profile_path)
+      continue
+    if provisioning_profile.ValidToSignBundle(bundle_identifier):
+      valid_provisioning_profiles.append(provisioning_profile)
+
+  if not valid_provisioning_profiles:
+    if required:
+      sys.stderr.write(
+          'Error: no mobile provisioning profile found for "%s".\n' %
+          bundle_identifier)
+      sys.exit(1)
+    return None
+
+  # Select the most specific mobile provisioning profile, i.e. the one with
+  # the longest application identifier pattern (prefer the one with the latest
+  # expiration date as a secondary criteria).
+  selected_provisioning_profile = max(
+      valid_provisioning_profiles,
+      key=lambda p: (len(p.application_identifier_pattern), p.expiration_date))
+
+  one_week = datetime.timedelta(7)
+  if selected_provisioning_profile.expiration_date - now < 2 * one_week:
+    sys.stderr.write(
+        'Warning: selected provisioning profile will expire soon: %s.\n' %
+        selected_provisioning_profile.path)
+  return selected_provisioning_profile
+
+
+def CodeSignBundle(bundle_path, identity, extra_args):
+  process = subprocess.Popen(
+      ['xcrun', 'codesign', '--force', '--sign', identity, '--timestamp=none'] +
+      list(extra_args) + [bundle_path],
+      stderr=subprocess.PIPE,
+      universal_newlines=True)
+  _, stderr = process.communicate()
+  if process.returncode:
+    sys.stderr.write(stderr)
+    sys.exit(process.returncode)
+  for line in stderr.splitlines():
+    if line.endswith(': replacing existing signature'):
+      # Ignore warning about replacing existing signature as this should only
+      # happen when re-signing system frameworks (and then it is expected).
+      continue
+    sys.stderr.write(line)
+    sys.stderr.write('\n')
+
+
+def InstallSystemFramework(framework_path, bundle_path, args):
+  """Install framework from |framework_path| to |bundle| and code-re-sign it."""
+  installed_framework_path = os.path.join(
+      bundle_path, 'Frameworks', os.path.basename(framework_path))
+
+  if os.path.isfile(framework_path):
+    shutil.copy(framework_path, installed_framework_path)
+  elif os.path.isdir(framework_path):
+    if os.path.exists(installed_framework_path):
+      shutil.rmtree(installed_framework_path)
+    shutil.copytree(framework_path, installed_framework_path)
+
+  CodeSignBundle(installed_framework_path, args.identity,
+      ['--deep', '--preserve-metadata=identifier,entitlements,flags'])
+
+
+def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
+  """Generates an entitlements file.
+
+  Args:
+    path: path to the entitlements template file.
+    provisioning_profile: ProvisioningProfile object to use, may be None.
+    bundle_identifier: identifier of the bundle to sign.
+
+  Returns:
+    The generated Entitlements object.
+  """
+  entitlements = Entitlements(path)
+  if provisioning_profile:
+    entitlements.LoadDefaults(provisioning_profile.entitlements)
+    app_identifier_prefix = \
+      provisioning_profile.application_identifier_prefix + '.'
+  else:
+    app_identifier_prefix = '*.'
+  entitlements.ExpandVariables({
+      'CFBundleIdentifier': bundle_identifier,
+      'AppIdentifierPrefix': app_identifier_prefix,
+  })
+  return entitlements
+
+
+def GenerateBundleInfoPlist(bundle, plist_compiler, partial_plist):
+  """Generates the bundle Info.plist for a list of partial .plist files.
+
+  Args:
+    bundle: a Bundle instance
+    plist_compiler: string, path to the Info.plist compiler
+    partial_plist: list of paths to partial .plist files to merge.
+  """
+
+  # Filter empty partial .plist files (this happens if an application
+  # does not compile any asset catalog, in which case the partial .plist
+  # file from the asset catalog compilation step is just a stamp file).
+  filtered_partial_plist = []
+  for plist in partial_plist:
+    plist_size = os.stat(plist).st_size
+    if plist_size:
+      filtered_partial_plist.append(plist)
+
+  # Invoke the plist_compiler script. It needs to be a python script.
+  subprocess.check_call([
+      'python',
+      plist_compiler,
+      'merge',
+      '-f',
+      'binary1',
+      '-o',
+      bundle.info_plist_path,
+  ] + filtered_partial_plist)
+
+
+class Action(object):
+  """Class implementing one action supported by the script."""
+
+  @classmethod
+  def Register(cls, subparsers):
+    parser = subparsers.add_parser(cls.name, help=cls.help)
+    parser.set_defaults(func=cls._Execute)
+    cls._Register(parser)
+
+
+class CodeSignBundleAction(Action):
+  """Class implementing the code-sign-bundle action."""
+
+  name = 'code-sign-bundle'
+  help = 'perform code signature for a bundle'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the iOS bundle to codesign')
+    parser.add_argument(
+        '--identity', '-i', required=True,
+        help='identity to use to codesign')
+    parser.add_argument(
+        '--binary', '-b', required=True,
+        help='path to the iOS bundle binary')
+    parser.add_argument(
+        '--framework', '-F', action='append', default=[], dest='frameworks',
+        help='install and resign system framework')
+    parser.add_argument(
+        '--disable-code-signature', action='store_true', dest='no_signature',
+        help='disable code signature')
+    parser.add_argument(
+        '--disable-embedded-mobileprovision', action='store_false',
+        default=True, dest='embedded_mobileprovision',
+        help='disable finding and embedding mobileprovision')
+    parser.add_argument(
+        '--platform', '-t', required=True,
+        help='platform the signed bundle is targeting')
+    parser.add_argument(
+        '--partial-info-plist', '-p', action='append', default=[],
+        help='path to partial Info.plist to merge to create bundle Info.plist')
+    parser.add_argument(
+        '--plist-compiler-path', '-P', action='store',
+        help='path to the plist compiler script (for --partial-info-plist)')
+    parser.set_defaults(no_signature=False)
+
+  @staticmethod
+  def _Execute(args):
+    if not args.identity:
+      args.identity = '-'
+
+    bundle = Bundle(args.path, args.platform)
+
+    if args.partial_info_plist:
+      GenerateBundleInfoPlist(bundle, args.plist_compiler_path,
+                              args.partial_info_plist)
+
+    # The bundle Info.plist may have been updated by GenerateBundleInfoPlist()
+    # above. Load the bundle information from Info.plist after the
+    # modifications have been written to disk.
+    bundle.Load()
+
+    # According to Apple documentation, the application binary must be the same
+    # as the bundle name without the .app suffix. See crbug.com/740476 for more
+    # information on what problem this can cause.
+    #
+    # To prevent this class of error, fail with an error if the binary name is
+    # incorrect in the Info.plist as it is not possible to update the value in
+    # Info.plist at this point (the file has been copied by a different target
+    # and ninja would consider the build dirty if it was updated).
+    #
+    # Also check that the name of the bundle is correct (this does not cause
+    # the build to be considered dirty, but still terminates the script in
+    # case of an incorrect bundle name).
+    #
+    # Apple documentation is available at:
+    # https://developer.apple.com/library/content/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html
+    bundle_name = os.path.splitext(os.path.basename(bundle.path))[0]
+    errors = bundle.Validate({
+        'CFBundleName': bundle_name,
+        'CFBundleExecutable': bundle_name,
+    })
+    if errors:
+      for key in sorted(errors):
+        value, expected_value = errors[key]
+        sys.stderr.write('%s: error: %s value incorrect: %s != %s\n' % (
+            bundle.path, key, value, expected_value))
+      sys.stderr.flush()
+      sys.exit(1)
+
+    # Delete existing embedded mobile provisioning.
+    embedded_provisioning_profile = os.path.join(
+        bundle.path, 'embedded.mobileprovision')
+    if os.path.isfile(embedded_provisioning_profile):
+      os.unlink(embedded_provisioning_profile)
+
+    # Delete existing code signature.
+    if os.path.exists(bundle.signature_dir):
+      shutil.rmtree(bundle.signature_dir)
+
+    # Install system frameworks if requested.
+    for framework_path in args.frameworks:
+      InstallSystemFramework(framework_path, args.path, args)
+
+    # Copy main binary into bundle.
+    if not os.path.isdir(bundle.executable_dir):
+      os.makedirs(bundle.executable_dir)
+    shutil.copy(args.binary, bundle.binary_path)
+
+    if bundle.kind == 'mac_framework':
+      # Create Versions/Current -> Versions/A symlink
+      CreateSymlink('A', os.path.join(bundle.path, 'Versions/Current'))
+
+      # Create $binary_name -> Versions/Current/$binary_name symlink
+      CreateSymlink(os.path.join('Versions/Current', bundle.binary_name),
+                    os.path.join(bundle.path, bundle.binary_name))
+
+      # Create optional symlinks.
+      for name in ('Headers', 'Resources', 'Modules'):
+        target = os.path.join(bundle.path, 'Versions/A', name)
+        if os.path.exists(target):
+          CreateSymlink(os.path.join('Versions/Current', name),
+                        os.path.join(bundle.path, name))
+        else:
+          obsolete_path = os.path.join(bundle.path, name)
+          if os.path.exists(obsolete_path):
+            os.unlink(obsolete_path)
+
+    if args.no_signature:
+      return
+
+    codesign_extra_args = []
+
+    if args.embedded_mobileprovision:
+      # Find the mobile provisioning profile and embed it into the bundle (if
+      # a code signing identity has been provided, fail if no valid mobile
+      # provisioning profile is found).
+      provisioning_profile_required = args.identity != '-'
+      provisioning_profile = FindProvisioningProfile(
+          bundle.identifier, provisioning_profile_required)
+      if provisioning_profile and args.platform != 'iphonesimulator':
+        provisioning_profile.Install(embedded_provisioning_profile)
+
+        if args.entitlements_path is not None:
+          temporary_entitlements_file = \
+              tempfile.NamedTemporaryFile(suffix='.xcent')
+          codesign_extra_args.extend(
+              ['--entitlements', temporary_entitlements_file.name])
+
+          entitlements = GenerateEntitlements(
+              args.entitlements_path, provisioning_profile, bundle.identifier)
+          entitlements.WriteTo(temporary_entitlements_file.name)
+
+    CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
+
+
+class CodeSignFileAction(Action):
+  """Class implementing code signature for a single file."""
+
+  name = 'code-sign-file'
+  help = 'code-sign a single file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        'path', help='path to the file to codesign')
+    parser.add_argument(
+        '--identity', '-i', required=True,
+        help='identity to use to codesign')
+    parser.add_argument(
+        '--output', '-o',
+        help='if specified copy the file to that location before signing it')
+    parser.set_defaults(sign=True)
+
+  @staticmethod
+  def _Execute(args):
+    if not args.identity:
+      args.identity = '-'
+
+    install_path = args.path
+    if args.output:
+
+      if os.path.isfile(args.output):
+        os.unlink(args.output)
+      elif os.path.isdir(args.output):
+        shutil.rmtree(args.output)
+
+      if os.path.isfile(args.path):
+        shutil.copy(args.path, args.output)
+      elif os.path.isdir(args.path):
+        shutil.copytree(args.path, args.output)
+
+      install_path = args.output
+
+    CodeSignBundle(install_path, args.identity,
+      ['--deep', '--preserve-metadata=identifier,entitlements'])
+
+
+class GenerateEntitlementsAction(Action):
+  """Class implementing the generate-entitlements action."""
+
+  name = 'generate-entitlements'
+  help = 'generate entitlements file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the entitlements file to generate')
+    parser.add_argument(
+        '--info-plist', '-p', required=True,
+        help='path to the bundle Info.plist')
+
+  @staticmethod
+  def _Execute(args):
+    info_plist = LoadPlistFile(args.info_plist)
+    bundle_identifier = info_plist['CFBundleIdentifier']
+    provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
+    entitlements = GenerateEntitlements(
+        args.entitlements_path, provisioning_profile, bundle_identifier)
+    entitlements.WriteTo(args.path)
+
+
+class FindProvisioningProfileAction(Action):
+  """Class implementing the find-codesign-identity action."""
+
+  name = 'find-provisioning-profile'
+  help = 'find provisioning profile for use by Xcode project generator'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument('--bundle-id',
+                        '-b',
+                        required=True,
+                        help='bundle identifier')
+
+  @staticmethod
+  def _Execute(args):
+    provisioning_profile_info = {}
+    provisioning_profile = FindProvisioningProfile(args.bundle_id, False)
+    for key in ('team_identifier', 'name'):
+      if provisioning_profile:
+        provisioning_profile_info[key] = getattr(provisioning_profile, key)
+      else:
+        provisioning_profile_info[key] = ''
+    print(json.dumps(provisioning_profile_info))
+
+
+def Main():
+  # Cache this codec so that plistlib can find it. See
+  # https://crbug.com/999461#c12 for more details.
+  codecs.lookup('utf-8')
+
+  parser = argparse.ArgumentParser(description='codesign iOS bundles')
+  subparsers = parser.add_subparsers()
+
+  actions = [
+      CodeSignBundleAction,
+      CodeSignFileAction,
+      GenerateEntitlementsAction,
+      FindProvisioningProfileAction,
+  ]
+
+  for action in actions:
+    action.Register(subparsers)
+
+  args = parser.parse_args()
+  args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
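
The matching and selection logic in ValidToSignBundle and
FindProvisioningProfile above reduces to an fnmatch test plus a max() over
(pattern length, expiration date). A self-contained sketch with fabricated
profile data:

    import datetime
    import fnmatch

    # Fabricated profiles: a wildcard team profile and a more specific one.
    profiles = [
        {'pattern': 'ABCDE12345.*',
         'expires': datetime.datetime(2030, 1, 1)},
        {'pattern': 'ABCDE12345.org.chromium.*',
         'expires': datetime.datetime(2029, 6, 1)},
    ]
    bundle_id = 'org.chromium.example'

    matching = [p for p in profiles
                if fnmatch.fnmatch('ABCDE12345.' + bundle_id, p['pattern'])]
    # The longest pattern wins; the expiration date is the tie-breaker.
    selected = max(matching,
                   key=lambda p: (len(p['pattern']), p['expires']))
    print(selected['pattern'])  # ABCDE12345.org.chromium.*
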
diff --git a/src/build/config/ios/compile_ib_files.py b/src/build/config/ios/compile_ib_files.py
new file mode 100644
index 0000000..84781c1
--- /dev/null
+++ b/src/build/config/ios/compile_ib_files.py
@@ -0,0 +1,57 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='A script to compile xib and storyboard.',
+      fromfile_prefix_chars='@')
+  parser.add_argument('-o', '--output', required=True,
+                      help='Path to output bundle.')
+  parser.add_argument('-i', '--input', required=True,
+                      help='Path to input xib or storyboard.')
+  args, unknown_args = parser.parse_known_args()
+
+  ibtool_args = [
+      'xcrun', 'ibtool',
+      '--errors', '--warnings', '--notices',
+      '--output-format', 'human-readable-text'
+  ]
+  ibtool_args += unknown_args
+  ibtool_args += [
+      '--compile',
+      os.path.abspath(args.output),
+      os.path.abspath(args.input)
+  ]
+
+  ibtool_section_re = re.compile(r'/\*.*\*/')
+  ibtool_re = re.compile(r'.*note:.*is clipping its content')
+  try:
+    # Request text output so the regexes below match str lines on python 3.
+    stdout = subprocess.check_output(ibtool_args, universal_newlines=True)
+  except subprocess.CalledProcessError as e:
+    print(e.output)
+    raise
+  current_section_header = None
+  for line in stdout.splitlines():
+    if ibtool_section_re.match(line):
+      current_section_header = line
+    elif not ibtool_re.match(line):
+      if current_section_header:
+        print(current_section_header)
+        current_section_header = None
+      print(line)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
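
The loop above buffers section headers and drops only the noisy "is clipping
its content" notices; a header is printed only if some line in its section
survives. A short sketch of the same logic on a fabricated ibtool transcript:

    import re

    section_re = re.compile(r'/\*.*\*/')
    clipping_re = re.compile(r'.*note:.*is clipping its content')

    sample = (
        '/* com.apple.ibtool.document.notices */\n'
        'main.storyboard: note: A view is clipping its content\n'
        '/* com.apple.ibtool.document.warnings */\n'
        'main.storyboard: warning: something actionable\n')

    header = None
    for line in sample.splitlines():
        if section_re.match(line):
            header = line
        elif not clipping_re.match(line):
            if header:
                print(header)  # Emitted lazily, only for surviving sections.
                header = None
            print(line)
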
diff --git a/src/build/config/ios/compile_xcassets_unittests.py b/src/build/config/ios/compile_xcassets_unittests.py
new file mode 100644
index 0000000..7655df8
--- /dev/null
+++ b/src/build/config/ios/compile_xcassets_unittests.py
@@ -0,0 +1,141 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+import compile_xcassets
+
+
+class TestFilterCompilerOutput(unittest.TestCase):
+
+  relative_paths = {
+    '/Users/janedoe/chromium/src/Chromium.xcassets':
+        '../../Chromium.xcassets',
+    '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car':
+        'Chromium.app/Assets.car',
+  }
+
+  def testNoError(self):
+    self.assertEqual(
+        '',
+        compile_xcassets.FilterCompilerOutput(
+            '/* com.apple.actool.compilation-results */\n'
+            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+            self.relative_paths))
+
+  def testNoErrorRandomMessages(self):
+    self.assertEqual(
+        '',
+        compile_xcassets.FilterCompilerOutput(
+            '2017-07-04 04:59:19.460 ibtoold[23487:41214] CoreSimulator is att'
+                'empting to unload a stale CoreSimulatorService job.  Existing'
+                ' job (com.apple.CoreSimulator.CoreSimulatorService.179.1.E8tt'
+                'yeDeVgWK) is from an older version and is being removed to pr'
+                'event problems.\n'
+            '/* com.apple.actool.compilation-results */\n'
+            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+            self.relative_paths))
+
+  def testWarning(self):
+    self.assertEqual(
+        '/* com.apple.actool.document.warnings */\n'
+        '../../Chromium.xcassets:./image1.imageset/[universal][][][1x][][][]['
+            '][][]: warning: The file "image1.png" for the image set "image1"'
+            ' does not exist.\n',
+        compile_xcassets.FilterCompilerOutput(
+            '/* com.apple.actool.document.warnings */\n'
+            '/Users/janedoe/chromium/src/Chromium.xcassets:./image1.imageset/['
+                'universal][][][1x][][][][][][]: warning: The file "image1.png'
+                '" for the image set "image1" does not exist.\n'
+            '/* com.apple.actool.compilation-results */\n'
+            '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+            self.relative_paths))
+
+  def testError(self):
+    self.assertEqual(
+        '/* com.apple.actool.errors */\n'
+        '../../Chromium.xcassets: error: The output directory "/Users/janedoe/'
+            'chromium/src/out/Default/Chromium.app" does not exist.\n',
+        compile_xcassets.FilterCompilerOutput(
+            '/* com.apple.actool.errors */\n'
+            '/Users/janedoe/chromium/src/Chromium.xcassets: error: The output '
+                'directory "/Users/janedoe/chromium/src/out/Default/Chromium.a'
+                'pp" does not exist.\n'
+            '/* com.apple.actool.compilation-results */\n',
+            self.relative_paths))
+
+  def testSpurious(self):
+    self.assertEqual(
+        '/* com.apple.actool.document.warnings */\n'
+        '../../Chromium.xcassets:./AppIcon.appiconset: warning: A 1024x1024 ap'
+            'p store icon is required for iOS apps\n',
+        compile_xcassets.FilterCompilerOutput(
+            '/* com.apple.actool.document.warnings */\n'
+            '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
+                't: warning: A 1024x1024 app store icon is required for iOS ap'
+                'ps\n'
+            '/* com.apple.actool.document.notices */\n'
+            '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
+                't/[][ipad][76x76][][][1x][][]: notice: (null)\n',
+            self.relative_paths))
+
+  def testComplexError(self):
+    self.assertEqual(
+        '/* com.apple.actool.errors */\n'
+        ': error: Failed to find a suitable device for the type SimDeviceType '
+            ': com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime SimRunt'
+            'ime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimRuntime.iOS-1'
+            '0-3\n'
+        '    Failure Reason: Failed to create SimDeviceSet at path /Users/jane'
+            'doe/Library/Developer/Xcode/UserData/IB Support/Simulator Devices'
+            '. You\'ll want to check the logs in ~/Library/Logs/CoreSimulator '
+            'to see why creating the SimDeviceSet failed.\n'
+        '    Underlying Errors:\n'
+        '        Description: Failed to initialize simulator device set.\n'
+        '        Failure Reason: Failed to subscribe to notifications from Cor'
+            'eSimulatorService.\n'
+        '        Underlying Errors:\n'
+        '            Description: Error returned in reply to notification requ'
+            'est: Connection invalid\n'
+        '            Failure Reason: Software caused connection abort\n',
+        compile_xcassets.FilterCompilerOutput(
+            '2017-07-07 10:37:27.367 ibtoold[88538:12553239] CoreSimulator det'
+                'ected Xcode.app relocation or CoreSimulatorService version ch'
+                'ange.  Framework path (/Applications/Xcode.app/Contents/Devel'
+                'oper/Library/PrivateFrameworks/CoreSimulator.framework) and v'
+                'ersion (375.21) does not match existing job path (/Library/De'
+                'veloper/PrivateFrameworks/CoreSimulator.framework/Versions/A/'
+                'XPCServices/com.apple.CoreSimulator.CoreSimulatorService.xpc)'
+                ' and version (459.13).  Attempting to remove the stale servic'
+                'e in order to add the expected version.\n'
+            '2017-07-07 10:37:27.625 ibtoold[88538:12553256] CoreSimulatorServ'
+                'ice connection interrupted.  Resubscribing to notifications.\n'
+            '2017-07-07 10:37:27.632 ibtoold[88538:12553264] CoreSimulatorServ'
+                'ice connection became invalid.  Simulator services will no lo'
+                'nger be available.\n'
+            '2017-07-07 10:37:27.642 ibtoold[88538:12553274] CoreSimulatorServ'
+                'ice connection became invalid.  Simulator services will no lo'
+                'nger be available.\n'
+            '/* com.apple.actool.errors */\n'
+            ': error: Failed to find a suitable device for the type SimDeviceT'
+                'ype : com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime'
+                ' SimRuntime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimR'
+                'untime.iOS-10-3\n'
+            '    Failure Reason: Failed to create SimDeviceSet at path /Users/'
+                'janedoe/Library/Developer/Xcode/UserData/IB Support/Simulator'
+                ' Devices. You\'ll want to check the logs in ~/Library/Logs/Co'
+                'reSimulator to see why creating the SimDeviceSet failed.\n'
+            '    Underlying Errors:\n'
+            '        Description: Failed to initialize simulator device set.\n'
+            '        Failure Reason: Failed to subscribe to notifications from'
+                ' CoreSimulatorService.\n'
+            '        Underlying Errors:\n'
+            '            Description: Error returned in reply to notification '
+                'request: Connection invalid\n'
+            '            Failure Reason: Software caused connection abort\n'
+            '/* com.apple.actool.compilation-results */\n',
+            self.relative_paths))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/config/ios/config.gni b/src/build/config/ios/config.gni
new file mode 100644
index 0000000..b25ecd9
--- /dev/null
+++ b/src/build/config/ios/config.gni
@@ -0,0 +1,23 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Configure the environment for which to build. Could be either "device",
+  # "simulator" or "catalyst". If unspecified, then it will be assumed to be
+  # "simulator" if the target_cpu is "x68" or "x64", "device" otherwise. The
+  # default is only there for compatibility reasons and will be removed (see
+  # crbug.com/1138425 for more details).
+  target_environment = ""
+}
+
+if (target_environment == "") {
+  if (current_cpu == "x86" || current_cpu == "x64") {
+    target_environment = "simulator"
+  } else {
+    target_environment = "device"
+  }
+}
+
+assert(target_environment == "simulator" || target_environment == "device" ||
+       target_environment == "catalyst")
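
A sketch of the defaulting rule above, assuming the documented mapping from
CPU to environment (the GN code keys off current_cpu):

    def default_target_environment(target_environment, current_cpu):
        if target_environment:
            return target_environment
        return 'simulator' if current_cpu in ('x86', 'x64') else 'device'

    assert default_target_environment('', 'x64') == 'simulator'
    assert default_target_environment('', 'arm64') == 'device'
    assert default_target_environment('catalyst', 'arm64') == 'catalyst'
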
diff --git a/src/build/config/ios/dummy.py b/src/build/config/ios/dummy.py
new file mode 100644
index 0000000..b23b7da
--- /dev/null
+++ b/src/build/config/ios/dummy.py
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Empty script that does nothing and return success error code.
+
+This script is used by some gn targets that pretend to create some output
+but instead depend on another target creating the output indirectly (in
+general this output is a directory that is used as input by a bundle_data
+target).
+
+It ignores all parameters and terminates with a success exit code. It
+does the same thing as the unix command "true", but gn can only invoke
+python scripts.
+"""
diff --git a/src/build/config/ios/entitlements.plist b/src/build/config/ios/entitlements.plist
new file mode 100644
index 0000000..429762e
--- /dev/null
+++ b/src/build/config/ios/entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>application-identifier</key>
+	<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	<key>keychain-access-groups</key>
+	<array>
+		<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	</array>
+</dict>
+</plist>
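
The $(...) placeholders in this template are resolved by codesign.py's
Entitlements.ExpandVariables through plain string substitution; a sketch with
fabricated values:

    substitutions = {
        'AppIdentifierPrefix': 'ABCDE12345.',
        'CFBundleIdentifier': 'org.chromium.example',
    }

    value = '$(AppIdentifierPrefix)$(CFBundleIdentifier)'
    for key, substitution in substitutions.items():
        value = value.replace('$(%s)' % key, substitution)

    print(value)  # ABCDE12345.org.chromium.example
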
diff --git a/src/build/config/ios/find_signing_identity.py b/src/build/config/ios/find_signing_identity.py
new file mode 100644
index 0000000..d508e2b
--- /dev/null
+++ b/src/build/config/ios/find_signing_identity.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+import re
+
+
+def Redact(value, from_nth_char=5):
+  """Redact value past the N-th character."""
+  return value[:from_nth_char] + '*' * (len(value) - from_nth_char)
+
+
+class Identity(object):
+  """Represents a valid identity."""
+
+  def __init__(self, identifier, name, team):
+    self.identifier = identifier
+    self.name = name
+    self.team = team
+
+  def redacted(self):
+    return Identity(Redact(self.identifier), self.name, Redact(self.team))
+
+  def format(self):
+    return '%s: "%s (%s)"' % (self.identifier, self.name, self.team)
+
+
+def ListIdentities():
+  return subprocess.check_output([
+      'xcrun',
+      'security',
+      'find-identity',
+      '-v',
+      '-p',
+      'codesigning',
+  ]).decode('utf8')
+
+
+def FindValidIdentity(pattern):
+  """Find all identities matching the pattern."""
+  lines = list(l.strip() for l in ListIdentities().splitlines())
+  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
+  regex = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"(]*) \(([^)"]*)\)"')
+
+  result = []
+  for line in lines:
+    res = regex.match(line)
+    if res is None:
+      continue
+    if pattern is None or pattern in res.group(2):
+      result.append(Identity(*res.groups()))
+  return result
+
+
+def Main(args):
+  parser = argparse.ArgumentParser(description='find codesigning identity')
+  parser.add_argument('--matching-pattern',
+                      dest='pattern',
+                      help='Pattern used to select the code signing identity.')
+  parsed = parser.parse_args(args)
+
+  identities = FindValidIdentity(parsed.pattern)
+  if len(identities) == 1:
+    print(identities[0].identifier, end='')
+    return 0
+
+  all_identities = FindValidIdentity(None)
+
+  print('Automatic code signing identity selection was enabled but could not')
+  print('find exactly one codesigning identity matching "%s".' % parsed.pattern)
+  print('')
+  print('Check that the keychain is accessible and that there is exactly one')
+  print('valid codesigning identity matching the pattern. Here is the parsed')
+  print('output of `xcrun security find-identity -v -p codesigning`:')
+  print()
+  for i, identity in enumerate(all_identities):
+    print('  %d) %s' % (i + 1, identity.redacted().format()))
+  print('    %d valid identities found' % (len(all_identities)))
+  return 1
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
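
A sketch of the parsing above against one fabricated line of
`xcrun security find-identity -v -p codesigning` output:

    import re

    regex = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"(]*) \(([^)"]*)\)"')
    line = ('1) 0123456789ABCDEF0123456789ABCDEF01234567 '
            '"Apple Development: Jane Doe (TEAM123456)"')

    match = regex.match(line)
    identifier, name, team = match.groups()
    print(identifier[:5] + '...', name, team)
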
diff --git a/src/build/config/ios/generate_umbrella_header.py b/src/build/config/ios/generate_umbrella_header.py
new file mode 100644
index 0000000..8547e18
--- /dev/null
+++ b/src/build/config/ios/generate_umbrella_header.py
@@ -0,0 +1,75 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an umbrella header for an iOS framework."""
+
+import argparse
+import datetime
+import os
+import re
+import string
+
+
+HEADER_TEMPLATE = string.Template('''\
+// Copyright $year The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file is auto-generated by //build/config/ios/generate_umbrella_header.py
+
+#ifndef $header_guard
+#define $header_guard
+
+$imports
+
+#endif  // $header_guard
+''')
+
+
+def ComputeHeaderGuard(file_path):
+  """Computes the header guard for a file path.
+
+  Args:
+    file_path: The path to convert into an header guard.
+  Returns:
+    The header guard string for the file_path.
+  """
+  return re.sub(r'[.+/\\]', r'_', file_path.upper()) + '_'
+
+
+def WriteUmbrellaHeader(output_path, imported_headers):
+  """Writes the umbrella header.
+
+  Args:
+    output_path: The path to the umbrella header.
+    imported_headers: A list of headers to #import in the umbrella header.
+  """
+  year = datetime.date.today().year
+  header_guard = ComputeHeaderGuard(output_path)
+  imports = '\n'.join([
+      '#import "%s"' % os.path.basename(header)
+          for header in sorted(imported_headers)
+      ])
+  with open(output_path, 'w') as output_file:
+    output_file.write(
+        HEADER_TEMPLATE.safe_substitute({
+            'year': year,
+            'header_guard': header_guard,
+            'imports': imports,
+        }))
+
+
+def Main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--output-path', required=True, type=str,
+                      help='Path to the generated umbrella header.')
+  parser.add_argument('imported_headers', type=str, nargs='+',
+                      help='Headers to #import in the umbrella header.')
+  options = parser.parse_args()
+
+  return WriteUmbrellaHeader(options.output_path, options.imported_headers)
+
+
+if __name__ == '__main__':
+  Main()
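
The header guard is derived deterministically from the output path; a sketch
with a hypothetical path:

    import re

    def compute_header_guard(file_path):
        # Same substitution as ComputeHeaderGuard above.
        return re.sub(r'[.+/\\]', r'_', file_path.upper()) + '_'

    print(compute_header_guard('out/gen/ios/web_view/ChromeWebView.h'))
    # OUT_GEN_IOS_WEB_VIEW_CHROMEWEBVIEW_H_
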
diff --git a/src/build/config/ios/hardlink.py b/src/build/config/ios/hardlink.py
new file mode 100644
index 0000000..38f60d4
--- /dev/null
+++ b/src/build/config/ios/hardlink.py
@@ -0,0 +1,71 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Recursively create hardlink to target named output."""
+
+
+import argparse
+import os
+import shutil
+
+
+def CreateHardlinkHelper(target, output):
+  """Recursively create a hardlink named output pointing to target.
+
+  Args:
+    target: path to an existing file or directory
+    output: path to the newly created hardlink
+
+  This function assumes that output does not exist but that the parent
+  directory containing output does. If those conditions are false, then
+  the function will fail with an exception corresponding to an OS error.
+  """
+  if os.path.islink(target):
+    os.symlink(os.readlink(target), output)
+  elif not os.path.isdir(target):
+    try:
+      os.link(target, output)
+    except OSError:
+      # Hardlinking can fail (e.g. across filesystems); fall back to a copy.
+      shutil.copy(target, output)
+  else:
+    os.mkdir(output)
+    for name in os.listdir(target):
+      CreateHardlinkHelper(
+          os.path.join(target, name),
+          os.path.join(output, name))
+
+
+def CreateHardlink(target, output):
+  """Recursively create a hardlink named output pointing to target.
+
+  Args:
+    target: path to an existing file or directory
+    output: path to the newly created hardlink
+
+  If output already exists, it is first removed. In all cases, the
+  parent directory containing output is created.
+  """
+  if os.path.isdir(output):
+    shutil.rmtree(output)
+  elif os.path.exists(output):
+    os.unlink(output)
+
+  parent_dir = os.path.dirname(os.path.abspath(output))
+  if not os.path.isdir(parent_dir):
+    os.makedirs(parent_dir)
+
+  CreateHardlinkHelper(target, output)
+
+
+def Main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('target', help='path to the file or directory to link to')
+  parser.add_argument('output', help='name of the hardlink to create')
+  args = parser.parse_args()
+
+  CreateHardlink(args.target, args.output)
+
+
+if __name__ == '__main__':
+  Main()
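
The non-directory branch of CreateHardlinkHelper relies on hard links sharing
an inode with their target; a minimal sketch on a POSIX filesystem using
temporary paths:

    import os
    import tempfile

    workdir = tempfile.mkdtemp()
    target = os.path.join(workdir, 'original.txt')
    output = os.path.join(workdir, 'linked.txt')
    with open(target, 'w') as f:
        f.write('asset data')

    os.link(target, output)  # The non-directory case in CreateHardlinkHelper.
    assert os.stat(target).st_ino == os.stat(output).st_ino
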
diff --git a/src/build/config/ios/ios_sdk.gni b/src/build/config/ios/ios_sdk.gni
new file mode 100644
index 0000000..fbff8b4
--- /dev/null
+++ b/src/build/config/ios/ios_sdk.gni
@@ -0,0 +1,185 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/config.gni")
+import("//build/config/ios/ios_sdk_overrides.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+assert(current_os == "ios")
+assert(use_system_xcode, "Hermetic xcode doesn't work for ios.")
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of target_environment.
+  ios_bin_path = ""
+  ios_sdk_path = ""
+  ios_sdk_name = ""
+  ios_sdk_version = ""
+  ios_sdk_platform = ""
+  ios_sdk_platform_path = ""
+  ios_toolchains_path = ""
+  xcode_version = ""
+  xcode_version_int = 0
+  xcode_build = ""
+  machine_os_build = ""
+
+  # Set DEVELOPER_DIR while running sdk_info.py.
+  ios_sdk_developer_dir = ""
+
+  # Control whether codesigning is enabled (ignored for simulator builds).
+  ios_enable_code_signing = true
+
+  # Explicitly select the identity to use for codesigning. If defined, must
+  # be set to a non-empty string that will be passed to codesigning. Can be
+  # left unspecified if ios_code_signing_identity_description is used instead.
+  ios_code_signing_identity = ""
+
+  # Pattern used to select the identity to use for codesigning. If defined,
+  # must be a substring of the description of exactly one of the identities by
+  # `security find-identity -v -p codesigning`.
+  ios_code_signing_identity_description = "Apple Development"
+
+  # Prefix for the CFBundleIdentifier property of iOS bundles (corresponds to
+  # the "Organization Identifier" in Xcode). Code signing will fail if no
+  # mobile provisioning profile for the selected code signing identity
+  # supports that prefix.
+  ios_app_bundle_id_prefix = "org.chromium"
+
+  # If non-empty, this list must contain valid cpu architectures, and the final
+  # build will be a multi-architecture build (aka fat build) supporting the
+  # main $target_cpu architecture and all of $additional_target_cpus.
+  #
+  # For example to build an application that will run on both arm64 and armv7
+  # devices, you would use the following in args.gn file when running "gn args":
+  #
+  #   target_os = "ios"
+  #   target_cpu = "arm64"
+  #   additional_target_cpus = [ "arm" ]
+  #
+  # You can also pass the value via "--args" parameter for "gn gen" command by
+  # using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
+  additional_target_cpus = []
+}
+
+declare_args() {
+  # This variable is set by the toolchain. It is set to true if the toolchain
+  # is a secondary toolchain as part of a "fat" build.
+  is_fat_secondary_toolchain = false
+
+  # This variable is set by the toolchain. It is the name of the primary
+  # toolchain for the fat build (could be current_toolchain).
+  primary_fat_toolchain_name = ""
+}
+
+# Official builds may not use goma.
+assert(!(use_goma && is_chrome_branded && is_official_build &&
+             target_cpu == "arm64"),
+       "goma use is forbidden for official iOS builds.")
+
+assert(custom_toolchain == "" || additional_target_cpus == [],
+       "cannot define both custom_toolchain and additional_target_cpus")
+
+# If codesigning is enabled, you must configure either a codesigning identity
+# or a filter to automatically select the codesigning identity.
+if (target_environment == "device" && ios_enable_code_signing) {
+  assert(ios_code_signing_identity == "" ||
+             ios_code_signing_identity_description == "",
+         "You should either specify the precise identity to use with " +
+             "ios_code_signing_identity or let the code select an identity " +
+             "automatically (via find_signing_identity.py which use the " +
+             "variable ios_code_signing_identity_description to set the " +
+             "pattern to match the identity to use).")
+}
+
+# Initialize additional_toolchains from additional_target_cpus. Assert here
+# that the list does not contains $target_cpu nor duplicates as this would
+# cause weird errors during the build.
+additional_toolchains = []
+if (additional_target_cpus != []) {
+  foreach(_additional_target_cpu, additional_target_cpus) {
+    assert(_additional_target_cpu != target_cpu,
+           "target_cpu must not be listed in additional_target_cpus")
+
+    _toolchain = "//build/toolchain/ios:ios_clang_${_additional_target_cpu}_fat"
+    foreach(_additional_toolchain, additional_toolchains) {
+      assert(_toolchain != _additional_toolchain,
+             "additional_target_cpus must not contains duplicate values")
+    }
+
+    additional_toolchains += [ _toolchain ]
+  }
+}
+
+if (ios_sdk_path == "") {
+  # Compute default target.
+  if (target_environment == "simulator") {
+    ios_sdk_name = "iphonesimulator"
+    ios_sdk_platform = "iPhoneSimulator"
+  } else if (target_environment == "device") {
+    ios_sdk_name = "iphoneos"
+    ios_sdk_platform = "iPhoneOS"
+  } else if (target_environment == "catalyst") {
+    ios_sdk_name = "macosx"
+    ios_sdk_platform = "MacOSX"
+  } else {
+    assert(false, "unsupported environment: $target_environment")
+  }
+
+  ios_sdk_info_args = [
+    "--get_sdk_info",
+    "--get_machine_info",
+  ]
+  ios_sdk_info_args += [ ios_sdk_name ]
+  if (ios_sdk_developer_dir != "") {
+    ios_sdk_info_args += [
+      "--developer_dir",
+      ios_sdk_developer_dir,
+    ]
+  }
+  if (use_system_xcode && use_goma) {
+    ios_sdk_info_args += [
+      "--create_symlink_at",
+      "sdk/xcode_links",
+    ]
+  }
+  script_name = "//build/config/apple/sdk_info.py"
+  _ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope")
+  ios_bin_path =
+      rebase_path("${_ios_sdk_result.toolchains_path}/usr/bin/", root_build_dir)
+  ios_sdk_path = _ios_sdk_result.sdk_path
+  ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path
+  ios_sdk_version = _ios_sdk_result.sdk_version
+  ios_sdk_build = _ios_sdk_result.sdk_build
+  ios_toolchains_path = _ios_sdk_result.toolchains_path
+  xcode_version = _ios_sdk_result.xcode_version
+  xcode_version_int = _ios_sdk_result.xcode_version_int
+  xcode_build = _ios_sdk_result.xcode_build
+  machine_os_build = _ios_sdk_result.machine_os_build
+  if (target_environment == "simulator") {
+    # This is weird, but Xcode sets DTPlatformBuild to an empty field for
+    # simulator builds.
+    ios_platform_build = ""
+  } else {
+    ios_platform_build = ios_sdk_build
+  }
+}
+
+if (target_environment == "device" && ios_enable_code_signing) {
+  # Automatically select a codesigning identity if no identity is configured.
+  # This only applies to device build as simulator builds are not signed.
+  if (ios_code_signing_identity == "") {
+    find_signing_identity_args = []
+    if (ios_code_signing_identity_description != "") {
+      find_signing_identity_args = [
+        "--matching-pattern",
+        ios_code_signing_identity_description,
+      ]
+    }
+    ios_code_signing_identity = exec_script("find_signing_identity.py",
+                                            find_signing_identity_args,
+                                            "trim string")
+  }
+}
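
sdk_info.py itself is not part of this patch; the exec_script call above
expects it to emit GN scope assignments. A hedged sketch of how such values
could be obtained with standard Xcode tooling (the xcrun flags shown exist,
but the real script's exact output format is assumed):

    import subprocess

    def xcrun_output(*args):
        return subprocess.check_output(
            ['xcrun'] + list(args)).decode('utf8').strip()

    sdk_path = xcrun_output('--sdk', 'iphonesimulator', '--show-sdk-path')
    sdk_version = xcrun_output('--sdk', 'iphonesimulator',
                               '--show-sdk-version')
    # Printed in the key = "value" form a GN "scope" result expects.
    print('sdk_path = "%s"' % sdk_path)
    print('sdk_version = "%s"' % sdk_version)
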
diff --git a/src/build/config/ios/ios_sdk_overrides.gni b/src/build/config/ios/ios_sdk_overrides.gni
new file mode 100644
index 0000000..bd990bc
--- /dev/null
+++ b/src/build/config/ios/ios_sdk_overrides.gni
@@ -0,0 +1,17 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+declare_args() {
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "12.2"
+}
+
+# Always assert that ios_deployment_target is used on non-iOS platforms to
+# prevent unused args warnings.
+if (!is_ios) {
+  assert(ios_deployment_target == "12.2" || true)
+}
diff --git a/src/build/config/ios/ios_test_runner_wrapper.gni b/src/build/config/ios/ios_test_runner_wrapper.gni
new file mode 100644
index 0000000..8911071
--- /dev/null
+++ b/src/build/config/ios/ios_test_runner_wrapper.gni
@@ -0,0 +1,139 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+import("//build/config/ios/ios_sdk.gni")
+import("//build/util/generate_wrapper.gni")
+
+# Invokes generate_wrapper to create an executable script wrapping iOS'
+# run.py with baked in arguments. Only takes effect when test entry in
+# gn_isolate_map.pyl is updated to type="generated_script" with script
+# set to the wrapper output path.
+#
+# Arguments:
+#
+# data
+#   (optional, default [ "//ios/build/bots/scripts/" ]) list of files or
+#   directories required to run target
+#
+# data_deps
+#   (optional) list of non-linked dependencies (labels)
+#
+# deps
+#   (optional) list of labels the wrapper target depends on
+#
+# executable_args
+#   (optional) a list of string arguments to pass to run.py
+#
+# retries
+#   (optional, default 3) number of retry attempts
+#
+# shards
+#   (optional, default 1) number of shards to execute tests in parallel. Not
+#   the same as swarming shards.
+#
+# wrapper_output_name
+#   (optional, default "run_${target_name}") name of the wrapper script
+#
+template("ios_test_runner_wrapper") {
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "retries",
+                             "shards",
+                             "wrapper_output_name",
+                           ])
+    testonly = true
+    executable = "//testing/test_env.py"
+
+    # iOS main test runner
+    _runner_path =
+        rebase_path("//ios/build/bots/scripts/run.py", root_build_dir)
+
+    executable_args = [ "@WrappedPath(${_runner_path})" ]
+
+    # arguments passed to run.py
+    if (defined(invoker.executable_args)) {
+      executable_args += invoker.executable_args
+    }
+
+    _rebased_mac_toolchain = rebase_path("//mac_toolchain", root_build_dir)
+    _rebased_xcode_path = rebase_path("//Xcode.app", root_build_dir)
+
+    # --out-dir argument is specified in gn_isolate_map.pyl because
+    # ${ISOLATED_OUTDIR} doesn't get resolved through this wrapper.
+    executable_args += [
+      "--xcode-path",
+      "@WrappedPath(${_rebased_xcode_path})",
+      "--mac-toolchain-cmd",
+      "@WrappedPath(${_rebased_mac_toolchain})",
+    ]
+
+    # Default retries to 3
+    if (!defined(retries)) {
+      retries = 3
+    }
+    executable_args += [
+      "--retries",
+      "${retries}",
+    ]
+
+    # Default shards to 1
+    if (!defined(shards)) {
+      shards = 1
+    }
+    executable_args += [
+      "--shards",
+      "${shards}",
+    ]
+
+    data_deps = [ "//testing:test_scripts_shared" ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    # test runner relies on iossim for simulator builds.
+    if (target_environment == "simulator") {
+      _rebased_root_build_dir = rebase_path("${root_build_dir}", root_build_dir)
+      data_deps += [ "//testing/iossim" ]
+
+      executable_args += [
+        "--iossim",
+        "@WrappedPath(${_rebased_root_build_dir}/iossim)",
+      ]
+    }
+
+    if (use_clang_coverage) {
+      executable_args += [ "--use-clang-coverage" ]
+    }
+
+    if (!is_debug) {
+      executable_args += [ "--release" ]
+    }
+
+    # wrapper script output name and path
+    if (!defined(wrapper_output_name)) {
+      _wrapper_output_name = "run_${target_name}"
+    } else {
+      _wrapper_output_name = wrapper_output_name
+    }
+
+    # Test targets may attempt to generate multiple wrappers for a suite with
+    # multiple different toolchains when running with additional_target_cpus.
+    # Generate the wrapper script into root_out_dir rather than root_build_dir
+    # to ensure those wrappers are distinct.
+    wrapper_script = "${root_out_dir}/bin/${_wrapper_output_name}"
+
+    data = []
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+    data += [
+      "//.vpython",
+      "//ios/build/bots/scripts/",
+      "//testing/test_env.py",
+    ]
+  }
+}
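
For illustration, the argument list the template above assembles for a default
simulator build (retries=3, shards=1); the wrapped paths are hypothetical and
would be resolved by generate_wrapper:

    # Mirrors the executable_args accumulation in ios_test_runner_wrapper.
    executable_args = ['@WrappedPath(../../ios/build/bots/scripts/run.py)']
    executable_args += ['--xcode-path', '@WrappedPath(../../Xcode.app)',
                        '--mac-toolchain-cmd',
                        '@WrappedPath(../../mac_toolchain)']
    executable_args += ['--retries', '3', '--shards', '1']
    executable_args += ['--iossim', '@WrappedPath(./iossim)']  # simulator only
    print(' '.join(executable_args))
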
diff --git a/src/build/config/ios/resources/XCTRunnerAddition+Info.plist b/src/build/config/ios/resources/XCTRunnerAddition+Info.plist
new file mode 100644
index 0000000..cf9463f
--- /dev/null
+++ b/src/build/config/ios/resources/XCTRunnerAddition+Info.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleIdentifier</key>
+  <string>com.apple.test.${EXECUTABLE_NAME}</string>
+  <key>CFBundleName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundleExecutable</key>
+  <string>${EXECUTABLE_NAME}</string>
+</dict>
+</plist>
diff --git a/src/build/config/ios/rules.gni b/src/build/config/ios/rules.gni
new file mode 100644
index 0000000..a572548
--- /dev/null
+++ b/src/build/config/ios/rules.gni
@@ -0,0 +1,2148 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/apple/apple_info_plist.gni")
+import("//build/config/apple/symbols.gni")
+import("//build/config/ios/ios_sdk.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # Set to true if an Xcode project is generated for this build. Set this to
+  # false if you do not plan to run `gn gen --ide=xcode` in this directory.
+  # This will speed up the generation at the cost of generating an invalid
+  # Xcode project if `gn gen --ide=xcode` is used. Defaults to true (favor
+  # correctness over speed).
+  ios_set_attributes_for_xcode_project_generation = true
+}
+
+# Constants corresponding to the bundle type identifiers used for application,
+# application extension, XCTest and XCUITest targets respectively.
+_ios_xcode_app_bundle_id = "com.apple.product-type.application"
+_ios_xcode_appex_bundle_id = "com.apple.product-type.app-extension"
+_ios_xcode_xctest_bundle_id = "com.apple.product-type.bundle.unit-test"
+_ios_xcode_xcuitest_bundle_id = "com.apple.product-type.bundle.ui-testing"
+
+# Invokes lipo on multiple arch-specific binaries to create a fat binary.
+#
+# Arguments
+#
+#   arch_binary_target
+#     name of the target generating the arch-specific binaries; they must
+#     be named $target_out_dir/$toolchain_cpu/$arch_binary_output.
+#
+#   arch_binary_output
+#     (optional, defaults to the name of $arch_binary_target) base name of
+#     the arch-specific binary generated by arch_binary_target.
+#
+#   output_name
+#     (optional, defaults to $target_name) base name of the target output;
+#     the full path will be $target_out_dir/$output_name.
+#
+#   configs
+#     (optional) a list of configurations; this is used to check whether
+#     the binary should be stripped, when "enable_stripping" is true.
+#
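+# A minimal usage sketch (hypothetical target names; assumes a companion
+# ":foo_arch" target that writes $target_out_dir/$toolchain_cpu/foo):
+#
+#     lipo_binary("foo_fat") {
+#       arch_binary_target = ":foo_arch"
+#       arch_binary_output = "foo"
+#       output_name = "foo"
+#     }
+#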
+template("lipo_binary") {
+  assert(defined(invoker.arch_binary_target),
+         "arch_binary_target must be defined for $target_name")
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _all_target_cpu = [ current_cpu ] + additional_target_cpus
+  _all_toolchains = [ current_toolchain ] + additional_toolchains
+
+  _arch_binary_target = invoker.arch_binary_target
+  _arch_binary_output = get_label_info(_arch_binary_target, "name")
+  if (defined(invoker.arch_binary_output)) {
+    _arch_binary_output = invoker.arch_binary_output
+  }
+
+  action(_target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "arch_binary_output",
+                             "arch_binary_target",
+                             "configs",
+                             "output_name",
+                           ])
+
+    script = "//build/toolchain/apple/linker_driver.py"
+
+    # http://crbug.com/762840. Fix for bots running out of memory.
+    pool = "//build/toolchain:link_pool($default_toolchain)"
+
+    outputs = [ "$target_out_dir/$_output_name" ]
+
+    deps = []
+    _index = 0
+    inputs = []
+    foreach(_cpu, _all_target_cpu) {
+      _toolchain = _all_toolchains[_index]
+      _index = _index + 1
+
+      inputs +=
+          [ get_label_info("$_arch_binary_target($_toolchain)",
+                           "target_out_dir") + "/$_cpu/$_arch_binary_output" ]
+
+      deps += [ "$_arch_binary_target($_toolchain)" ]
+    }
+
+    args = [
+             "xcrun",
+             "lipo",
+             "-create",
+             "-output",
+             rebase_path("$target_out_dir/$_output_name", root_build_dir),
+           ] + rebase_path(inputs, root_build_dir)
+
+    if (enable_dsyms) {
+      _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM"
+      outputs += [
+        "$_dsyms_output_dir/",
+        "$_dsyms_output_dir/Contents/Info.plist",
+        "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name",
+      ]
+      args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ]
+      if (!use_xcode_clang) {
+        args += [ "-Wcrl,dsymutilpath," +
+                  rebase_path("//tools/clang/dsymutil/bin/dsymutil",
+                              root_build_dir) ]
+      }
+    }
+
+    if (enable_stripping) {
+      args += [ "-Wcrl,strip,-x,-S" ]
+      if (save_unstripped_output) {
+        outputs += [ "$root_out_dir/$_output_name.unstripped" ]
+        args += [ "-Wcrl,unstripped," +
+                  rebase_path("$root_out_dir/.", root_build_dir) ]
+      }
+    }
+  }
+}
+
+# Wrapper around create_bundle taking care of code signature settings.
+#
+# Arguments
+#
+#   product_type
+#       string, product type for the generated Xcode project.
+#
+#   bundle_gen_dir
+#       (optional) directory where the bundle is generated; must be below
+#       root_out_dir and defaults to root_out_dir if omitted.
+#
+#   bundle_deps
+#       (optional) list of additional dependencies.
+#
+#   bundle_deps_filter
+#       (optional) list of dependencies to filter (for more information
+#       see "gn help bundle_deps_filter").
+#
+#   bundle_extension
+#       string, extension of the bundle, used to generate bundle name.
+#
+#   bundle_binary_target
+#       (optional) string, label of the target generating the bundle main
+#       binary. This target and bundle_binary_path are mutually exclusive.
+#
+#   bundle_binary_output
+#       (optional) string, base name of the binary generated by the
+#       bundle_binary_target target, defaults to the target name.
+#
+#   bundle_binary_path
+#       (optional) string, path to the bundle main binary. This argument and
+#       bundle_binary_target are mutually exclusive.
+#
+#   output_name:
+#       (optional) string, name of the generated application; if omitted,
+#       defaults to the target_name.
+#
+#   extra_system_frameworks
+#       (optional) list of system framework to copy to the bundle.
+#
+#   enable_code_signing
+#       (optional) boolean, controls whether code signing is enabled or not;
+#       defaults to ios_enable_code_signing if not defined.
+#
+#   entitlements_path:
+#       (optional) path to the template to use to generate the application
+#       entitlements by performing variable substitutions, defaults to
+#       //build/config/ios/entitlements.plist.
+#
+#   entitlements_target:
+#       (optional) label of the target generating the application
+#       entitlements (must generate a single file as output); cannot be
+#       defined if entitlements_path is set.
+#
+#   has_public_headers:
+#       (optional) boolean, defaults to false; only meaningful if the bundle
+#       is a framework bundle; if true, then the framework includes public
+#       headers.
+#
+#   disable_entitlements
+#       (optional, defaults to false) boolean, controls whether entitlements will
+#       be embedded in the application during signature. If false and no
+#       entitlements are provided, default empty entitlements will be used.
+#
+#   disable_embedded_mobileprovision
+#       (optional, defaults to false) boolean, controls whether mobile provisions
+#       will be embedded in the bundle. If true, the existing
+#       embedded.mobileprovision will be deleted.
+#
+#   xcode_extra_attributes
+#       (optional) scope, extra attributes for Xcode projects.
+#
+#   xcode_test_application_name:
+#       (optional) string, name of the test application for Xcode unit or ui
+#       test target.
+#
+#   xcode_product_bundle_id:
+#       (optional) string, the bundle ID that will be added in the Xcode
+#       attributes to enable some features when debugging (e.g. MetricKit).
+#
+#   primary_info_plist:
+#       (optional) path to Info.plist to merge with the $partial_info_plist
+#       generated by the compilation of the asset catalog.
+#
+#   partial_info_plist:
+#       (optional) path to the partial Info.plist generated by the asset
+#       catalog compiler; if defined $primary_info_plist must also be defined.
+#
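+# A minimal usage sketch (hypothetical names; within this file the template
+# is invoked by ios_app_bundle and ios_framework_bundle below):
+#
+#     create_signed_bundle("foo_signed_bundle") {
+#       product_type = "com.apple.product-type.application"
+#       bundle_extension = ".app"
+#       bundle_binary_target = ":foo_executable"
+#       output_name = "foo"
+#     }
+#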
+template("create_signed_bundle") {
+  assert(defined(invoker.product_type),
+         "product_type must be defined for $target_name")
+  assert(defined(invoker.bundle_extension),
+         "bundle_extension must be defined for $target_name")
+  assert(defined(invoker.bundle_binary_target) !=
+             defined(invoker.bundle_binary_path),
+         "Only one of bundle_binary_target or bundle_binary_path may be " +
+             "specified for $target_name")
+  assert(!defined(invoker.partial_info_plist) ||
+             defined(invoker.primary_info_plist),
+         "primary_info_plist must be defined when partial_info_plist is " +
+             "defined for $target_name")
+
+  if (defined(invoker.xcode_test_application_name)) {
+    assert(
+        invoker.product_type == _ios_xcode_xctest_bundle_id ||
+            invoker.product_type == _ios_xcode_xcuitest_bundle_id,
+        "xcode_test_application_name can only be defined for Xcode unit or ui test targets.")
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  if (defined(invoker.bundle_binary_path)) {
+    _bundle_binary_path = invoker.bundle_binary_path
+  } else {
+    _bundle_binary_target = invoker.bundle_binary_target
+    _bundle_binary_output = get_label_info(_bundle_binary_target, "name")
+    if (defined(invoker.bundle_binary_output)) {
+      _bundle_binary_output = invoker.bundle_binary_output
+    }
+    _bundle_binary_path =
+        get_label_info(_bundle_binary_target, "target_out_dir") +
+        "/$_bundle_binary_output"
+  }
+
+  _bundle_gen_dir = root_out_dir
+  if (defined(invoker.bundle_gen_dir)) {
+    _bundle_gen_dir = invoker.bundle_gen_dir
+  }
+
+  _bundle_extension = invoker.bundle_extension
+
+  _enable_embedded_mobileprovision = true
+  if (defined(invoker.disable_embedded_mobileprovision)) {
+    _enable_embedded_mobileprovision = !invoker.disable_embedded_mobileprovision
+  }
+
+  if (target_environment == "catalyst") {
+    _enable_embedded_mobileprovision = false
+  }
+
+  _enable_entitlements = true
+  if (defined(invoker.disable_entitlements)) {
+    _enable_entitlements = !invoker.disable_entitlements
+  }
+
+  if (_enable_entitlements) {
+    if (!defined(invoker.entitlements_target)) {
+      _entitlements_path = "//build/config/ios/entitlements.plist"
+      if (defined(invoker.entitlements_path)) {
+        _entitlements_path = invoker.entitlements_path
+      }
+    } else {
+      assert(!defined(invoker.entitlements_path),
+             "Cannot define both entitlements_path and entitlements_target " +
+                 "for $target_name")
+
+      _entitlements_target_outputs =
+          get_target_outputs(invoker.entitlements_target)
+      _entitlements_path = _entitlements_target_outputs[0]
+    }
+  }
+
+  _enable_code_signing = ios_enable_code_signing
+  if (defined(invoker.enable_code_signing)) {
+    _enable_code_signing = invoker.enable_code_signing
+  }
+
+  if (!ios_set_attributes_for_xcode_project_generation) {
+    not_needed(invoker,
+               [
+                 "xcode_product_bundle_id",
+                 "xcode_extra_attributes",
+               ])
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "deps",
+                             "partial_info_plist",
+                             "product_type",
+                             "public_configs",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                             "xcode_test_application_name",
+                           ])
+
+    bundle_root_dir = "$_bundle_gen_dir/$_output_name$_bundle_extension"
+    if (target_environment == "simulator" || target_environment == "device") {
+      bundle_contents_dir = bundle_root_dir
+      bundle_resources_dir = bundle_contents_dir
+      bundle_executable_dir = bundle_contents_dir
+    } else if (target_environment == "catalyst") {
+      if (_bundle_extension != ".framework") {
+        bundle_contents_dir = "$bundle_root_dir/Contents"
+        bundle_resources_dir = "$bundle_contents_dir/Resources"
+        bundle_executable_dir = "$bundle_contents_dir/MacOS"
+      } else {
+        bundle_contents_dir = "$bundle_root_dir/Versions/A"
+        bundle_resources_dir = "$bundle_contents_dir/Resources"
+        bundle_executable_dir = bundle_contents_dir
+      }
+    }
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+
+    if (ios_set_attributes_for_xcode_project_generation) {
+      _xcode_product_bundle_id = ""
+      if (defined(invoker.xcode_product_bundle_id)) {
+        _xcode_product_bundle_id = invoker.xcode_product_bundle_id
+      }
+
+      if (_xcode_product_bundle_id != "") {
+        _ios_provisioning_profile_info =
+            exec_script("//build/config/ios/codesign.py",
+                        [
+                          "find-provisioning-profile",
+                          "-b=" + _xcode_product_bundle_id,
+                        ],
+                        "json")
+      }
+
+      xcode_extra_attributes = {
+        IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
+        if (_xcode_product_bundle_id != "") {
+          CODE_SIGN_IDENTITY = "iPhone Developer"
+          DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier
+          PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id
+          PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name
+        }
+
+        # If invoker has defined extra attributes, they override the defaults.
+        if (defined(invoker.xcode_extra_attributes)) {
+          forward_variables_from(invoker.xcode_extra_attributes, "*")
+        }
+      }
+    }
+
+    if (defined(invoker.bundle_binary_target)) {
+      public_deps += [ invoker.bundle_binary_target ]
+    }
+
+    if (defined(invoker.bundle_deps)) {
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += invoker.bundle_deps
+    }
+    if (!defined(deps)) {
+      deps = []
+    }
+
+    code_signing_script = "//build/config/ios/codesign.py"
+    code_signing_sources = [ _bundle_binary_path ]
+    if (_enable_entitlements) {
+      if (defined(invoker.entitlements_target)) {
+        deps += [ invoker.entitlements_target ]
+      }
+      code_signing_sources += [ _entitlements_path ]
+    }
+    code_signing_outputs = [ "$bundle_executable_dir/$_output_name" ]
+    if (_enable_code_signing) {
+      code_signing_outputs +=
+          [ "$bundle_contents_dir/_CodeSignature/CodeResources" ]
+    }
+    if (ios_code_signing_identity != "" && target_environment == "device" &&
+        _enable_embedded_mobileprovision) {
+      code_signing_outputs +=
+          [ "$bundle_contents_dir/embedded.mobileprovision" ]
+    }
+    if (_bundle_extension == ".framework") {
+      if (target_environment == "catalyst") {
+        code_signing_outputs += [
+          "$bundle_root_dir/Versions/Current",
+          "$bundle_root_dir/$_output_name",
+        ]
+
+        if (defined(invoker.has_public_headers) && invoker.has_public_headers) {
+          code_signing_outputs += [
+            "$bundle_root_dir/Headers",
+            "$bundle_root_dir/Modules",
+          ]
+        }
+      } else {
+        not_needed(invoker, [ "has_public_headers" ])
+      }
+    }
+
+    if (defined(invoker.extra_system_frameworks)) {
+      foreach(_framework, invoker.extra_system_frameworks) {
+        code_signing_outputs += [ "$bundle_contents_dir/Frameworks/" +
+                                  get_path_info(_framework, "file") ]
+      }
+    }
+
+    code_signing_args = [
+      "code-sign-bundle",
+      "-t=" + ios_sdk_name,
+      "-i=" + ios_code_signing_identity,
+      "-b=" + rebase_path(_bundle_binary_path, root_build_dir),
+    ]
+    if (_enable_entitlements) {
+      code_signing_args +=
+          [ "-e=" + rebase_path(_entitlements_path, root_build_dir) ]
+    }
+    if (!_enable_embedded_mobileprovision) {
+      code_signing_args += [ "--disable-embedded-mobileprovision" ]
+    }
+    code_signing_args += [ rebase_path(bundle_root_dir, root_build_dir) ]
+    if (!_enable_code_signing) {
+      code_signing_args += [ "--disable-code-signature" ]
+    }
+    if (defined(invoker.extra_system_frameworks)) {
+      # All frameworks in extra_system_frameworks are expected to be system
+      # frameworks with paths that are already system absolute, so do not use
+      # rebase_path here unless using Goma RBE and system Xcode (as in that
+      # case the system frameworks are found via a symlink in root_build_dir).
+      foreach(_framework, invoker.extra_system_frameworks) {
+        if (use_system_xcode && use_goma) {
+          _framework_path = rebase_path(_framework, root_build_dir)
+        } else {
+          _framework_path = _framework
+        }
+        code_signing_args += [ "-F=$_framework_path" ]
+      }
+    }
+    if (defined(invoker.partial_info_plist)) {
+      _partial_info_plists = [
+        invoker.primary_info_plist,
+        invoker.partial_info_plist,
+      ]
+
+      _plist_compiler_path = "//build/apple/plist_util.py"
+
+      code_signing_sources += _partial_info_plists
+      code_signing_sources += [ _plist_compiler_path ]
+      if (target_environment != "catalyst" ||
+          _bundle_extension != ".framework") {
+        code_signing_outputs += [ "$bundle_contents_dir/Info.plist" ]
+      } else {
+        code_signing_outputs += [ "$bundle_resources_dir/Info.plist" ]
+      }
+
+      code_signing_args +=
+          [ "-P=" + rebase_path(_plist_compiler_path, root_build_dir) ]
+      foreach(_partial_info_plist, _partial_info_plists) {
+        code_signing_args +=
+            [ "-p=" + rebase_path(_partial_info_plist, root_build_dir) ]
+      }
+    }
+  }
+}
+
+# Generates Info.plist files for iOS apps and frameworks.
+#
+# Arguments
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
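+#
+# A minimal usage sketch (hypothetical names; the extra substitution key is
+# illustrative only):
+#
+#     ios_info_plist("foo_info_plist") {
+#       executable_name = "foo"
+#       info_plist = "resources/Info.plist"
+#       extra_substitutions = [ "FOO_BUNDLE_ID=org.example.foo" ]
+#     }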
+template("ios_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  apple_info_plist(target_name) {
+    format = "binary1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    extra_substitutions += [
+      "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix",
+      "IOS_PLATFORM_BUILD=$ios_platform_build",
+      "IOS_PLATFORM_NAME=$ios_sdk_name",
+      "IOS_PLATFORM_VERSION=$ios_sdk_version",
+      "IOS_SDK_BUILD=$ios_sdk_build",
+      "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version",
+      "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform",
+      "BUILD_MACHINE_OS_BUILD=$machine_os_build",
+      "IOS_DEPLOYMENT_TARGET=$ios_deployment_target",
+      "XCODE_BUILD=$xcode_build",
+      "XCODE_VERSION=$xcode_version",
+    ]
+    plist_templates = [
+      "//build/config/ios/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [ invoker.info_plist_target ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "executable_name",
+                             "output_name",
+                             "visibility",
+                             "testonly",
+                           ])
+  }
+}
+
+# Template to build an application bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on iOS.
+# As the template forwards the generation of the application executable to
+# an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+#   output_name:
+#       (optional) string, name of the generated application; if omitted,
+#       defaults to the target_name.
+#
+#   extra_substitutions:
+#       (optional) list of string in "key=value" format; each value will
+#       be used as an additional variable substitution rule when generating
+#       the application Info.plist.
+#
+#   info_plist:
+#       (optional) string, path to the Info.plist file that will be used for
+#       the bundle.
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
+#   entitlements_path:
+#       (optional) path to the template to use to generate the application
+#       entitlements by performing variable substitutions, defaults to
+#       //build/config/ios/entitlements.plist.
+#
+#   entitlements_target:
+#       (optional) label of the target generating the application
+#       entitlements (must generate a single file as output); cannot be
+#       defined if entitlements_path is set.
+#
+#   product_type
+#       (optional) string, product type for the generated Xcode project,
+#       defaults to "com.apple.product-type.application". Should only be
+#       overridden when building an application extension.
+#
+#   enable_code_signing
+#       (optional) boolean, controls whether code signing is enabled or not;
+#       defaults to ios_enable_code_signing if not defined.
+#
+#   variants
+#       (optional) list of scopes, each scope needs to define the attributes
+#       "name" and "bundle_deps"; if defined and non-empty, then one bundle
+#       named $target_out_dir/$variant/$output_name will be created for each
+#       variant with the same binary but the correct bundle_deps, the bundle
+#       at $target_out_dir/$output_name will be a copy of the first variant.
+#
+#   xcode_product_bundle_id:
+#       (optional) string, the bundle ID that will be added in the Xcode
+#       attributes to enable some features when debugging (e.g. MetricKit).
+#       defaults to "$ios_app_bundle_id_prefix.$output_name".
+#
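+# A minimal usage sketch (hypothetical names; "variants" is optional and is
+# shown here with a single variant bundle):
+#
+#     ios_app_bundle("foo") {
+#       info_plist = "resources/Info.plist"
+#       sources = [ "main.mm" ]
+#       variants = [
+#         {
+#           name = "blue"
+#           bundle_deps = [ ":blue_resources" ]
+#         },
+#       ]
+#     }
+#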
+# For more information, see "gn help executable".
+template("ios_app_bundle") {
+  _output_name = target_name
+  _target_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _primary_toolchain = current_toolchain
+  if (is_fat_secondary_toolchain) {
+    _primary_toolchain = primary_fat_toolchain_name
+  }
+
+  assert(
+      !defined(invoker.bundle_extension),
+      "bundle_extension must not be set for ios_app_bundle template for $target_name")
+
+  _xcode_product_bundle_id = "$ios_app_bundle_id_prefix.$_output_name"
+  if (defined(invoker.xcode_product_bundle_id)) {
+    _xcode_product_bundle_id = invoker.xcode_product_bundle_id
+    _xcode_product_bundle_id =
+        "$ios_app_bundle_id_prefix.$_xcode_product_bundle_id"
+  } else if (defined(invoker.bundle_id)) {
+    _xcode_product_bundle_id = invoker.bundle_id
+  }
+
+  # Bundle ID should respect RFC 1034 and replace _ with -.
+  _xcode_product_bundle_id =
+      string_replace("$_xcode_product_bundle_id", "_", "-")
+
+  _arch_executable_source = _target_name + "_arch_executable_sources"
+  _arch_executable_target = _target_name + "_arch_executable"
+  _lipo_executable_target = _target_name + "_executable"
+
+  if (defined(invoker.variants) && invoker.variants != []) {
+    _variants = []
+
+    foreach(_variant, invoker.variants) {
+      assert(defined(_variant.name) && _variant.name != "",
+             "name must be defined for all $target_name variants")
+
+      assert(defined(_variant.bundle_deps),
+             "bundle_deps must be defined for all $target_name variants")
+
+      _variants += [
+        {
+          name = _variant.name
+          bundle_deps = _variant.bundle_deps
+          target_name = "${_target_name}_variants_${_variant.name}"
+          bundle_gen_dir = "$root_out_dir/variants/${_variant.name}"
+        },
+      ]
+    }
+  } else {
+    # If no variants are passed to the template, use a fake variant with
+    # no name to avoid duplicating code. As no variant can have an empty
+    # name except this fake variant, it is possible to know if a variant
+    # is fake or not.
+    _variants = [
+      {
+        name = ""
+        bundle_deps = []
+        target_name = _target_name
+        bundle_gen_dir = root_out_dir
+      },
+    ]
+  }
+
+  _default_variant = _variants[0]
+
+  source_set(_arch_executable_source) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "bundle_extension",
+                             "enable_code_signing",
+                             "entitlements_path",
+                             "entitlements_target",
+                             "extra_substitutions",
+                             "extra_system_frameworks",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "product_type",
+                             "visibility",
+                             "xcode_extra_attributes",
+                           ])
+
+    visibility = [ ":$_arch_executable_target" ]
+  }
+
+  if (!is_fat_secondary_toolchain || target_environment == "simulator") {
+    _generate_entitlements_target = _target_name + "_gen_entitlements"
+    _generate_entitlements_output =
+        get_label_info(":$_generate_entitlements_target($_primary_toolchain)",
+                       "target_out_dir") + "/$_output_name.xcent"
+  }
+
+  _product_type = _ios_xcode_app_bundle_id
+  if (defined(invoker.product_type)) {
+    _product_type = invoker.product_type
+  }
+
+  if (_product_type == _ios_xcode_app_bundle_id) {
+    _bundle_extension = ".app"
+  } else if (_product_type == _ios_xcode_appex_bundle_id) {
+    _bundle_extension = ".appex"
+  } else {
+    assert(false, "unknown product_type \"$_product_type\" for $_target_name")
+  }
+
+  _is_app_bundle = _product_type == _ios_xcode_app_bundle_id
+
+  executable(_arch_executable_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "bundle_extension",
+                             "enable_code_signing",
+                             "entitlements_path",
+                             "entitlements_target",
+                             "extra_substitutions",
+                             "extra_system_frameworks",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "product_type",
+                             "sources",
+                             "visibility",
+                             "xcode_extra_attributes",
+                           ])
+
+    visibility = [ ":$_lipo_executable_target($_primary_toolchain)" ]
+    if (is_fat_secondary_toolchain) {
+      visibility += [ ":$_target_name" ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_arch_executable_source" ]
+
+    if (!defined(frameworks)) {
+      frameworks = []
+    }
+    frameworks += [ "UIKit.framework" ]
+
+    if (target_environment == "simulator") {
+      deps += [ ":$_generate_entitlements_target($_primary_toolchain)" ]
+
+      if (!defined(inputs)) {
+        inputs = []
+      }
+      inputs += [ _generate_entitlements_output ]
+
+      if (!defined(ldflags)) {
+        ldflags = []
+      }
+      ldflags += [ "-Wl,-sectcreate,__TEXT,__entitlements," +
+                   rebase_path(_generate_entitlements_output, root_build_dir) ]
+    }
+
+    output_name = _output_name
+    output_prefix_override = true
+    output_dir = "$target_out_dir/$current_cpu"
+  }
+
+  if (is_fat_secondary_toolchain) {
+    # For fat builds, only the default toolchain will generate an application
+    # bundle. For the other toolchains, the template is only used for building
+    # the arch-specific binary, thus the default target is just a group().
+
+    group(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [ ":$_arch_executable_target" ]
+    }
+  } else {
+    lipo_binary(_lipo_executable_target) {
+      forward_variables_from(invoker,
+                             [
+                               "configs",
+                               "testonly",
+                             ])
+
+      visibility = []
+      foreach(_variant, _variants) {
+        visibility += [ ":${_variant.target_name}" ]
+      }
+
+      output_name = _output_name
+      arch_binary_target = ":$_arch_executable_target"
+      arch_binary_output = _output_name
+    }
+
+    _generate_info_plist = target_name + "_generate_info_plist"
+    ios_info_plist(_generate_info_plist) {
+      forward_variables_from(invoker,
+                             [
+                               "extra_substitutions",
+                               "info_plist",
+                               "info_plist_target",
+                             ])
+
+      executable_name = _output_name
+    }
+
+    if (!is_fat_secondary_toolchain) {
+      if (!defined(invoker.entitlements_target)) {
+        _entitlements_path = "//build/config/ios/entitlements.plist"
+        if (defined(invoker.entitlements_path)) {
+          _entitlements_path = invoker.entitlements_path
+        }
+      } else {
+        assert(!defined(invoker.entitlements_path),
+               "Cannot define both entitlements_path and entitlements_target " +
+                   "for $_target_name")
+
+        _entitlements_target_outputs =
+            get_target_outputs(invoker.entitlements_target)
+        _entitlements_path = _entitlements_target_outputs[0]
+      }
+
+      action(_generate_entitlements_target) {
+        _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
+        _info_plist_path = _gen_info_plist_outputs[0]
+
+        script = "//build/config/ios/codesign.py"
+        deps = [ ":$_generate_info_plist" ]
+        if (defined(invoker.entitlements_target)) {
+          deps += [ invoker.entitlements_target ]
+        }
+        sources = [
+          _entitlements_path,
+          _info_plist_path,
+        ]
+        outputs = [ _generate_entitlements_output ]
+
+        args = [
+                 "generate-entitlements",
+                 "-e=" + rebase_path(_entitlements_path, root_build_dir),
+                 "-p=" + rebase_path(_info_plist_path, root_build_dir),
+               ] + rebase_path(outputs, root_build_dir)
+      }
+    }
+
+    # Only write PkgInfo for real applications, not application extensions.
+    if (_is_app_bundle) {
+      _create_pkg_info = target_name + "_pkg_info"
+      action(_create_pkg_info) {
+        forward_variables_from(invoker, [ "testonly" ])
+        script = "//build/apple/write_pkg_info.py"
+        inputs = [ "//build/apple/plist_util.py" ]
+        sources = get_target_outputs(":$_generate_info_plist")
+        outputs = [
+          # Cannot name the output PkgInfo as the name would not be unique if
+          # multiple ios_app_bundle targets are defined in the same BUILD.gn
+          # file. The file is renamed in the bundle_data outputs to the
+          # correct name.
+          "$target_gen_dir/$target_name",
+        ]
+        args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+               [ "--output" ] + rebase_path(outputs, root_build_dir)
+        deps = [ ":$_generate_info_plist" ]
+      }
+
+      _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
+      bundle_data(_bundle_data_pkg_info) {
+        forward_variables_from(invoker, [ "testonly" ])
+        sources = get_target_outputs(":$_create_pkg_info")
+        outputs = [ "{{bundle_resources_dir}}/PkgInfo" ]
+        public_deps = [ ":$_create_pkg_info" ]
+      }
+    }
+
+    foreach(_variant, _variants) {
+      create_signed_bundle(_variant.target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "bundle_deps",
+                                 "bundle_deps_filter",
+                                 "data_deps",
+                                 "deps",
+                                 "enable_code_signing",
+                                 "entitlements_path",
+                                 "entitlements_target",
+                                 "extra_system_frameworks",
+                                 "public_configs",
+                                 "public_deps",
+                                 "testonly",
+                                 "visibility",
+                                 "xcode_extra_attributes",
+                               ])
+
+        output_name = _output_name
+        bundle_gen_dir = _variant.bundle_gen_dir
+        bundle_binary_target = ":$_lipo_executable_target"
+        bundle_binary_output = _output_name
+        bundle_extension = _bundle_extension
+        product_type = _product_type
+        xcode_product_bundle_id = _xcode_product_bundle_id
+
+        _generate_info_plist_outputs =
+            get_target_outputs(":$_generate_info_plist")
+        primary_info_plist = _generate_info_plist_outputs[0]
+        partial_info_plist =
+            "$target_gen_dir/${_variant.target_name}_partial_info.plist"
+
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += [ ":$_generate_info_plist" ]
+
+        if (!defined(bundle_deps)) {
+          bundle_deps = []
+        }
+        if (_is_app_bundle) {
+          bundle_deps += [ ":$_bundle_data_pkg_info" ]
+        }
+        bundle_deps += _variant.bundle_deps
+
+        if (target_environment == "simulator") {
+          if (!defined(data_deps)) {
+            data_deps = []
+          }
+          data_deps += [ "//testing/iossim" ]
+        }
+      }
+    }
+
+    if (_default_variant.name != "") {
+      _bundle_short_name = "$_output_name$_bundle_extension"
+      action(_target_name) {
+        forward_variables_from(invoker, [ "testonly" ])
+
+        script = "//build/config/ios/hardlink.py"
+        public_deps = []
+        foreach(_variant, _variants) {
+          public_deps += [ ":${_variant.target_name}" ]
+        }
+
+        sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ]
+        outputs = [ "$root_out_dir/$_bundle_short_name" ]
+
+        args = rebase_path(sources, root_build_dir) +
+               rebase_path(outputs, root_build_dir)
+      }
+    }
+  }
+
+  if (is_fat_secondary_toolchain) {
+    not_needed("*")
+  }
+}
+
+set_defaults("ios_app_bundle") {
+  configs = default_executable_configs
+}
+
+# Template to build an application extension bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on iOS.
+# As the template forwards the generation of the application executable to
+# an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+#   output_name:
+#       (optional) string, name of the generated application; if omitted,
+#       defaults to the target_name.
+#
+#   extra_substitutions:
+#       (optional) list of string in "key=value" format; each value will
+#       be used as an additional variable substitution rule when generating
+#       the application Info.plist.
+#
+#   info_plist:
+#       (optional) string, path to the Info.plist file that will be used for
+#       the bundle.
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
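+# A minimal usage sketch (hypothetical names):
+#
+#     ios_appex_bundle("foo_share_extension") {
+#       info_plist = "extension/Info.plist"
+#       sources = [ "share_view_controller.mm" ]
+#     }
+#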
+# For more information, see "gn help executable".
+template("ios_appex_bundle") {
+  ios_app_bundle(target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_extension",
+                             "product_type",
+                           ])
+    product_type = _ios_xcode_appex_bundle_id
+  }
+}
+
+set_defaults("ios_appex_bundle") {
+  configs = [ "//build/config/ios:ios_extension_executable_flags" ]
+}
+
+# Template to compile .xib and .storyboard files.
+#
+# Arguments
+#
+#     sources:
+#         list of string, sources to compile
+#
+#     ibtool_flags:
+#         (optional) list of string, additional flags to pass to the ibtool
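+#
+#     output_extension:
+#         string, extension (without the leading dot) of the compiled output
+#         files, e.g. "nib" for .xib sources or "storyboardc" for
+#         .storyboard sources.
+#
+# A minimal usage sketch (hypothetical names):
+#
+#     compile_ib_files("foo_xib") {
+#       sources = [ "foo.xib" ]
+#       output_extension = "nib"
+#     }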
+template("compile_ib_files") {
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    assert(defined(invoker.sources),
+           "sources must be specified for $target_name")
+    assert(defined(invoker.output_extension),
+           "output_extension must be specified for $target_name")
+
+    ibtool_flags = []
+    if (defined(invoker.ibtool_flags)) {
+      ibtool_flags = invoker.ibtool_flags
+    }
+
+    _output_extension = invoker.output_extension
+
+    script = "//build/config/ios/compile_ib_files.py"
+    sources = invoker.sources
+    outputs = [
+      "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
+    ]
+    args = [
+      "--input",
+      "{{source}}",
+      "--output",
+      rebase_path(
+          "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
+          root_build_dir),
+    ]
+    args += ibtool_flags
+  }
+}
+
+# Compile a xib or storyboard file and add it to a bundle_data so that it is
+# available at runtime in the bundle.
+#
+# Arguments
+#
+#   source:
+#       string, path of the xib or storyboard to compile.
+#
+# Forwards all variables to the bundle_data target.
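+#
+# A minimal usage sketch (hypothetical names):
+#
+#     bundle_data_ib_file("foo_view") {
+#       source = "resources/foo_view.xib"
+#     }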
+template("bundle_data_ib_file") {
+  assert(defined(invoker.source), "source needs to be defined for $target_name")
+
+  _source_extension = get_path_info(invoker.source, "extension")
+  assert(_source_extension == "xib" || _source_extension == "storyboard",
+         "source must be a .xib or .storyboard for $target_name")
+
+  _target_name = target_name
+  if (_source_extension == "xib") {
+    _compile_ib_file = target_name + "_compile_xib"
+    _output_extension = "nib"
+  } else {
+    _compile_ib_file = target_name + "_compile_storyboard"
+    _output_extension = "storyboardc"
+  }
+
+  compile_ib_files(_compile_ib_file) {
+    sources = [ invoker.source ]
+    output_extension = _output_extension
+    visibility = [ ":$_target_name" ]
+    ibtool_flags = [
+      "--minimum-deployment-target",
+      ios_deployment_target,
+      "--auto-activate-custom-fonts",
+      "--target-device",
+      "iphone",
+      "--target-device",
+      "ipad",
+    ]
+  }
+
+  bundle_data(_target_name) {
+    forward_variables_from(invoker, "*", [ "source" ])
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_compile_ib_file" ]
+
+    sources = get_target_outputs(":$_compile_ib_file")
+
+    outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+  }
+}
+
+# Compile a strings file and add it to a bundle_data so that it is available
+# at runtime in the bundle.
+#
+# Arguments
+#
+#   source:
+#       string, path of the strings file to compile.
+#
+#   output:
+#       string, path of the compiled file in the final bundle.
+#
+# Forwards all variables to the bundle_data target.
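+#
+# A minimal usage sketch (hypothetical names):
+#
+#     bundle_data_strings("foo_localizable_strings") {
+#       source = "resources/en.lproj/Localizable.strings"
+#       output = "{{bundle_resources_dir}}/en.lproj/Localizable.strings"
+#     }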
+template("bundle_data_strings") {
+  assert(defined(invoker.source), "source needs to be defined for $target_name")
+  assert(defined(invoker.output), "output needs to be defined for $target_name")
+
+  _source_extension = get_path_info(invoker.source, "extension")
+  assert(_source_extension == "strings",
+         "source must be a .strings for $target_name")
+
+  _target_name = target_name
+  _convert_target = target_name + "_compile_strings"
+
+  convert_plist(_convert_target) {
+    visibility = [ ":$_target_name" ]
+    source = invoker.source
+    output =
+        "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file")
+    format = "binary1"
+  }
+
+  bundle_data(_target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "source",
+                             "output",
+                           ])
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_convert_target" ]
+
+    sources = get_target_outputs(":$_convert_target")
+
+    outputs = [ invoker.output ]
+  }
+}
+
+# Template to package a shared library into an iOS framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+#     output_name:
+#         (optional) string, name of the generated framework without the
+#         .framework suffix. If omitted, defaults to target_name.
+#
+#     public_headers:
+#         (optional) list of paths to header files that need to be copied
+#         into the framework bundle Headers subdirectory. If omitted or
+#         empty then the Headers subdirectory is not created.
+#
+#     sources
+#         (optional) list of files. Needs to be defined and non-empty if
+#         public_headers is defined and non-empty.
+#
+#   enable_code_signing
+#       (optional) boolean, controls whether code signing is enabled or not;
+#       defaults to ios_enable_code_signing if not defined.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+#   - $target_name only adds a build-time dependency. Targets that depend on
+#     it will not link against the framework.
+#   - $target_name+link adds a build-time and link-time dependency. Targets
+#     that depend on it will link against the framework.
+#
+# The build-time-only dependency is used for when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+#     framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+#     framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#       ldflags = [
+#         "-install_name",
+#         "@executable_path/../Frameworks/$target_name.framework"
+#       ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation+link" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("ios_framework_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _has_public_headers =
+      defined(invoker.public_headers) && invoker.public_headers != []
+
+  _primary_toolchain = current_toolchain
+  if (is_fat_secondary_toolchain) {
+    _primary_toolchain = primary_fat_toolchain_name
+  }
+
+  # Public configs are not propagated across toolchains (see crbug.com/675224)
+  # so some configs have to be defined for both default_toolchain and all
+  # other toolchains when performing a fat build. Use "get_label_info" to
+  # construct the path since they need to be relative to the default_toolchain.
+
+  _default_toolchain_root_out_dir =
+      get_label_info("$_target_name($_primary_toolchain)", "root_out_dir")
+
+  _arch_shared_library_source = _target_name + "_arch_shared_library_sources"
+  _arch_shared_library_target = _target_name + "_arch_shared_library"
+  _lipo_shared_library_target = _target_name + "_shared_library"
+  _link_target_name = _target_name + "+link"
+
+  if (_has_public_headers) {
+    _default_toolchain_target_gen_dir =
+        get_label_info("$_target_name($_primary_toolchain)", "target_gen_dir")
+
+    _framework_headers_target = _target_name + "_framework_headers"
+
+    _headers_map_config = _target_name + "_headers_map"
+    _header_map_filename =
+        "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap"
+    config(_headers_map_config) {
+      visibility = [
+        ":${_arch_shared_library_source}",
+        ":${_target_name}_signed_bundle",
+      ]
+      include_dirs = [ _header_map_filename ]
+    }
+  }
+
+  _framework_headers_config = _target_name + "_framework_headers_config"
+  config(_framework_headers_config) {
+    framework_dirs = [ _default_toolchain_root_out_dir ]
+  }
+
+  _framework_public_config = _target_name + "_public_config"
+  config(_framework_public_config) {
+    configs = [ ":$_framework_headers_config" ]
+    frameworks = [ "$_output_name.framework" ]
+  }
+
+  source_set(_arch_shared_library_source) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "enable_code_signing",
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "public_configs",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_arch_shared_library_target" ]
+
+    if (_has_public_headers) {
+      configs += [ ":$_headers_map_config" ]
+
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$_framework_headers_target($_primary_toolchain)" ]
+    }
+  }
+
+  shared_library(_arch_shared_library_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "enable_code_signing",
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "sources",
+                             "public_configs",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_lipo_shared_library_target($_primary_toolchain)" ]
+    if (is_fat_secondary_toolchain) {
+      visibility += [
+        ":${_target_name}",
+        ":${_target_name}_signed_bundle",
+      ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_arch_shared_library_source" ]
+    if (_has_public_headers) {
+      deps += [ ":$_framework_headers_target($_primary_toolchain)" ]
+    }
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags +=
+        [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ]
+
+    output_extension = ""
+    output_name = _output_name
+    output_prefix_override = true
+    output_dir = "$target_out_dir/$current_cpu"
+  }
+
+  if (is_fat_secondary_toolchain) {
+    # For fat builds, only the default toolchain will generate a framework
+    # bundle. For the other toolchains, the template is only used for building
+    # the arch-specific binary, thus the default target is just a group().
+
+    group(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [ ":$_arch_shared_library_target" ]
+    }
+
+    group(_link_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "public_configs",
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [ ":$_link_target_name($_primary_toolchain)" ]
+
+      if (_has_public_headers) {
+        if (!defined(public_configs)) {
+          public_configs = []
+        }
+        public_configs += [ ":$_framework_headers_config" ]
+      }
+      if (!defined(all_dependent_configs)) {
+        all_dependent_configs = []
+      }
+      all_dependent_configs += [ ":$_framework_public_config" ]
+    }
+
+    group("$_target_name+bundle") {
+      forward_variables_from(invoker, [ "testonly" ])
+      public_deps = [ ":$_target_name+bundle($_primary_toolchain)" ]
+    }
+
+    not_needed(invoker, "*")
+  } else {
+    if (_has_public_headers) {
+      _public_headers = invoker.public_headers
+
+      _framework_root_dir = "$root_out_dir/$_output_name.framework"
+      if (target_environment == "simulator" || target_environment == "device") {
+        _framework_contents_dir = _framework_root_dir
+      } else if (target_environment == "catalyst") {
+        _framework_contents_dir = "$_framework_root_dir/Versions/A"
+      }
+
+      _compile_headers_map_target = _target_name + "_compile_headers_map"
+      action(_compile_headers_map_target) {
+        visibility = [ ":$_framework_headers_target" ]
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "public_deps",
+                                 "testonly",
+                               ])
+        script = "//build/config/ios/write_framework_hmap.py"
+        outputs = [ _header_map_filename ]
+
+        # The header map generation only wants the list of headers, not all
+        # of the sources, so filter any non-header source files from
+        # "sources". It is less error prone than having the developer
+        # duplicate the list of all headers in addition to "sources".
+        sources = []
+        foreach(_source, invoker.sources) {
+          if (get_path_info(_source, "extension") == "h") {
+            sources += [ _source ]
+          }
+        }
+
+        args = [
+                 rebase_path(_header_map_filename),
+                 rebase_path(_framework_root_dir, root_build_dir),
+               ] + rebase_path(sources, root_build_dir)
+      }
+
+      _create_module_map_target = _target_name + "_module_map"
+      action(_create_module_map_target) {
+        visibility = [ ":$_framework_headers_target" ]
+        script = "//build/config/ios/write_framework_modulemap.py"
+        outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ]
+        args = [
+          _output_name,
+          rebase_path("$_framework_contents_dir/Modules", root_build_dir),
+        ]
+      }
+
+      _copy_public_headers_target = _target_name + "_copy_public_headers"
+      copy(_copy_public_headers_target) {
+        forward_variables_from(invoker,
+                               [
+                                 "testonly",
+                                 "deps",
+                               ])
+        visibility = [ ":$_framework_headers_target" ]
+        sources = _public_headers
+        outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ]
+
+        # Do not use forward_variables_from for "public_deps" as
+        # we do not want to forward those dependencies.
+        if (defined(invoker.public_deps)) {
+          if (!defined(deps)) {
+            deps = []
+          }
+          deps += invoker.public_deps
+        }
+      }
+
+      group(_framework_headers_target) {
+        forward_variables_from(invoker, [ "testonly" ])
+        deps = [
+          ":$_compile_headers_map_target",
+          ":$_create_module_map_target",
+        ]
+        public_deps = [ ":$_copy_public_headers_target" ]
+      }
+    }
+
+    lipo_binary(_lipo_shared_library_target) {
+      forward_variables_from(invoker,
+                             [
+                               "configs",
+                               "testonly",
+                             ])
+
+      visibility = [ ":${_target_name}_signed_bundle" ]
+      output_name = _output_name
+      arch_binary_target = ":$_arch_shared_library_target"
+      arch_binary_output = _output_name
+    }
+
+    _info_plist_target = _target_name + "_info_plist"
+    _info_plist_bundle = _target_name + "_info_plist_bundle"
+    ios_info_plist(_info_plist_target) {
+      visibility = [ ":$_info_plist_bundle" ]
+      executable_name = _output_name
+      forward_variables_from(invoker,
+                             [
+                               "extra_substitutions",
+                               "info_plist",
+                               "info_plist_target",
+                             ])
+    }
+
+    bundle_data(_info_plist_bundle) {
+      visibility = [ ":${_target_name}_signed_bundle" ]
+      forward_variables_from(invoker, [ "testonly" ])
+      sources = get_target_outputs(":$_info_plist_target")
+      public_deps = [ ":$_info_plist_target" ]
+
+      if (target_environment != "catalyst") {
+        outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+      } else {
+        outputs = [ "{{bundle_resources_dir}}/Info.plist" ]
+      }
+    }
+
+    create_signed_bundle(_target_name + "_signed_bundle") {
+      forward_variables_from(invoker,
+                             [
+                               "bundle_deps",
+                               "bundle_deps_filter",
+                               "data_deps",
+                               "deps",
+                               "enable_code_signing",
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+
+      product_type = "com.apple.product-type.framework"
+      bundle_extension = ".framework"
+
+      output_name = _output_name
+      bundle_binary_target = ":$_lipo_shared_library_target"
+      bundle_binary_output = _output_name
+
+      has_public_headers = _has_public_headers
+
+      # Frameworks do not have entitlements nor embedded mobileprovision
+      # files because they use the ones from the bundle embedding them
+      # (.app or .appex), as they are just dynamic libraries with shared
+      # code.
+      disable_entitlements = true
+      disable_embedded_mobileprovision = true
+
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$_info_plist_bundle" ]
+    }
+
+    group(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += [ ":${_target_name}_signed_bundle" ]
+
+      if (_has_public_headers) {
+        if (!defined(public_configs)) {
+          public_configs = []
+        }
+        public_configs += [ ":$_framework_headers_config" ]
+      }
+    }
+
+    group(_link_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += [ ":$_target_name" ]
+
+      if (!defined(all_dependent_configs)) {
+        all_dependent_configs = []
+      }
+      all_dependent_configs += [ ":$_framework_public_config" ]
+    }
+
+    bundle_data(_target_name + "+bundle") {
+      forward_variables_from(invoker,
+                             [
+                               "testonly",
+                               "visibility",
+                             ])
+      public_deps = [ ":$_target_name" ]
+      sources = [ "$root_out_dir/$_output_name.framework" ]
+      outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ]
+    }
+  }
+}
+
+set_defaults("ios_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to build an xctest bundle that contains a loadable module for iOS.
+#
+# Arguments
+#
+#   deps:
+#       list of labels to depend on; these values are used to create the
+#       loadable module.
+#
+#   product_type:
+#       string, product type for the generated Xcode project; use
+#       "com.apple.product-type.bundle.unit-test" for unit tests and
+#       "com.apple.product-type.bundle.ui-testing" for UI tests.
+#
+#   host_target:
+#       string, name of the target that depends on the generated bundle; this
+#       value is used to restrict visibility.
+#
+#   xcode_test_application_name:
+#       string, name of the test application for Xcode unit or ui test target.
+#
+#   output_name:
+#       (optional) string, name of the generated bundle; if omitted, defaults
+#       to target_name.
+#
+# This template defines two targets, one named "${target_name}" is the xctest
+# bundle, and the other named "${target_name}_bundle" is a bundle_data that
+# wraps the xctest bundle and that only the "${host_target}" can depend on.
+#
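+# A minimal sketch of an invocation (all target and source names below are
+# illustrative, not targets defined in this file):
+#
+#   ios_xctest_bundle("foo_tests_module") {
+#     product_type = "com.apple.product-type.bundle.unit-test"
+#     host_target = "foo_tests"
+#     xcode_test_application_name = "foo_tests"
+#     deps = [ ":foo_tests_sources" ]
+#   }
+#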
+template("ios_xctest_bundle") {
+  assert(defined(invoker.deps), "deps must be defined for $target_name")
+  assert(defined(invoker.product_type),
+         "product_type must be defined for $target_name")
+  assert(invoker.product_type == _ios_xcode_xctest_bundle_id ||
+             invoker.product_type == _ios_xcode_xcuitest_bundle_id,
+         "product_type defined for $target_name is invalid.")
+  assert(defined(invoker.host_target),
+         "host_target must be defined for $target_name")
+  assert(defined(invoker.xcode_test_application_name),
+         "xcode_test_application_name must be defined for $target_name")
+
+  # Silence the "assignment had no effect" error for non-default toolchains,
+  # as the following variables are only used in the expansion of the template
+  # for the default toolchain.
+  if (is_fat_secondary_toolchain) {
+    not_needed(invoker, "*")
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _arch_loadable_module_source = _target_name + "_arch_loadable_module_source"
+  _arch_loadable_module_target = _target_name + "_arch_loadable_module"
+  _lipo_loadable_module_target = _target_name + "_loadable_module"
+
+  _primary_toolchain = current_toolchain
+  if (is_fat_secondary_toolchain) {
+    _primary_toolchain = primary_fat_toolchain_name
+  }
+
+  source_set(_arch_loadable_module_source) {
+    forward_variables_from(invoker, [ "deps" ])
+
+    testonly = true
+    visibility = [ ":$_arch_loadable_module_target" ]
+  }
+
+  loadable_module(_arch_loadable_module_target) {
+    testonly = true
+    visibility = [ ":$_lipo_loadable_module_target($_primary_toolchain)" ]
+    if (is_fat_secondary_toolchain) {
+      visibility += [ ":$_target_name" ]
+    }
+
+    deps = [ ":$_arch_loadable_module_source" ]
+    configs += [ "//build/config/ios:xctest_config" ]
+
+    output_dir = "$target_out_dir/$current_cpu"
+    output_name = _output_name
+    output_prefix_override = true
+    output_extension = ""
+  }
+
+  if (is_fat_secondary_toolchain) {
+    # For fat builds, only the default toolchain will generate a test bundle.
+    # For the other toolchains, the template is only used for building the
+    # arch-specific binary, thus the default target is just a group().
+    group(_target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      testonly = true
+
+      public_deps = [ ":$_arch_loadable_module_target" ]
+    }
+
+    not_needed(invoker, "*")
+  } else {
+    _info_plist_target = _target_name + "_info_plist"
+    _info_plist_bundle = _target_name + "_info_plist_bundle"
+
+    ios_info_plist(_info_plist_target) {
+      testonly = true
+      visibility = [ ":$_info_plist_bundle" ]
+
+      info_plist = "//build/config/ios/Module-Info.plist"
+      executable_name = _output_name
+
+      if (defined(invoker.xctest_bundle_principal_class)) {
+        _principal_class = invoker.xctest_bundle_principal_class
+      } else {
+        # Fall back to a reasonable default value.
+        _principal_class = "NSObject"
+      }
+      extra_substitutions = [
+        "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}",
+        "MODULE_BUNDLE_ID=gtest.$_output_name",
+      ]
+    }
+
+    bundle_data(_info_plist_bundle) {
+      testonly = true
+      visibility = [ ":$_target_name" ]
+
+      public_deps = [ ":$_info_plist_target" ]
+
+      sources = get_target_outputs(":$_info_plist_target")
+      outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+    }
+
+    lipo_binary(_lipo_loadable_module_target) {
+      forward_variables_from(invoker, [ "configs" ])
+
+      testonly = true
+      visibility = [ ":$_target_name" ]
+
+      output_name = _output_name
+      arch_binary_target = ":$_arch_loadable_module_target"
+      arch_binary_output = _output_name
+    }
+
+    _xctest_bundle = _target_name + "_bundle"
+    create_signed_bundle(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "bundle_id",
+                               "data_deps",
+                               "enable_code_signing",
+                               "product_type",
+                               "xcode_test_application_name",
+                             ])
+
+      testonly = true
+      visibility = [ ":$_xctest_bundle" ]
+
+      bundle_extension = ".xctest"
+
+      output_name = _output_name
+      bundle_binary_target = ":$_lipo_loadable_module_target"
+      bundle_binary_output = _output_name
+
+      if (ios_set_attributes_for_xcode_project_generation) {
+        _xcode_product_bundle_id =
+            "$ios_app_bundle_id_prefix.gtest.$_output_name"
+
+        _ios_provisioning_profile_info =
+            exec_script("//build/config/ios/codesign.py",
+                        [
+                          "find-provisioning-profile",
+                          "-b=" + _xcode_product_bundle_id,
+                        ],
+                        "json")
+
+        xcode_extra_attributes = {
+          IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
+          CODE_SIGN_IDENTITY = "iPhone Developer"
+          DEVELOPMENT_TEAM = _ios_provisioning_profile_info.team_identifier
+          PRODUCT_BUNDLE_IDENTIFIER = _xcode_product_bundle_id
+          PROVISIONING_PROFILE_SPECIFIER = _ios_provisioning_profile_info.name
+
+          # For XCUITest, Xcode requires specifying the host application name
+          # via the TEST_TARGET_NAME attribute.
+          if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) {
+            TEST_TARGET_NAME = invoker.xcode_test_application_name
+          }
+
+          # For XCTest, Xcode requires specifying the host application path via
+          # both BUNDLE_LOADER and TEST_HOST attributes.
+          if (invoker.product_type == _ios_xcode_xctest_bundle_id) {
+            _xcode_app_name = invoker.xcode_test_application_name
+            if (defined(invoker.xcode_test_application_output_name)) {
+              _xcode_app_name = invoker.xcode_test_application_output_name
+            }
+
+            BUNDLE_LOADER = "\$(TEST_HOST)"
+            TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" +
+                        "${_xcode_app_name}.app/${_xcode_app_name}"
+          }
+        }
+      } else {
+        not_needed(invoker,
+                   [
+                     "xcode_test_application_name",
+                     "xcode_test_application_output_name",
+                   ])
+      }
+
+      deps = [ ":$_info_plist_bundle" ]
+    }
+
+    bundle_data(_xctest_bundle) {
+      forward_variables_from(invoker, [ "host_target" ])
+
+      testonly = true
+      visibility = [ ":$host_target" ]
+
+      public_deps = [ ":$_target_name" ]
+      sources = [ "$root_out_dir/$_output_name.xctest" ]
+      outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ]
+    }
+  }
+}
+
+set_defaults("ios_xctest_bundle") {
+  configs = default_shared_library_configs
+}
+
+# For Chrome on iOS we want to run XCTests for all our build configurations
+# (Debug, Release, ...). In addition, symbol visibility is configured to
+# private by default. To simplify testing with those constraints, our tests
+# are compiled in the TEST_HOST target instead of the .xctest bundle.
+template("ios_xctest_test") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _xctest_target = _target_name + "_module"
+  _xctest_output = _output_name + "_module"
+
+  _host_target = _target_name
+  _host_output = _output_name
+
+  # Allow invokers to specify their own target for the xctest module, but
+  # fall back to a default (empty) module otherwise.
+  if (defined(invoker.xctest_module_target)) {
+    _xctest_module_target = invoker.xctest_module_target
+  } else {
+    _xctest_module_target_name = _xctest_target + "shell_source"
+    _xctest_module_target = ":$_xctest_module_target_name"
+    source_set(_xctest_module_target_name) {
+      sources = [ "//build/config/ios/xctest_shell.mm" ]
+
+      configs += [ "//build/config/ios:xctest_config" ]
+    }
+  }
+
+  ios_xctest_bundle(_xctest_target) {
+    forward_variables_from(invoker, [ "data_deps" ])
+    output_name = _xctest_output
+    product_type = _ios_xcode_xctest_bundle_id
+    host_target = _host_target
+
+    # TODO(crbug.com/1056328): The change in output name results in a mismatch
+    # between this value and the ios_app_bundle target name. To mitigate this,
+    # xcode_test_application_name is set to _host_target and the output name
+    # is passed separately via xcode_test_application_output_name.
+    xcode_test_application_name = _host_target
+    xcode_test_application_output_name = _host_output
+
+    deps = [ _xctest_module_target ]
+  }
+
+  ios_app_bundle(_host_target) {
+    forward_variables_from(invoker, "*", [ "testonly" ])
+
+    testonly = true
+    output_name = _host_output
+    configs += [ "//build/config/ios:xctest_config" ]
+
+    if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) {
+      info_plist = "//build/config/ios/Host-Info.plist"
+    }
+
+    # Xcode needs the following frameworks installed in the application (and
+    # signed) for the XCTest to run, so install them using
+    # extra_system_frameworks.
+    extra_system_frameworks = [
+      "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework",
+      "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework",
+      "$ios_sdk_platform_path/Developer/usr/lib/libXCTestBundleInject.dylib",
+    ]
+
+    _xctest_bundle = _xctest_target + "_bundle"
+    if (!is_fat_secondary_toolchain) {
+      if (!defined(bundle_deps)) {
+        bundle_deps = []
+      }
+      bundle_deps += [ ":$_xctest_bundle" ]
+    }
+  }
+}
+
+set_defaults("ios_xctest_test") {
+  configs = default_executable_configs
+}
+
+# Template to build an xcuitest test runner bundle.
+#
+# Xcode requires a test runner application, with a copy of the XCTest dynamic
+# library bundle in it, for the XCUITest to run. The test runner bundle is
+# created by copying the system bundle XCTRunner.app from the Xcode SDK,
+# tweaking its plist file, and embedding the xctest bundle; it needs to be
+# code signed in order to run on devices.
+#
+# Arguments
+#
+#   xctest_bundle:
+#       string, name of the dependent xctest bundle target.
+#
+#   output_name:
+#       (optional) string, name of the generated application; if omitted,
+#       defaults to target_name.
+#
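+# A minimal sketch of an invocation (names are illustrative only):
+#
+#   ios_xcuitest_test_runner_bundle("foo_egtests_runner") {
+#     output_name = "foo_egtests-Runner"
+#     xctest_bundle = "foo_egtests_module_bundle"
+#   }
+#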
+template("ios_xcuitest_test_runner_bundle") {
+  assert(defined(invoker.xctest_bundle),
+         "xctest_bundle must be defined for $target_name")
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _xctrunner_path =
+      "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app"
+
+  _info_plist_merge_plist = _target_name + "_info_plist_merge_plist"
+  _info_plist_target = _target_name + "_info_plist"
+  _info_plist_bundle = _target_name + "_info_plist_bundle"
+
+  action(_info_plist_merge_plist) {
+    testonly = true
+    script = "//build/apple/plist_util.py"
+
+    sources = [
+      "$_xctrunner_path/Info.plist",
+
+      # NOTE: The XCTRunnerAddition+Info.plist must come after the Info.plist
+      # because it overrides the values under "CFBundleIdentifier" and
+      # "CFBundleName".
+      "//build/config/ios/resources/XCTRunnerAddition+Info.plist",
+    ]
+
+    _output_name = "$target_gen_dir/${_target_name}_merged.plist"
+    outputs = [ _output_name ]
+    args = [
+             "merge",
+             "-f=xml1",
+             "-x=$xcode_version",
+             "-o=" + rebase_path(_output_name, root_build_dir),
+           ] + rebase_path(sources, root_build_dir)
+
+    if (use_system_xcode && use_goma) {
+      deps = [ "//build/config/ios:copy_xctrunner_app" ]
+    }
+  }
+
+  ios_info_plist(_info_plist_target) {
+    testonly = true
+    visibility = [ ":$_info_plist_bundle" ]
+
+    executable_name = _output_name
+    info_plist_target = ":$_info_plist_merge_plist"
+  }
+
+  bundle_data(_info_plist_bundle) {
+    testonly = true
+    visibility = [ ":$_target_name" ]
+
+    public_deps = [ ":$_info_plist_target" ]
+
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+  }
+
+  _pkginfo_bundle = _target_name + "_pkginfo_bundle"
+  bundle_data(_pkginfo_bundle) {
+    testonly = true
+    visibility = [ ":$_target_name" ]
+
+    sources = [ "$_xctrunner_path/PkgInfo" ]
+
+    outputs = [ "{{bundle_contents_dir}}/PkgInfo" ]
+
+    if (use_system_xcode && use_goma) {
+      public_deps = [ "//build/config/ios:copy_xctrunner_app" ]
+    }
+  }
+
+  _xctest_bundle = invoker.xctest_bundle
+  create_signed_bundle(_target_name) {
+    testonly = true
+
+    bundle_binary_target = "//build/config/ios:xctest_runner_without_arm64e"
+    bundle_binary_output = "XCTRunner"
+    bundle_extension = ".app"
+    product_type = _ios_xcode_app_bundle_id
+
+    output_name = _output_name
+
+    # Xcode needs the following frameworks installed in the application
+    # (and signed) for the XCUITest to run, so install them using
+    # extra_system_frameworks.
+    extra_system_frameworks = [
+      "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework",
+      "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework",
+    ]
+
+    bundle_deps = []
+    if (defined(invoker.bundle_deps)) {
+      bundle_deps += invoker.bundle_deps
+    }
+    bundle_deps += [
+      ":$_info_plist_bundle",
+      ":$_pkginfo_bundle",
+      ":$_xctest_bundle",
+    ]
+  }
+}
+
+# Template to build a XCUITest that consists of two parts: the test runner
+# application bundle and the xctest dynamic library.
+#
+# Arguments
+#
+#   deps:
+#       list of labels to depend on; these values are used to create the
+#       xctest dynamic library.
+#
+#   xcode_test_application_name:
+#       string, name of the test application for the ui test target.
+#
+# This template defines two targets, one named "${target_name}_module" is the
+# xctest dynamic library, and the other named "${target_name}_runner" is the
+# test runner application bundle.
+#
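+# A minimal sketch of an invocation (names are illustrative only):
+#
+#   ios_xcuitest_test("foo_egtests") {
+#     xcode_test_application_name = "foo_host_app"
+#     deps = [ ":foo_egtests_sources" ]
+#   }
+#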
+template("ios_xcuitest_test") {
+  assert(defined(invoker.deps), "deps must be defined for $target_name")
+  assert(defined(invoker.xcode_test_application_name),
+         "xcode_test_application_name must be defined for $target_name")
+
+  _xcuitest_target = target_name
+  if (defined(invoker.output_name)) {
+    _xcuitest_target = invoker.output_name
+  }
+
+  _xcuitest_runner_target = _xcuitest_target + "_runner"
+  _xcuitest_module_target = _xcuitest_target + "_module"
+
+  group(target_name) {
+    testonly = true
+
+    deps = [ ":$_xcuitest_runner_target" ]
+  }
+
+  _xcuitest_module_output = _xcuitest_target
+  ios_xctest_bundle(_xcuitest_module_target) {
+    forward_variables_from(invoker,
+                           [
+                             "xcode_test_application_name",
+                             "xctest_bundle_principal_class",
+                             "data_deps",
+                           ])
+
+    product_type = _ios_xcode_xcuitest_bundle_id
+    host_target = _xcuitest_runner_target
+    output_name = _xcuitest_module_output
+
+    deps = invoker.deps
+  }
+
+  _xcuitest_runner_output = _xcuitest_target + "-Runner"
+  ios_xcuitest_test_runner_bundle(_xcuitest_runner_target) {
+    output_name = _xcuitest_runner_output
+    xctest_bundle = _xcuitest_module_target + "_bundle"
+    forward_variables_from(invoker, [ "bundle_deps" ])
+  }
+}
+
+set_defaults("ios_xcuitest_test") {
+  configs = default_executable_configs
+}
diff --git a/src/build/config/ios/strip_arm64e.py b/src/build/config/ios/strip_arm64e.py
new file mode 100644
index 0000000..f21baf4
--- /dev/null
+++ b/src/build/config/ios/strip_arm64e.py
@@ -0,0 +1,70 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Strip arm64e architecture from a binary if present."""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def check_output(command):
+  """Returns the output from |command| or propagates error, quitting script."""
+  process = subprocess.Popen(
+      command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  outs, errs = process.communicate()
+  if process.returncode:
+    sys.stderr.write('error: command failed with retcode %d: %s\n\n' %
+                     (process.returncode, ' '.join(map(repr, command))))
+    sys.stderr.write(errs.decode('UTF-8', errors='ignore'))
+    sys.exit(process.returncode)
+  return outs.decode('UTF-8')
+
+
+def check_call(command):
+  """Invokes |command| or propagates error."""
+  check_output(command)
+
+
+def parse_args(args):
+  """Parses the command-line."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--input', required=True, help='Path to input binary')
+  parser.add_argument('--output', required=True, help='Path to output binary')
+  parser.add_argument('--xcode-version', required=True, help='Version of Xcode')
+  return parser.parse_args(args)
+
+
+def get_archs(path):
+  """Extracts the architectures present in binary at |path|."""
+  outputs = check_output(["xcrun", "lipo", "-info", os.path.abspath(path)])
+  return outputs.split(': ')[-1].split()
+
+
+def main(args):
+  parsed = parse_args(args)
+
+  outdir = os.path.dirname(parsed.output)
+  if not os.path.isdir(outdir):
+    os.makedirs(outdir)
+
+  if os.path.exists(parsed.output):
+    os.unlink(parsed.output)
+
+  # As "lipo" fails with an error if asked to remove an architecture that is
+  # not included, only use it if "arm64e" is present in the binary. Otherwise
+  # simply copy the file.
+  if 'arm64e' in get_archs(parsed.input):
+    check_output([
+        "xcrun", "lipo", "-remove", "arm64e", "-output",
+        os.path.abspath(parsed.output),
+        os.path.abspath(parsed.input)
+    ])
+  else:
+    shutil.copy(parsed.input, parsed.output)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/src/build/config/ios/write_framework_hmap.py b/src/build/config/ios/write_framework_hmap.py
new file mode 100644
index 0000000..ac467ee
--- /dev/null
+++ b/src/build/config/ios/write_framework_hmap.py
@@ -0,0 +1,103 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import struct
+import sys
+
+def Main(args):
+  if len(args) < 4:
+    print(
+        "Usage: %s output.hmap Foo.framework header1.h..." % args[0],
+        file=sys.stderr)
+    return 1
+
+  (out, framework, all_headers) = args[1], args[2], args[3:]
+
+  framework_name = os.path.basename(framework).split('.')[0]
+  all_headers = map(os.path.abspath, all_headers)
+  filelist = {}
+  for header in all_headers:
+    filename = os.path.basename(header)
+    filelist[filename] = header
+    filelist[os.path.join(framework_name, filename)] = header
+  WriteHmap(out, filelist)
+  return 0
+
+
+def NextGreaterPowerOf2(x):
+  return 2**(x).bit_length()
+
+
+def WriteHmap(output_name, filelist):
+  """Generates a header map based on |filelist|.
+
+  Per Mark Mentovai:
+    A header map is structured essentially as a hash table, keyed by names used
+    in #includes, and providing pathnames to the actual files.
+
+  The implementation below and the comment above comes from inspecting:
+    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+  while also looking at the implementation in clang in:
+    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+  """
+  magic = 1751998832
+  version = 1
+  _reserved = 0
+  count = len(filelist)
+  capacity = NextGreaterPowerOf2(count)
+  strings_offset = 24 + (12 * capacity)
+  max_value_length = len(max(filelist.values(), key=lambda v: len(v)))
+
+  out = open(output_name, 'wb')
+  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+                        count, capacity, max_value_length))
+
+  # Create empty hashmap buckets.
+  buckets = [None] * capacity
+  for file, path in filelist.items():
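+    # Case-insensitive hash matching clang's header map reader: the sum of
+    # tolower(c) * 13 over the key's characters.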
+    key = 0
+    for c in file:
+      key += ord(c.lower()) * 13
+
+    # Fill next empty bucket.
+    while buckets[key & capacity - 1] is not None:
+      key = key + 1
+    buckets[key & capacity - 1] = (file, path)
+
+  next_offset = 1
+  for bucket in buckets:
+    if bucket is None:
+      out.write(struct.pack('<LLL', 0, 0, 0))
+    else:
+      (file, path) = bucket
+      key_offset = next_offset
+      prefix_offset = key_offset + len(file) + 1
+      suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+      next_offset = suffix_offset + len(os.path.basename(path)) + 1
+      out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+  # Pad byte since next offset starts at 1.
+  out.write(struct.pack('<x'))
+
+  for bucket in buckets:
+    if bucket is not None:
+      (file, path) = bucket
+      base = os.path.dirname(path) + os.sep
+      path = os.path.basename(path)
+      file = file.encode('UTF-8')
+      base = base.encode('UTF-8')
+      path = path.encode('UTF-8')
+      out.write(struct.pack('<%ds' % len(file), file))
+      out.write(struct.pack('<s', b'\0'))
+      out.write(struct.pack('<%ds' % len(base), base))
+      out.write(struct.pack('<s', b'\0'))
+      out.write(struct.pack('<%ds' % len(path), path))
+      out.write(struct.pack('<s', b'\0'))
+
+  out.close()
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
diff --git a/src/build/config/ios/write_framework_modulemap.py b/src/build/config/ios/write_framework_modulemap.py
new file mode 100644
index 0000000..dcc88a8
--- /dev/null
+++ b/src/build/config/ios/write_framework_modulemap.py
@@ -0,0 +1,28 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+MODULE_MAP_TEMPLATE = '''\
+framework module %(framework_name)s {
+  umbrella header "%(framework_name)s.h"
+
+  export *
+  module * { export * }
+}
+'''
+
+
+def Main(framework_name, modules_dir):
+  # Create the Modules directory inside the framework if it does not exist.
+  if not os.path.isdir(modules_dir):
+    os.makedirs(modules_dir)
+
+  with open(os.path.join(modules_dir, 'module.modulemap'), 'w') as module_file:
+    module_file.write(MODULE_MAP_TEMPLATE % {'framework_name': framework_name})
+
+
+if __name__ == '__main__':
+  Main(*sys.argv[1:])
diff --git a/src/build/config/ios/xctest_shell.mm b/src/build/config/ios/xctest_shell.mm
new file mode 100644
index 0000000..dcf5bad
--- /dev/null
+++ b/src/build/config/ios/xctest_shell.mm
@@ -0,0 +1,19 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
+// our build configurations (Debug, Release, ...). In addition, symbol
+// visibility is configured to private by default. To simplify testing with
+// those constraints, our tests are compiled in the TEST_HOST target instead
+// of the .xctest bundle; every .xctest bundle links against this single file
+// (which is just there to ensure that the bundle is not empty).
+
+@interface XCTestShellEmptyClass : NSObject
+@end
+
+@implementation XCTestShellEmptyClass
+@end
diff --git a/src/build/config/linux/BUILD.gn b/src/build/config/linux/BUILD.gn
new file mode 100644
index 0000000..4770424
--- /dev/null
+++ b/src/build/config/linux/BUILD.gn
@@ -0,0 +1,68 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+group("linux") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Linux-only. This is not applied to Android, but is applied to ChromeOS.
+config("compiler") {
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Linux-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
+  # OS_LINUX and the like.
+  if (is_chromeos) {
+    defines = [ "OS_CHROMEOS" ]
+  }
+
+  if ((!(is_chromeos_ash || is_chromeos_lacros) ||
+       default_toolchain != "//build/toolchain/cros:target") &&
+      (!use_custom_libcxx || current_cpu == "mipsel")) {
+    libs = [ "atomic" ]
+  }
+}
+
+config("libcap") {
+  libs = [ "cap" ]
+}
+
+config("libresolv") {
+  libs = [ "resolv" ]
+}
+
+if (use_glib) {
+  pkg_config("glib") {
+    packages = [
+      "glib-2.0",
+      "gmodule-2.0",
+      "gobject-2.0",
+      "gthread-2.0",
+    ]
+    defines = [
+      "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_40",
+      "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_40",
+    ]
+  }
+}
+
+# Ensures all exported symbols are added to the dynamic symbol table.  This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries.  Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+  ldflags = [ "-rdynamic" ]
+}
diff --git a/src/build/config/linux/atk/BUILD.gn b/src/build/config/linux/atk/BUILD.gn
new file mode 100644
index 0000000..bc8e278
--- /dev/null
+++ b/src/build/config/linux/atk/BUILD.gn
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# TODO(crbug.com/1171629): Change this back to is_chromeos.
+# CrOS doesn't install GTK or any gnome packages.
+assert(!is_chromeos_ash)
+
+# These packages should _only_ be expected when building for a target.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+  assert(use_glib, "use_atk=true requires that use_glib=true")
+}
+
+pkg_config("atk") {
+  packages = [
+    "atk",
+    "atk-bridge-2.0",
+  ]
+  atk_lib_dir = exec_script(pkg_config_script,
+                            pkg_config_args + [
+                                  "--libdir",
+                                  "atk",
+                                ],
+                            "string")
+  defines = [
+    "ATK_LIB_DIR=\"$atk_lib_dir\"",
+    "USE_ATK_BRIDGE",
+  ]
+}
diff --git a/src/build/config/linux/atspi2/BUILD.gn b/src/build/config/linux/atspi2/BUILD.gn
new file mode 100644
index 0000000..988a995
--- /dev/null
+++ b/src/build/config/linux/atspi2/BUILD.gn
@@ -0,0 +1,29 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# These packages should _only_ be expected when building for a target.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+  pkg_config("atspi2") {
+    packages = [ "atspi-2" ]
+    atspi_version = exec_script(pkg_config_script,
+                                pkg_config_args + [
+                                      "atspi-2",
+                                      "--version-as-components",
+                                    ],
+                                "value")
+    atspi_major_version = atspi_version[0]
+    atspi_minor_version = atspi_version[1]
+    atspi_micro_version = atspi_version[2]
+    defines = [
+      "ATSPI_MAJOR_VERSION=$atspi_major_version",
+      "ATSPI_MINOR_VERSION=$atspi_minor_version",
+      "ATSPI_MICRO_VERSION=$atspi_micro_version",
+    ]
+  }
+}
diff --git a/src/build/config/linux/dbus/BUILD.gn b/src/build/config/linux/dbus/BUILD.gn
new file mode 100644
index 0000000..f11cf71
--- /dev/null
+++ b/src/build/config/linux/dbus/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(use_dbus)
+
+# Note: if your target also depends on //dbus, you don't need to add this
+# config (it will get added automatically if you depend on //dbus).
+pkg_config("dbus") {
+  packages = [ "dbus-1" ]
+}
diff --git a/src/build/config/linux/dri/BUILD.gn b/src/build/config/linux/dri/BUILD.gn
new file mode 100644
index 0000000..8e3efe6
--- /dev/null
+++ b/src/build/config/linux/dri/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux || is_chromeos, "This file should only be referenced on Linux")
+
+pkg_config("dri") {
+  packages = [ "dri" ]
+  dri_driver_dir = exec_script(pkg_config_script,
+                               pkg_config_args + [
+                                     "--dridriverdir",
+                                     "dri",
+                                   ],
+                               "string")
+  defines = [ "DRI_DRIVER_DIR=\"$dri_driver_dir\"" ]
+}
diff --git a/src/build/config/linux/gtk/BUILD.gn b/src/build/config/linux/gtk/BUILD.gn
new file mode 100644
index 0000000..ecf95dd
--- /dev/null
+++ b/src/build/config/linux/gtk/BUILD.gn
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/gtk/gtk.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+declare_args() {
+  # The (major) version of GTK to build against.  A different version may be
+  # loaded at runtime.
+  gtk_version = 3
+}
+
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk_internal_config") {
+  # GTK requires gmodule, but some misconfigured systems do not list it as a
+  # dependency.
+  packages = [
+    "gmodule-2.0",
+    "gthread-2.0",
+  ]
+  if (gtk_version == 3) {
+    packages += [ "gtk+-3.0" ]
+  } else {
+    assert(gtk_version == 4)
+    packages += [ "gtk4" ]
+  }
+}
+
+group("gtk") {
+  visibility = [
+    # This is the only target that can depend on GTK.  Do not add more targets
+    # to this list.
+    "//ui/gtk:gtk_stubs",
+
+    # These are allow-listed for WebRTC builds.
+    "//examples:peerconnection_client",
+    "//remoting/host:common",
+    "//remoting/host:remoting_me2me_host_static",
+    "//remoting/host/file_transfer",
+    "//remoting/host/it2me:common",
+    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host/linux",
+    "//remoting/test:it2me_standalone_host_main",
+    "//webrtc/examples:peerconnection_client",
+  ]
+
+  public_configs = [ ":gtk_internal_config" ]
+}
+
+# Depend on "gtkprint" to get this.
+pkg_config("gtkprint_internal_config") {
+  if (gtk_version == 3) {
+    packages = [ "gtk+-unix-print-3.0" ]
+  } else {
+    assert(gtk_version == 4)
+    packages = [ "gtk4-unix-print" ]
+  }
+}
+
+group("gtkprint") {
+  visibility = [ "//ui/gtk:*" ]
+  public_configs = [ ":gtkprint_internal_config" ]
+}
diff --git a/src/build/config/linux/gtk/gtk.gni b/src/build/config/linux/gtk/gtk.gni
new file mode 100644
index 0000000..1e45248
--- /dev/null
+++ b/src/build/config/linux/gtk/gtk.gni
@@ -0,0 +1,10 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ui.gni")
+
+declare_args() {
+  # Whether or not we should use libgtk.
+  use_gtk = is_linux && !is_chromecast
+}
diff --git a/src/build/config/linux/libdrm/BUILD.gn b/src/build/config/linux/libdrm/BUILD.gn
new file mode 100644
index 0000000..e9b4018
--- /dev/null
+++ b/src/build/config/linux/libdrm/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux || is_chromeos)
+
+declare_args() {
+  # Controls whether the build should use the version of libdrm library shipped
+  # with the system. In release builds of desktop Linux and Chrome OS we use the
+  # system version. Some Chromecast devices use this as well.
+  use_system_libdrm = is_chromeos_device || (is_linux && !is_chromecast)
+}
+
+if (use_system_libdrm) {
+  pkg_config("libdrm_config") {
+    packages = [ "libdrm" ]
+  }
+  group("libdrm") {
+    public_configs = [ ":libdrm_config" ]
+  }
+} else {
+  group("libdrm") {
+    public_deps = [ "//third_party/libdrm" ]
+  }
+  config("libdrm_exynos_include_config") {
+    include_dirs = [ "//third_party/libdrm/src/exynos" ]
+  }
+}
diff --git a/src/build/config/linux/libffi/BUILD.gn b/src/build/config/linux/libffi/BUILD.gn
new file mode 100644
index 0000000..59b7f04
--- /dev/null
+++ b/src/build/config/linux/libffi/BUILD.gn
@@ -0,0 +1,16 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (default_toolchain == "//build/toolchain/cros:target") {
+  pkg_config("libffi") {
+    packages = [ "libffi" ]
+  }
+} else {
+  # On Linux, make sure we link against libffi version 6.
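+  # The leading ":" below makes GN emit "-l:libffi.so.6", which tells GNU ld
+  # to match that exact file name instead of searching for libffi.so.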
+  config("libffi") {
+    libs = [ ":libffi.so.6" ]
+  }
+}
diff --git a/src/build/config/linux/libva/BUILD.gn b/src/build/config/linux/libva/BUILD.gn
new file mode 100644
index 0000000..ada5d66
--- /dev/null
+++ b/src/build/config/linux/libva/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux || is_chromeos, "This file should only be referenced on Linux")
+
+pkg_config("libva") {
+  packages = [ "libva" ]
+
+  # Do not use exec_script to check the version here. It is done with a
+  # static_assert instead.
+
+  # vaapi decoders use dlopen pre-sandbox anyway to improve startup times.
+  ignore_libs = true
+}
diff --git a/src/build/config/linux/nss/BUILD.gn b/src/build/config/linux/nss/BUILD.gn
new file mode 100644
index 0000000..8c27938
--- /dev/null
+++ b/src/build/config/linux/nss/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux || is_chromeos) {
+  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+  # library but the system NSS libraries. Non-Linux platforms using NSS use the
+  # hermetic one in //third_party/nss.
+  #
+  # Generally you should depend on //crypto:platform instead of using this
+  # config since that will properly pick up NSS or OpenSSL depending on
+  # platform and build config.
+  pkg_config("system_nss_no_ssl_config") {
+    packages = [ "nss" ]
+    extra_args = [
+      "-v",
+      "-lssl3",
+    ]
+  }
+}
diff --git a/src/build/config/linux/pangocairo/BUILD.gn b/src/build/config/linux/pangocairo/BUILD.gn
new file mode 100644
index 0000000..ddcc754
--- /dev/null
+++ b/src/build/config/linux/pangocairo/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pangocairo/pangocairo.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_pangocairo) {
+  pkg_config("pangocairo") {
+    packages = [ "pangocairo" ]
+
+    # We don't want pkg-config for pangocairo to explicitly request that
+    # FreeType gets linked, because we control which FreeType we link to.
+    extra_args = [
+      "-v",
+      "freetype",
+    ]
+  }
+}
diff --git a/src/build/config/linux/pangocairo/pangocairo.gni b/src/build/config/linux/pangocairo/pangocairo.gni
new file mode 100644
index 0000000..ecfe663
--- /dev/null
+++ b/src/build/config/linux/pangocairo/pangocairo.gni
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/ui.gni")
+
+declare_args() {
+  use_pangocairo =
+      # TODO(crbug.com/1052397): Remove !chromeos_is_browser_only once
+      # lacros-chrome switches to target_os="chromeos"
+      is_linux && !is_chromecast && !chromeos_is_browser_only
+}
diff --git a/src/build/config/linux/pkg-config.py b/src/build/config/linux/pkg-config.py
new file mode 100755
index 0000000..5adf70c
--- /dev/null
+++ b/src/build/config/linux/pkg-config.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs> ] where each
+# member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkg-config matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates this
+# variable to this script with the "--system_libdir <system_libdir>" flag. If no
+# flag is provided, then pkgconfig files are assumed to come from
+# <systemroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
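+#
+# Example invocation (sysroot path and package name are illustrative):
+#   pkg-config.py -s /path/to/sysroot -a x64 --system_libdir lib64 gtk+-3.0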
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_LIBDIR environment variable.
+
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line.
+  """
+
+  sysroot = options.sysroot
+  assert sysroot
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print("You must specify an architecture via -a if using a sysroot.")
+    sys.exit(1)
+
+  libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+  libdir += ':' + sysroot + '/usr/share/pkgconfig'
+  os.environ['PKG_CONFIG_LIBDIR'] = libdir
+  return libdir
+
+
+def GetPkgConfigPrefixToStrip(options, args):
+  """Returns the prefix from pkg-config where packages are installed.
+
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots.
+  """
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output([options.pkg_config,
+      "--variable=prefix"] + args, env=os.environ).decode('utf-8').strip()
+  # Strip a trailing '/usr' so that, e.g., prefix=/build/x86-generic/usr
+  # yields /build/x86-generic as the prefix to strip from emitted paths.
+  if prefix.endswith('/usr'):
+    return prefix[:-len('/usr')]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def main():
+  # If this is run on non-Linux platforms, just return nothing and indicate
+  # success. This allows us to "kind of emulate" a Linux build from other
+  # platforms.
+  if "linux" not in sys.platform:
+    print("[[],[],[],[],[]]")
+    return 0
+
+  parser = OptionParser()
+  parser.add_option('-d', '--debug', action='store_true')
+  parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                    default='pkg-config')
+  parser.add_option('-v', action='append', dest='strip_out', type='string')
+  parser.add_option('-s', action='store', dest='sysroot', type='string')
+  parser.add_option('-a', action='store', dest='arch', type='string')
+  parser.add_option('--system_libdir', action='store', dest='system_libdir',
+                    type='string', default='lib')
+  parser.add_option('--atleast-version', action='store',
+                    dest='atleast_version', type='string')
+  parser.add_option('--libdir', action='store_true', dest='libdir')
+  parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir')
+  parser.add_option('--version-as-components', action='store_true',
+                    dest='version_as_components')
+  (options, args) = parser.parse_args()
+
+  # Make a list of regular expressions to strip out.
+  strip_out = []
+  if options.strip_out != None:
+    for regexp in options.strip_out:
+      strip_out.append(re.compile(regexp))
+
+  if options.sysroot:
+    libdir = SetConfigPath(options)
+    if options.debug:
+      sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
+    prefix = GetPkgConfigPrefixToStrip(options, args)
+  else:
+    prefix = ''
+
+  if options.atleast_version:
+    # When asking for the return value, just run pkg-config and print the return
+    # value, no need to do other work.
+    if not subprocess.call([options.pkg_config,
+                            "--atleast-version=" + options.atleast_version] +
+                            args):
+      print("true")
+    else:
+      print("false")
+    return 0
+
+  if options.version_as_components:
+    cmd = [options.pkg_config, "--modversion"] + args
+    try:
+      version_string = subprocess.check_output(cmd).decode('utf-8')
+    except (OSError, subprocess.CalledProcessError):
+      sys.stderr.write('Error from pkg-config.\n')
+      return 1
+    print(json.dumps(list(map(int, version_string.strip().split(".")))))
+    return 0
+
+  if options.libdir:
+    cmd = [options.pkg_config, "--variable=libdir"] + args
+    if options.debug:
+      sys.stderr.write('Running: %s\n' % cmd)
+    try:
+      libdir = subprocess.check_output(cmd).decode('utf-8')
+    except (OSError, subprocess.CalledProcessError):
+      sys.stderr.write('Error from pkg-config.\n')
+      return 1
+    sys.stdout.write(libdir.strip())
+    return 0
+
+  if options.dridriverdir:
+    cmd = [options.pkg_config, "--variable=dridriverdir"] + args
+    if options.debug:
+      sys.stderr.write('Running: %s\n' % cmd)
+    try:
+      dridriverdir = subprocess.check_output(cmd).decode('utf-8')
+    except (OSError, subprocess.CalledProcessError):
+      sys.stderr.write('Error from pkg-config.\n')
+      return 1
+    sys.stdout.write(dridriverdir.strip())
+    return 0
+
+  cmd = [options.pkg_config, "--cflags", "--libs"] + args
+  if options.debug:
+    sys.stderr.write('Running: %s\n' % ' '.join(cmd))
+
+  try:
+    flag_string = subprocess.check_output(cmd).decode('utf-8')
+  except (OSError, subprocess.CalledProcessError):
+    sys.stderr.write('Could not run pkg-config.\n')
+    return 1
+
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ''
+
+  includes = []
+  cflags = []
+  libs = []
+  lib_dirs = []
+
+  for flag in all_flags:
+    if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+      continue
+
+    if flag[:2] == '-l':
+      libs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-L':
+      lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-I':
+      includes.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:3] == '-Wl':
+      # Don't allow libraries to control ld flags.  These should be specified
+      # only in build files.
+      pass
+    elif flag == '-pthread':
+      # Many libs specify "-pthread" which we don't need since we always include
+      # this anyway. Removing it here prevents a bunch of duplicate inclusions
+      # on the command line.
+      pass
+    else:
+      cflags.append(flag)
+
+  # Output a GN array: the includes, cflags, libs, and lib_dirs. The JSON
+  # formatter prints GN compatible lists when everything is a list of
+  # strings.
+  print(json.dumps([includes, cflags, libs, lib_dirs]))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/config/linux/pkg_config.gni b/src/build/config/linux/pkg_config.gni
new file mode 100644
index 0000000..428e44a
--- /dev/null
+++ b/src/build/config/linux/pkg_config.gni
@@ -0,0 +1,128 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful to
+# add this to the config to pass defines that the library expects to get by
+# users of its headers.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+
+  # An optional pkg-config wrapper to use for tools built on the host.
+  host_pkg_config = ""
+
+  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+  # and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+  # depending on whether the systemroot is for a 32 or 64 bit architecture.
+  #
+  # When building under GYP, CrOS board builds specify the 'system_libdir'
+  # variable as part of the GYP_DEFINES provided by the CrOS emerge build or
+  # simple chrome build scheme. This variable permits controlling this for GN
+  # builds in similar fashion by setting the `system_libdir` variable in the
+  # build's args.gn file to 'lib' or 'lib64' as appropriate for the target
+  # architecture.
+  system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+pkg_config_args = []
+
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  pkg_config_args += [
+    "-s",
+    rebase_path(sysroot),
+    "-a",
+    current_cpu,
+  ]
+}
+
+if (pkg_config != "") {
+  pkg_config_args += [
+    "-p",
+    pkg_config,
+  ]
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+  pkg_config_args += [
+    "--system_libdir",
+    system_libdir,
+  ]
+}
+
+if (host_pkg_config != "") {
+  host_pkg_config_args = [
+    "-p",
+    host_pkg_config,
+  ]
+} else {
+  host_pkg_config_args = pkg_config_args
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    if (host_toolchain == current_toolchain) {
+      args = host_pkg_config_args + invoker.packages
+    } else {
+      args = pkg_config_args + invoker.packages
+    }
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
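+    # pkg-config.py prints a GN-parsable list of four lists:
+    # [ <includes>, <cflags>, <libs>, <lib_dirs> ].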
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    cflags = pkgresult[1]
+
+    foreach(include, pkgresult[0]) {
+      if (use_sysroot) {
+        # We want the system include paths to use -isystem instead of -I to
+        # suppress warnings in those headers.
+        include_relativized = rebase_path(include, root_build_dir)
+        cflags += [ "-isystem$include_relativized" ]
+      } else {
+        cflags += [ "-I$include" ]
+      }
+    }
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "defines",
+                             "visibility",
+                           ])
+  }
+}
diff --git a/src/build/config/locales.gni b/src/build/config/locales.gni
new file mode 100644
index 0000000..e94e162
--- /dev/null
+++ b/src/build/config/locales.gni
@@ -0,0 +1,272 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+
+# This file creates the |locales| list, which is the set of supported locales
+# based on the current platform. Locales in this list are formatted based on
+# what .pak files expect.
+# The |locales| variable *may* contain pseudolocales, depending on the
+# |enable_pseudolocales| flag.
+# If you specifically want to have the locales variable with or without
+# pseudolocales, then use |locales_with_pseudolocales| or
+# |locales_without_pseudolocales|.
+
+# The following additional platform specific lists are created:
+# - |android_apk_locales| subset for Android based apk builds
+# - |android_bundle_locales_as_resources| locales formatted for XML output names
+# - |locales_as_mac_outputs| formatted for mac output bundles
+# - |ios_packed_locales| subset for iOS
+# - |ios_packed_locales_as_mac_outputs| subset for iOS output
+
+# Android doesn't ship all locales in order to save space (but webview does).
+# http://crbug.com/369218
+android_apk_omitted_locales = [
+  "bn",
+  "et",
+  "gu",
+  "kn",
+  "ml",
+  "mr",
+  "ms",
+  "ta",
+  "te",
+]
+
+# Chrome on iOS only ships with a subset of the locales supported by other
+# versions of Chrome, as the corresponding locales are not supported by the
+# operating system (but for simplicity, the corresponding .pak files are
+# still generated).
+ios_unsupported_locales = [
+  "am",
+  "bn",
+  "et",
+  "fil",
+  "gu",
+  "kn",
+  "lv",
+  "ml",
+  "mr",
+  "sl",
+  "sw",
+  "ta",
+  "te",
+]
+
+# These lists are defined even when not building for Android or iOS for the
+# sake of build/locale_tool.py. Ensure that GN doesn't complain about them
+# being unused.
+not_needed([ "android_apk_omitted_locales" ])
+not_needed([ "ios_unsupported_locales" ])
+
+# Superset of all locales used in Chrome with platform specific changes noted.
+all_chrome_locales = [
+  "af",
+  "am",
+  "ar",
+  "as",
+  "az",
+  "be",
+  "bg",
+  "bn",
+  "bs",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es",
+  "es-419",  # "es-MX" in iOS (Mexico vs Latin America) "es-US" on Android
+  "et",
+  "eu",
+  "fa",
+  "fi",
+  "fil",  # "tl" in .xml but "fil" in TC and .pak
+  "fr",
+  "fr-CA",
+  "gl",
+  "gu",
+  "he",  # "iw" in .xml and TC but "he" in .pak
+  "hi",
+  "hr",
+  "hu",
+  "hy",
+  "id",  # "in" in .xml but "id" in TC and .pak
+  "is",
+  "it",
+  "ja",
+  "ka",
+  "kk",
+  "km",
+  "kn",
+  "ko",
+  "ky",
+  "lo",
+  "lt",
+  "lv",
+  "mk",
+  "ml",
+  "mn",
+  "mr",
+  "ms",
+  "my",
+  "nb",  # "no" in TC but "nb" in .xml and .pak
+  "ne",
+  "nl",
+  "or",
+  "pa",
+  "pl",
+  "pt-BR",  # just "pt" in iOS
+  "pt-PT",
+  "ro",
+  "ru",
+  "si",
+  "sk",
+  "sl",
+  "sq",
+  "sr",
+  "sr-Latn",  # -b+sr+Latn in .xml
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "ur",
+  "uz",
+  "vi",
+  "zh-CN",
+  "zh-HK",
+  "zh-TW",
+  "zu",
+]
+
+# New locales added to Chrome Android bundle builds.
+android_bundle_only_locales = [
+  "af",
+  "as",
+  "az",
+  "be",
+  "bs",
+  "eu",
+  "fr-CA",
+  "gl",
+  "hy",
+  "is",
+  "ka",
+  "kk",
+  "km",
+  "ky",
+  "lo",
+  "mk",
+  "mn",
+  "my",
+  "ne",
+  "or",
+  "pa",
+  "si",
+  "sq",
+  "sr-Latn",
+  "ur",
+  "uz",
+  "zh-HK",
+  "zu",
+]
+
+# New locales added to ChromeOS builds.
+chromeos_only_locales = [ "is" ]
+
+if (is_android) {
+  locales = all_chrome_locales
+
+  # Android doesn't ship all locales on KitKat in order to save space
+  # (but webview does). http://crbug.com/369218
+  android_apk_locales = all_chrome_locales - android_bundle_only_locales -
+                        android_apk_omitted_locales
+
+  # List of Android locale names in .xml exports. Note: needs to stay in sync
+  # with |ToAndroidLocaleName| in build/android/gyp/util/resource_utils.py.
+  #  - add r: (e.g. zh-HK -> zh-rHK)
+  android_bundle_locales_as_resources = []
+  foreach(_locale, locales) {
+    android_bundle_locales_as_resources +=
+        [ string_replace(_locale, "-", "-r") ]
+  }
+
+  #  - remove en-US
+  #  - swap: (he, id, es-419, fil) -> (iw, in, es-rUS, tl)
+  #  - sr-rLatn -> -b+sr+Latn
+  android_bundle_locales_as_resources -= [
+    "en-rUS",
+    "es-r419",
+    "fil",
+    "he",
+    "id",
+    "sr-rLatn",
+  ]
+  android_bundle_locales_as_resources += [
+    "b+sr+Latn",
+    "es-rUS",
+    "in",
+    "iw",
+    "tl",
+  ]
+} else if (is_chromeos_ash || is_chromeos_lacros) {
+  # In ChromeOS we support a few more locales than standard Chrome.
+  locales =
+      all_chrome_locales - android_bundle_only_locales + chromeos_only_locales
+} else {
+  # Change if other platforms support more locales.
+  locales = all_chrome_locales - android_bundle_only_locales
+}
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (is_ios) {
+  locales -= [
+    "es-419",
+    "pt-BR",
+  ]
+  locales += [
+    "es-MX",
+    "pt",
+  ]
+}
+
+pseudolocales = [
+  "ar-XB",
+  "en-XA",
+]
+locales_without_pseudolocales = locales
+locales_with_pseudolocales = locales + pseudolocales
+
+declare_args() {
+  # We want to give pseudolocales to everyone except end-users (devs & QA).
+  enable_pseudolocales = !is_official_build
+}
+
+if (enable_pseudolocales) {
+  # Note that this only packages the pseudolocales in; it doesn't add the UI
+  # to enable them.
+  locales += pseudolocales
+}
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = []
+foreach(locale, locales) {
+  if (locale == "en-US") {
+    locales_as_mac_outputs += [ "en" ]
+  } else {
+    locales_as_mac_outputs += [ string_replace(locale, "-", "_") ]
+  }
+}
+
+if (is_ios) {
+  ios_packed_locales = locales - ios_unsupported_locales
+  ios_packed_locales_as_mac_outputs =
+      locales_as_mac_outputs - ios_unsupported_locales
+}
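+
+# Example (illustrative): consumers of this file typically iterate over
+# |locales| to declare one output per locale, e.g.:
+#
+#   foreach(_locale, locales) {
+#     outputs += [ "$root_out_dir/locales/${_locale}.pak" ]
+#   }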
diff --git a/src/build/config/logging.gni b/src/build/config/logging.gni
new file mode 100644
index 0000000..25fe991
--- /dev/null
+++ b/src/build/config/logging.gni
@@ -0,0 +1,12 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/dcheck_always_on.gni")
+
+declare_args() {
+  # Use LogErrorNotReached() for NOTREACHED().
+  enable_log_error_not_reached =
+      is_chromeos_ash && !(is_debug || dcheck_always_on)
+}
diff --git a/src/build/config/mac/BUILD.gn b/src/build/config/mac/BUILD.gn
new file mode 100644
index 0000000..0919208
--- /dev/null
+++ b/src/build/config/mac/BUILD.gn
@@ -0,0 +1,132 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/apple/symbols.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/sysroot.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_mac_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    clang_arch = "x86_64"
+  } else if (current_cpu == "x86") {
+    clang_arch = "i386"
+  } else if (current_cpu == "arm64") {
+    clang_arch = current_cpu
+  } else {
+    assert(false, "unknown current_cpu $current_cpu")
+  }
+  if (host_os == "mac") {
+    common_mac_flags += [
+      "-arch",
+      clang_arch,
+    ]
+  } else {
+    common_mac_flags += [ "--target=$clang_arch-apple-macos" ]
+  }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build systems rebuild things
+  # when their command line changes). Nothing should ever read this define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_mac_flags
+  cflags = common_mac_flags
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  ldflags = common_mac_flags
+
+  if (save_unstripped_output) {
+    ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+  }
+
+  if (export_libcxxabi_from_executables) {
+    ldflags += [ "-Wl,-undefined,dynamic_lookup" ]
+  }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    rebase_path(sysroot, root_build_dir),
+    "-mmacosx-version-min=$mac_deployment_target",
+  ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+
+  # Prevent Mac OS X AssertMacros.h (included by system header) from defining
+  # macros that collide with common names, like 'check', 'require', and
+  # 'verify'.
+  # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+  defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0" ]
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
+
+  if (is_component_build) {
+    ldflags += [
+      # Path for loading shared libraries for unbundled binaries.
+      "-Wl,-rpath,@loader_path/.",
+
+      # Path for loading shared libraries for bundled binaries. Get back from
+      # Binary.app/Contents/MacOS.
+      "-Wl,-rpath,@loader_path/../../..",
+    ]
+  }
+}
+
+# The ldflags referenced below are handled by
+# //build/toolchain/apple/linker_driver.py.
+# Remove this config if a target wishes to change the arguments passed to the
+# strip command during linking. This config by default strips all symbols
+# from a binary, but some targets may wish to specify an exports file to
+# preserve specific symbols.
+config("strip_all") {
+  if (enable_stripping) {
+    ldflags = [ "-Wcrl,strip,-x,-S" ]
+  }
+}
+
+# When building with Goma, all inputs must be relative to the build directory.
+# If using the system Xcode, which typically resides outside the build root, a
+# symlink to the SDK is created in the build directory, and the path to that
+# link is stored in $mac_sdk_path. If an action references a file in the SDK as
+# an input, GN will complain that no target generates the file because it is
+# below the $root_build_dir. The below action lists as outputs the files in the
+# SDK that are referenced as inputs to actions, so that GN thinks a target has
+# generated them. The list is centralized here, as multiple targets need to
+# reference the same files, and an output can only be generated once.
+#
+# The symbolic link for $mac_sdk_path is set up by
+# //build/config/apple/sdk_info.py in //build/config/mac/mac_sdk.gni.
+if (use_system_xcode && use_goma && target_os == "mac") {
+  action("sdk_inputs") {
+    script = "//build/noop.py"
+    outputs = [
+      "$mac_sdk_path/usr/include/mach/exc.defs",
+      "$mac_sdk_path/usr/include/mach/mach_exc.defs",
+      "$mac_sdk_path/usr/include/mach/notify.defs",
+    ]
+  }
+} else {
+  group("sdk_inputs") {
+  }
+}
diff --git a/src/build/config/mac/BuildInfo.plist b/src/build/config/mac/BuildInfo.plist
new file mode 100644
index 0000000..bfa3b8d
--- /dev/null
+++ b/src/build/config/mac/BuildInfo.plist
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>DTCompiler</key>
+  <string>${GCC_VERSION}</string>
+  <key>DTSDKBuild</key>
+  <string>${MAC_SDK_BUILD}</string>
+  <key>DTSDKName</key>
+  <string>${MAC_SDK_NAME}</string>
+  <key>DTXcode</key>
+  <string>${XCODE_VERSION}</string>
+  <key>DTXcodeBuild</key>
+  <string>${XCODE_BUILD}</string>
+</dict>
+</plist>
diff --git a/src/build/config/mac/mac_sdk.gni b/src/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000..7f4432d
--- /dev/null
+++ b/src/build/config/mac/mac_sdk.gni
@@ -0,0 +1,127 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/gclient_args.gni")
+import("//build/config/mac/mac_sdk_overrides.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(current_os == "mac" || current_toolchain == default_toolchain)
+
+declare_args() {
+  # The MACOSX_DEPLOYMENT_TARGET variable used when compiling. This partially
+  # controls the minimum supported version of macOS for Chromium by
+  # affecting the symbol availability rules. This may differ from
+  # mac_min_system_version when dropping support for older macOSes but where
+  # additional code changes are required to be compliant with the availability
+  # rules.
+  # Must be of the form x.x.x for Info.plist files.
+  mac_deployment_target = "10.11.0"
+
+  # The value of the LSMinimumSystemVersion in Info.plist files. This partially
+  # controls the minimum supported version of macOS for Chromium by
+  # affecting the Info.plist. This may differ from mac_deployment_target when
+  # dropping support for older macOSes. This should be greater than or equal to
+  # the mac_deployment_target version.
+  # Must be of the form x.x.x for Info.plist files.
+  mac_min_system_version = "10.11.0"
+
+  # Path to a specific version of the Mac SDK, not including a slash at the end.
+  # If empty, the path to the lowest version greater than or equal to
+  # mac_sdk_min is used.
+  mac_sdk_path = ""
+
+  # The SDK name as accepted by xcodebuild.
+  mac_sdk_name = "macosx"
+
+  # The SDK version used when making official builds. This is a single exact
+  # version, not a minimum. If this version isn't available, official builds
+  # will fail.
+  mac_sdk_official_version = "11.1"
+
+  # Production builds should use hermetic Xcode. If you want to do production
+  # builds with system Xcode to test new SDKs, set this.
+  # Don't set this on any bots.
+  mac_allow_system_xcode_for_official_builds_for_testing = false
+}
+
+# Check that the version of the macOS SDK used is the one requested when
+# building a version of Chrome shipped to users. Disable the check when
+# building for iOS, as the macOS SDK version is not relevant for the tools
+# built for the host (they are not shipped); this is required because Chrome
+# on iOS is usually built with the latest version of Xcode, which may not
+# ship with the macOS SDK version used to build Chrome on Mac.
+# TODO(crbug.com/635745): the check for target_os should be replaced by a
+# check that current_toolchain is default_toolchain, and the file should
+# assert that current_os is "mac" once this file is no longer included by
+# iOS toolchains.
+if (is_chrome_branded && is_official_build && target_os != "ios") {
+  assert(!use_system_xcode ||
+             mac_allow_system_xcode_for_official_builds_for_testing,
+         "official branded builds should use hermetic xcode")
+}
+
+# The path to the hermetic install of Xcode. Only relevant when
+# use_system_xcode = false.
+if (!use_system_xcode) {
+  _hermetic_xcode_path = "//build/mac_files/xcode_binaries"
+}
+
+script_name = "//build/config/apple/sdk_info.py"
+sdk_info_args = []
+if (!use_system_xcode) {
+  sdk_info_args += [
+    "--developer_dir",
+    rebase_path(_hermetic_xcode_path, "", root_build_dir),
+  ]
+}
+
+# Goma RBE requires paths relative to source directory. When using system
+# Xcode, this is done by creating symbolic links in root_build_dir.
+if (use_system_xcode && use_goma) {
+  sdk_info_args += [
+    "--get_sdk_info",
+    "--create_symlink_at",
+    "sdk/xcode_links",
+  ]
+}
+sdk_info_args += [ mac_sdk_name ]
+
+_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
+xcode_version = _mac_sdk_result.xcode_version
+xcode_build = _mac_sdk_result.xcode_build
+if (mac_sdk_path == "" && use_system_xcode && use_goma) {
+  mac_sdk_path = _mac_sdk_result.sdk_path
+}
+
+if (use_system_xcode) {
+  # The tool prints the SDK path on the first line, the bin path on the
+  # second line, and the SDK version on the third line.
+  find_sdk_args = [
+    "--print_sdk_path",
+    "--print_bin_path",
+    mac_sdk_min,
+  ]
+  find_sdk_lines =
+      exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+  mac_sdk_version = find_sdk_lines[2]
+  if (mac_sdk_path == "") {
+    mac_sdk_path = find_sdk_lines[0]
+  }
+  mac_bin_path = find_sdk_lines[1]
+} else {
+  mac_sdk_version = mac_sdk_official_version
+  _dev = _hermetic_xcode_path + "/Contents/Developer"
+  _sdk = "MacOSX${mac_sdk_version}.sdk"
+  mac_sdk_path = _dev + "/Platforms/MacOSX.platform/Developer/SDKs/$_sdk"
+  mac_bin_path = _dev + "/Toolchains/XcodeDefault.xctoolchain/usr/bin/"
+
+  # If we're using hermetic Xcode, then we want the paths to be relative so that
+  # generated ninja files are independent of the directory location.
+  # TODO(thakis): Do this at the uses of this variable instead.
+  mac_bin_path = rebase_path(mac_bin_path, root_build_dir)
+}
diff --git a/src/build/config/mac/mac_sdk_overrides.gni b/src/build/config/mac/mac_sdk_overrides.gni
new file mode 100644
index 0000000..17eb3d9
--- /dev/null
+++ b/src/build/config/mac/mac_sdk_overrides.gni
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+_sdk_min_from_env = getenv("FORCE_MAC_SDK_MIN")
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  if (_sdk_min_from_env == "") {
+    mac_sdk_min = "10.15"
+  } else {
+    mac_sdk_min = _sdk_min_from_env
+  }
+}
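+
+# Example (illustrative): the override is exercised through the environment
+# rather than through gn args, e.g.:
+#
+#   FORCE_MAC_SDK_MIN=10.14 gn gen out/Default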
diff --git a/src/build/config/mac/package_framework.py b/src/build/config/mac/package_framework.py
new file mode 100644
index 0000000..0026f46
--- /dev/null
+++ b/src/build/config/mac/package_framework.py
@@ -0,0 +1,60 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import os
+import shutil
+import sys
+
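+# Example invocation (illustrative; the framework name and paths are
+# hypothetical):
+#   python package_framework.py --framework Foo.framework --version A \
+#       --contents Foo Resources --stamp foo_package.stamp
+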
+def Main():
+  parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
+  parser.add_argument('--framework', action='store', type=str, required=True)
+  parser.add_argument('--version', action='store', type=str)
+  parser.add_argument('--contents', action='store', type=str, nargs='+')
+  parser.add_argument('--stamp', action='store', type=str, required=True)
+  args = parser.parse_args()
+
+  VERSIONS = 'Versions'
+  CURRENT = 'Current'
+
+  # Ensure the Foo.framework/Versions/A/ directory exists and create the
+  # Foo.framework/Versions/Current symlink to it.
+  if args.version:
+    try:
+      os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0o755)
+    except OSError as e:
+      if e.errno != errno.EEXIST:
+        raise e
+    _Relink(args.version,
+            os.path.join(args.framework, VERSIONS, CURRENT))
+
+  # Establish the top-level symlinks in the framework bundle. The dest of
+  # the symlinks may not exist yet.
+  if args.contents:
+    for item in args.contents:
+      _Relink(os.path.join(VERSIONS, CURRENT, item),
+              os.path.join(args.framework, item))
+
+  # Write out a stamp file.
+  if args.stamp:
+    with open(args.stamp, 'w') as f:
+      f.write(str(args))
+
+  return 0
+
+
+def _Relink(dest, link):
+  """Creates a symlink to |dest| named |link|. If |link| already exists,
+  it is overwritten."""
+  try:
+    os.remove(link)
+  except OSError as e:
+    # A nonexistent |link| is fine; if os.remove() failed for any other
+    # reason (e.g. |link| is a directory), remove it as a tree instead.
+    if e.errno != errno.ENOENT:
+      shutil.rmtree(link)
+  os.symlink(dest, link)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/src/build/config/mac/prepare_framework_version.py b/src/build/config/mac/prepare_framework_version.py
new file mode 100644
index 0000000..db92150
--- /dev/null
+++ b/src/build/config/mac/prepare_framework_version.py
@@ -0,0 +1,42 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+
+# Ensures that the current version matches the last-produced version, which is
+# stored in the version_file. If it does not, then the framework_root_dir is
+# obliterated.
+# Usage: python prepare_framework_version.py out/obj/version_file \
+#                                            out/Framework.framework \
+#                                            'A'
+
+def PrepareFrameworkVersion(version_file, framework_root_dir, version):
+  # Test what the current framework version is. Stop if it is up-to-date.
+  try:
+    with open(version_file, 'r') as f:
+      current_version = f.read()
+      if current_version == version:
+        return
+  except IOError:
+    pass
+
+  # The framework version has changed, so clobber the framework.
+  if os.path.exists(framework_root_dir):
+    shutil.rmtree(framework_root_dir)
+
+  # Write out the new framework version file, making sure its containing
+  # directory exists.
+  dirname = os.path.dirname(version_file)
+  if not os.path.isdir(dirname):
+    os.makedirs(dirname, 0o700)
+
+  with open(version_file, 'w+') as f:
+    f.write(version)
+
+
+if __name__ == '__main__':
+  PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3])
+  sys.exit(0)
diff --git a/src/build/config/mac/rules.gni b/src/build/config/mac/rules.gni
new file mode 100644
index 0000000..30f6e4f
--- /dev/null
+++ b/src/build/config/mac/rules.gni
@@ -0,0 +1,601 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/apple/apple_info_plist.gni")
+import("//build/config/apple/symbols.gni")
+import("//build/config/mac/mac_sdk.gni")
+
+# Generates Info.plist files for Mac apps and frameworks.
+#
+# Arguments
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
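+#
+# Example (illustrative):
+#
+#     mac_info_plist("my_app_plist") {
+#       executable_name = "MyApp"
+#       info_plist = "Info.plist"
+#     }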
+template("mac_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  apple_info_plist(target_name) {
+    format = "xml1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    extra_substitutions += [
+      "MAC_SDK_BUILD=$mac_sdk_version",
+      "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
+      "MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target",
+      "CHROMIUM_MIN_SYSTEM_VERSION=$mac_min_system_version",
+      "XCODE_BUILD=$xcode_build",
+      "XCODE_VERSION=$xcode_version",
+    ]
+    plist_templates = [
+      "//build/config/mac/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [ invoker.info_plist_target ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "executable_name",
+                           ])
+  }
+}
+
+# Template to package a shared library into a Mac framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+#     framework_version:
+#         string, version of the framework. Typically this is a
+#         single letter, like "A".
+#
+#     framework_contents:
+#         list of string, top-level items in the framework. This is
+#         the list of symlinks to create in the .framework directory that link
+#         into Versions/Current/.
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     output_name:
+#         (optional) string, name of the generated framework without the
+#         .framework suffix. If omitted, defaults to target_name.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+#   - $target_name only adds a build-time dependency. Targets that depend on
+#     it will not link against the framework.
+#   - $target_name+link adds a build-time and link-time dependency. Targets
+#     that depend on it will link against the framework.
+#
+# The build-time-only dependency is used when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+#     mac_framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+#     mac_framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#       ldflags = [
+#         "-install_name",
+#         "@executable_path/../Frameworks/$target_name.framework"
+#       ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation+link" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("mac_framework_bundle") {
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(invoker.framework_version != "", "framework_version is required")
+  assert(defined(invoker.framework_contents), "framework_contents is required")
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = target_name
+    if (defined(invoker.output_name)) {
+      executable_name = invoker.output_name
+    }
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [ "{{bundle_resources_dir}}/Info.plist" ]
+    public_deps = [ ":$_info_plist_target" ]
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  # Create a file to track the build dependency on the framework_version and
+  # framework_contents variables.
+  _framework_toc = [
+                     "Version=" + invoker.framework_version,
+                     _output_name,
+                   ] + invoker.framework_contents
+  _framework_contents = [ _output_name ] + invoker.framework_contents
+  _framework_toc_file = "$target_out_dir/${target_name}.toc"
+  write_file(_framework_toc_file, _framework_toc)
+
+  # Create local variables for referencing different parts of the bundle.
+  _framework_target = _target_name
+  _framework_name = _output_name + ".framework"
+  _framework_base_dir = "$root_out_dir/$_framework_name"
+  _framework_root_dir =
+      _framework_base_dir + "/Versions/${invoker.framework_version}"
+
+  # Clean the entire framework if the framework_version changes.
+  _version_file = "$target_out_dir/${target_name}_version"
+  exec_script("//build/config/mac/prepare_framework_version.py",
+              [
+                rebase_path(_version_file),
+                rebase_path(_framework_base_dir),
+                invoker.framework_version,
+              ])
+
+  # Create the symlinks.
+  _framework_package_target = target_name + "_package"
+  action(_framework_package_target) {
+    script = "//build/config/mac/package_framework.py"
+
+    # The TOC file never needs to be read, since its contents are the values
+    # of GN variables. It is only used to trigger this rule when the values
+    # change.
+    inputs = [ _framework_toc_file ]
+
+    _stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp"
+    outputs = [ _stamp_file ]
+
+    visibility = [ ":$_framework_target" ]
+
+    args = [
+             "--framework",
+             rebase_path(_framework_base_dir, root_build_dir),
+             "--stamp",
+             rebase_path(_stamp_file, root_build_dir),
+             "--version",
+             invoker.framework_version,
+             "--contents",
+           ] + _framework_contents
+
+    # It is not possible to list _framework_contents as outputs, since
+    # ninja does not properly stat symbolic links.
+    # https://github.com/ninja-build/ninja/issues/1186
+  }
+
+  _link_shared_library_target = target_name + "_shared_library"
+  _shared_library_bundle_data = target_name + "_shared_library_bundle_data"
+
+  shared_library(_link_shared_library_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "bundle_deps",
+                             "code_signing_enabled",
+                             "data_deps",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "visibility",
+                           ])
+    visibility = [ ":$_shared_library_bundle_data" ]
+    output_name = _output_name
+    output_prefix_override = true
+    output_extension = ""
+    output_dir = "$target_out_dir/$_link_shared_library_target"
+  }
+
+  bundle_data(_shared_library_bundle_data) {
+    visibility = [ ":$_framework_target" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [ "$target_out_dir/$_link_shared_library_target/$_output_name" ]
+    outputs = [ "{{bundle_executable_dir}}/$_output_name" ]
+    public_deps = [ ":$_link_shared_library_target" ]
+  }
+
+  _framework_public_config = _target_name + "_public_config"
+  config(_framework_public_config) {
+    visibility = [ ":$_framework_target" ]
+    framework_dirs = [ root_out_dir ]
+    frameworks = [ _framework_name ]
+  }
+
+  create_bundle(_framework_target) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+      visibility += [ ":$_target_name+link" ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_info_plist_bundle_data" ]
+
+    if (defined(invoker.bundle_deps)) {
+      deps += invoker.bundle_deps
+    }
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [
+      ":$_framework_package_target",
+      ":$_shared_library_bundle_data",
+    ]
+
+    if (enable_dsyms) {
+      data = [
+        "$root_out_dir/$_output_name.dSYM/Contents/Info.plist",
+        "$root_out_dir/$_output_name.dSYM/Contents/Resources/DWARF/$_output_name",
+      ]
+    }
+
+    bundle_root_dir = _framework_base_dir
+    bundle_contents_dir = _framework_root_dir
+    bundle_resources_dir = "$bundle_contents_dir/Resources"
+    bundle_executable_dir = bundle_contents_dir
+  }
+
+  group(_target_name + "+link") {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "testonly",
+                             "visibility",
+                           ])
+    public_deps = [ ":$_target_name" ]
+    if (!defined(public_configs)) {
+      public_configs = []
+    }
+    public_configs += [ ":$_framework_public_config" ]
+  }
+}
+
+set_defaults("mac_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to create a Mac executable application bundle.
+#
+# Arguments
+#
+#     package_type:
+#         (optional) string, the product package type to create. Options are:
+#             "app" to create a .app bundle (default)
+#             "xpc" to create an .xpc service bundle
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     output_name:
+#         (optional) string, name of the generated app without the
+#         .app suffix. If omitted, defaults to target_name.
+#
+#     extra_configs:
+#         (optional) list of label, additional configs to apply to the
+#         executable target.
+#
+#     remove_configs:
+#         (optional) list of label, default configs to remove from the target.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
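+#
+# Example (illustrative):
+#
+#     mac_app_bundle("MyApp") {
+#       sources = [ ... ]
+#       deps = [ ... ]
+#       info_plist = "Info.plist"
+#     }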
+template("mac_app_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _package_type = "app"
+  if (defined(invoker.package_type)) {
+    _package_type = invoker.package_type
+  }
+
+  if (_package_type == "app") {
+    _output_extension = "app"
+    _product_type = "com.apple.product-type.application"
+    _write_pkg_info = true
+  } else if (_package_type == "xpc") {
+    _output_extension = "xpc"
+    _product_type = "com.apple.product-type.xpc-service"
+    _write_pkg_info = false
+  } else {
+    assert(false, "Unsupported packge_type: " + packge_type)
+  }
+
+  _executable_target = target_name + "_executable"
+  _executable_bundle_data = _executable_target + "_bundle_data"
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = _output_name
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_target = target_name + "_pkg_info"
+
+    action(_pkg_info_target) {
+      forward_variables_from(invoker, [ "testonly" ])
+      script = "//build/apple/write_pkg_info.py"
+      inputs = [ "//build/apple/plist_util.py" ]
+      sources = get_target_outputs(":$_info_plist_target")
+      outputs = [ "$target_gen_dir/$_pkg_info_target" ]
+      args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+             [ "--output" ] + rebase_path(outputs, root_build_dir)
+      deps = [ ":$_info_plist_target" ]
+    }
+  }
+
+  executable(_executable_target) {
+    visibility = [ ":$_executable_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "info_plist",
+                             "output_name",
+                             "visibility",
+                           ])
+    if (defined(extra_configs)) {
+      configs += extra_configs
+    }
+    if (defined(remove_configs)) {
+      configs -= remove_configs
+    }
+    output_name = _output_name
+    output_dir = "$target_out_dir/$_executable_target"
+  }
+
+  bundle_data(_executable_bundle_data) {
+    visibility = [ ":$_target_name" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [ "$target_out_dir/$_executable_target/$_output_name" ]
+    outputs = [ "{{bundle_executable_dir}}/$_output_name" ]
+    public_deps = [ ":$_executable_target" ]
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+    public_deps = [ ":$_info_plist_target" ]
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
+
+    bundle_data(_pkg_info_bundle_data) {
+      forward_variables_from(invoker, [ "testonly" ])
+      visibility = [ ":$_target_name" ]
+      sources = get_target_outputs(":$_pkg_info_target")
+      outputs = [ "{{bundle_contents_dir}}/PkgInfo" ]
+      public_deps = [ ":$_pkg_info_target" ]
+    }
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [
+      ":$_executable_bundle_data",
+      ":$_info_plist_bundle_data",
+    ]
+    if (_write_pkg_info) {
+      deps += [ ":$_pkg_info_bundle_data" ]
+    }
+
+    if (enable_dsyms) {
+      data = [
+        "$root_out_dir/$_output_name.dSYM/Contents/Info.plist",
+        "$root_out_dir/$_output_name.dSYM/Contents/Resources/DWARF/$_output_name",
+      ]
+    }
+
+    product_type = _product_type
+    bundle_root_dir = "$root_out_dir/${_output_name}.${_output_extension}"
+    bundle_contents_dir = "$bundle_root_dir/Contents"
+    bundle_resources_dir = "$bundle_contents_dir/Resources"
+    bundle_executable_dir = "$bundle_contents_dir/MacOS"
+  }
+}
+
+# Template to package a loadable_module into a .plugin bundle.
+#
+# This takes no extra arguments that differ from a loadable_module.
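+#
+# Example (illustrative):
+#
+#     mac_plugin_bundle("MyPlugin") {
+#       sources = [ ... ]
+#       deps = [ ... ]
+#     }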
+template("mac_plugin_bundle") {
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+
+  _target_name = target_name
+  _loadable_module_target = _target_name + "_loadable_module"
+  _loadable_module_bundle_data = _loadable_module_target + "_bundle_data"
+
+  _output_name = _target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  loadable_module(_loadable_module_target) {
+    visibility = [ ":$_loadable_module_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "output_name",
+                             "visibility",
+                           ])
+    output_dir = "$target_out_dir"
+    output_name = _output_name
+  }
+
+  bundle_data(_loadable_module_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = [ "$target_out_dir/$_output_name.so" ]
+    outputs = [ "{{bundle_executable_dir}}/$_output_name" ]
+    public_deps = [ ":$_loadable_module_target" ]
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_loadable_module_bundle_data" ]
+
+    if (enable_dsyms) {
+      data = [
+        "$root_out_dir/$_output_name.so.dSYM/Contents/Info.plist",
+        "$root_out_dir/$_output_name.so.dSYM/Contents/Resources/DWARF/$_output_name.so",
+      ]
+    }
+
+    bundle_root_dir = "$root_out_dir/$_output_name.plugin"
+    bundle_contents_dir = "$bundle_root_dir/Contents"
+    bundle_executable_dir = "$bundle_contents_dir/MacOS"
+  }
+}
diff --git a/src/build/config/mips.gni b/src/build/config/mips.gni
new file mode 100644
index 0000000..6365088
--- /dev/null
+++ b/src/build/config/mips.gni
@@ -0,0 +1,67 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "mips*" contexts, where
+# MIPS code is being compiled.  But they can also be relevant in the
+# other contexts when the code will change its behavior based on the
+# cpu it wants to generate code for.
+declare_args() {
+  # MIPS MultiMedia Instruction compilation flag.
+  mips_use_mmi = false
+}
+
+if (current_cpu == "mipsel" || v8_current_cpu == "mipsel" ||
+    current_cpu == "mips" || v8_current_cpu == "mips") {
+  declare_args() {
+    # MIPS arch variant. Possible values are:
+    #   "r1"
+    #   "r2"
+    #   "r6"
+    #   "loongson3"
+    mips_arch_variant = "r1"
+
+    # MIPS DSP ASE revision. Possible values are:
+    #   0: unavailable
+    #   1: revision 1
+    #   2: revision 2
+    mips_dsp_rev = 0
+
+    # MIPS SIMD Arch compilation flag.
+    mips_use_msa = false
+
+    # MIPS floating-point ABI. Possible values are:
+    #   "hard": sets the GCC -mhard-float option.
+    #   "soft": sets the GCC -msoft-float option.
+    mips_float_abi = "hard"
+
+    # MIPS32 floating-point register width. Possible values are:
+    #   "fp32": sets the GCC -mfp32 option.
+    #   "fp64": sets the GCC -mfp64 option.
+    #   "fpxx": sets the GCC -mfpxx option.
+    mips_fpu_mode = "fp32"
+  }
+} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el" ||
+           current_cpu == "mips64" || v8_current_cpu == "mips64") {
+  # MIPS arch variant. Possible values are:
+  #   "r2"
+  #   "r6"
+  #   "loongson3"
+  if (current_os == "android" || target_os == "android") {
+    declare_args() {
+      mips_arch_variant = "r6"
+
+      # MIPS SIMD Arch compilation flag.
+      mips_use_msa = true
+    }
+  } else {
+    declare_args() {
+      mips_arch_variant = "r2"
+
+      # MIPS SIMD Arch compilation flag.
+      mips_use_msa = false
+    }
+  }
+}
diff --git a/src/build/config/nacl/BUILD.gn b/src/build/config/nacl/BUILD.gn
new file mode 100644
index 0000000..c0c5282
--- /dev/null
+++ b/src/build/config/nacl/BUILD.gn
@@ -0,0 +1,143 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+
+# Native Client Definitions
+config("nacl_defines") {
+  if (is_linux || is_chromeos || is_android || is_nacl) {
+    defines = [
+      "_POSIX_C_SOURCE=199506",
+      "_XOPEN_SOURCE=600",
+      "_GNU_SOURCE=1",
+      "__STDC_LIMIT_MACROS=1",
+    ]
+  } else if (is_win) {
+    defines = [ "__STDC_LIMIT_MACROS=1" ]
+  }
+
+  if (current_cpu == "pnacl" && !is_nacl_nonsfi) {
+    # TODO: Remove the following definition once NACL_BUILD_ARCH and
+    # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
+    defines += [ "NACL_BUILD_ARCH=pnacl" ]
+  }
+}
+
+config("nexe_defines") {
+  defines = [
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
+  ]
+}
+
+config("nacl_warnings") {
+  if (is_win) {
+    # Some NaCl code uses forward declarations of static const variables,
+    # with initialized definitions later on.  (The alternative would be
+    # many, many more forward declarations of everything used in that
+    # const variable's initializer before the definition.)  The Windows
+    # compiler is too stupid to notice that there is an initializer later
+    # in the file, and warns about the forward declaration.
+    cflags = [ "/wd4132" ]
+  }
+}
+
+# The base target that all targets in the NaCl build should depend on.
+# This allows configs to be modified for everything in the NaCl build, even when
+# the NaCl build is composed into the Chrome build.  (GN has no functionality to
+# add flags to everything in //native_client, having a base target works around
+# that limitation.)
+source_set("nacl_base") {
+  public_configs = [
+    ":nacl_defines",
+    ":nacl_warnings",
+  ]
+  if (current_os == "nacl") {
+    public_configs += [ ":nexe_defines" ]
+  }
+}
+
+config("compiler") {
+  configs = []
+  cflags = []
+  ldflags = []
+  libs = []
+
+  if (is_clang && current_cpu != "pnacl") {
+    # -no-integrated-as is the default in nacl-clang for historical
+    # compatibility with inline assembly code and so forth.  But there
+    # are no such cases in Chromium code, and -integrated-as is nicer in
+    # general.  Moreover, the IRT must be built using LLVM's assembler
+    # on x86-64 to preserve sandbox base address hiding.  Use it
+    # everywhere for consistency (and possibly quicker builds).
+    cflags += [ "-integrated-as" ]
+  }
+  if (is_nacl_nonsfi) {
+    cflags += [ "--pnacl-allow-translate" ]
+    ldflags += [
+      "--pnacl-allow-translate",
+      "--pnacl-allow-native",
+      "-Wl,--noirt",
+      "-Wt,--noirt",
+      "-Wt,--noirtshim",
+
+      # The clang driver automatically injects -lpthread when using libc++, but
+      # the toolchain doesn't have it yet.  To get around this, use
+      # -nodefaultlibs and make each executable target depend on
+      # "//native_client/src/nonsfi/irt:nacl_sys_private".
+      "-nodefaultlibs",
+    ]
+    libs += [
+      "c++",
+      "m",
+      "c",
+      "pnaclmm",
+    ]
+    include_dirs = [ "//native_client/src/public/linux_syscalls" ]
+  }
+
+  asmflags = cflags
+}
+
+config("compiler_codegen") {
+  cflags = []
+
+  if (is_nacl_irt) {
+    cflags += [
+      # A debugger should be able to unwind IRT call frames.  This is
+      # the default behavior on x86-64 and when compiling C++ with
+      # exceptions enabled; the change is for the benefit of x86-32 C.
+      # The frame pointer is unnecessary when unwind tables are used.
+      "-fasynchronous-unwind-tables",
+      "-fomit-frame-pointer",
+    ]
+
+    if (current_cpu == "x86") {
+      # The x86-32 IRT needs to be callable with an under-aligned
+      # stack; so we disable SSE instructions, which can fault on
+      # misaligned addresses.  See
+      # https://code.google.com/p/nativeclient/issues/detail?id=3935
+      cflags += [
+        "-mstackrealign",
+        "-mno-sse",
+      ]
+    }
+  }
+
+  asmflags = cflags
+}
+
+config("irt_optimize") {
+  cflags = [
+    # Optimize for space, keep the IRT nexe small.
+    "-Os",
+
+    # These are omitted from non-IRT libraries to keep the libraries
+    # themselves small.
+    "-ffunction-sections",
+    "-fdata-sections",
+  ]
+
+  ldflags = [ "-Wl,--gc-sections" ]
+}
diff --git a/src/build/config/nacl/config.gni b/src/build/config/nacl/config.gni
new file mode 100644
index 0000000..77e15fc
--- /dev/null
+++ b/src/build/config/nacl/config.gni
@@ -0,0 +1,55 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Native Client supports both Newlib and Glibc C libraries where Newlib
+  # is assumed to be the default one; use this to determine whether Glibc
+  # is being used instead.
+  is_nacl_glibc = false
+}
+
+is_nacl_irt = false
+is_nacl_nonsfi = false
+
+nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
+
+if (is_nacl_glibc) {
+  if (current_cpu == "x86" || current_cpu == "x64") {
+    nacl_toolchain_package = "nacl_x86_glibc"
+  } else if (current_cpu == "arm") {
+    nacl_toolchain_package = "nacl_arm_glibc"
+  }
+} else {
+  nacl_toolchain_package = "pnacl_newlib"
+}
+
+if (current_cpu == "pnacl") {
+  _nacl_tuple = "pnacl"
+} else if (current_cpu == "x86" || current_cpu == "x64") {
+  _nacl_tuple = "x86_64-nacl"
+} else if (current_cpu == "arm") {
+  _nacl_tuple = "arm-nacl"
+} else if (current_cpu == "mipsel") {
+  _nacl_tuple = "mipsel-nacl"
+} else {
+  # In order to allow this file to be included unconditionally
+  # from build files that can't depend on //components/nacl/features.gni,
+  # we provide a dummy value that should be harmless if nacl isn't needed.
+  # If nacl *is* needed, this will result in a real error, indicating that
+  # people need to set the toolchain path correctly.
+  _nacl_tuple = "unknown"
+}
+
+nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin"
+nacl_toolchain_tooldir =
+    "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}"
+nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-"
+
+nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu
+is_nacl_irt = current_toolchain == nacl_irt_toolchain
+
+# Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC
+# applications.
+nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi"
+is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain
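+
+# Example (illustrative): with the default Newlib toolchain and
+# current_cpu == "x64", the values above resolve to:
+#   nacl_toolchain_package = "pnacl_newlib"
+#   nacl_toolchain_bindir  = "//native_client/toolchain/${host_os}_x86/pnacl_newlib/bin"
+#   nacl_toolprefix        = "${nacl_toolchain_bindir}/x86_64-nacl-"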
diff --git a/src/build/config/nacl/rules.gni b/src/build/config/nacl/rules.gni
new file mode 100644
index 0000000..c08357c
--- /dev/null
+++ b/src/build/config/nacl/rules.gni
@@ -0,0 +1,182 @@
+# Copyright 2015 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/config/python.gni")
+
+# Generate a nmf file
+#
+# Native Client Manifest (nmf) is a JSON file that tells the browser where to
+# download and load Native Client application files and libraries.
+#
+# Variables:
+#   executables: .nexe/.pexe/.bc executables to generate nmf for
+#   lib_prefix: path to prepend to shared libraries in the nmf
+#   nmf: the name and the path of the output file
+#   nmfflags: additional flags for the nmf generator
+#   stage_dependencies: directory for staging libraries
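+#
+# Example (illustrative; paths are hypothetical):
+#   generate_nmf("hello_nmf") {
+#     nmf = "$root_out_dir/hello.nmf"
+#     executables = [ "$root_out_dir/hello.nexe" ]
+#   }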
+template("generate_nmf") {
+  assert(defined(invoker.executables), "Must define executables")
+  assert(defined(invoker.nmf), "Must define nmf")
+
+  # TODO(crbug.com/1112471): Get this to run cleanly under Python 3.
+  python2_action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "data_deps",
+                             "executables",
+                             "lib_prefix",
+                             "nmf",
+                             "nmfflags",
+                             "public_deps",
+                             "stage_dependencies",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(nmfflags)) {
+      nmfflags = []
+    }
+
+    # TODO(phosek): Remove this conditional once
+    # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is
+    # resolved.
+    if (current_cpu == "pnacl") {
+      objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump")
+    } else {
+      objdump = rebase_path("${nacl_toolprefix}objdump")
+    }
+    if (host_os == "win") {
+      objdump += ".exe"
+    }
+
+    script = "//native_client_sdk/src/tools/create_nmf.py"
+    inputs = [ objdump ]
+    sources = executables
+    outputs = [ nmf ]
+    if (is_nacl_glibc) {
+      if (defined(stage_dependencies)) {
+        nmfflags += [ "--stage-dependencies=" +
+                      rebase_path(stage_dependencies, root_build_dir) ]
+        lib_path = stage_dependencies
+      } else {
+        lib_path = root_build_dir
+      }
+      if (defined(lib_prefix)) {
+        nmfflags += [ "--lib-prefix=" + lib_prefix ]
+        lib_path += "/${lib_prefix}"
+      }
+
+      # Starts empty so the code below can use += everywhere.
+      data = []
+
+      nmfflags +=
+          [ "--library-path=" + rebase_path(root_out_dir, root_build_dir) ]
+
+      # NOTE: There is no explicit dependency for the lib directory
+      # (lib32 and lib64 for x86/x64) created in the product directory.
+      # They are created as a side-effect of nmf creation.
+      if (current_cpu != "x86" && current_cpu != "x64") {
+        nmfflags +=
+            [ "--library-path=" +
+              rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir) ]
+        if (current_cpu == "arm") {
+          data += [ "${lib_path}/libarm/" ]
+        } else {
+          data += [ "${lib_path}/lib/" ]
+        }
+      } else {
+        # For x86-32, the lib/ directory is called lib32/ instead.
+        if (current_cpu == "x86") {
+          nmfflags +=
+              [ "--library-path=" +
+                rebase_path("${nacl_toolchain_tooldir}/lib32", root_build_dir) ]
+          data += [ "${lib_path}/lib32/" ]
+        }
+
+        # x86-32 Windows needs to build both x86-32 and x86-64 NaCl
+        # binaries into the same nmf covering both architectures.  That
+        # gets handled at a higher level (see the nacl_test_data template),
+        # so a single generate_nmf invocation gets both x86-32 and x86-64
+        # nexes listed in executables.
+        if (current_cpu == "x64" || target_os == "win") {
+          # For x86-64, the lib/ directory is called lib64/ instead
+          # when copied by create_nmf.py.
+          glibc_tc = "//build/toolchain/nacl:glibc"
+          assert(current_toolchain == "${glibc_tc}_${current_cpu}")
+          if (current_cpu == "x64") {
+            x64_out_dir = root_out_dir
+          } else {
+            x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
+                                         "root_out_dir")
+          }
+          nmfflags += [
+            "--library-path=" + rebase_path(x64_out_dir, root_build_dir),
+            "--library-path=" +
+                rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir),
+          ]
+          data += [ "${lib_path}/lib64/" ]
+        }
+      }
+    }
+    args = [
+             "--no-default-libpath",
+             "--objdump=" + rebase_path(objdump, root_build_dir),
+             "--output=" + rebase_path(nmf, root_build_dir),
+           ] + nmfflags + rebase_path(sources, root_build_dir)
+    if (is_nacl_glibc && current_cpu == "arm") {
+      deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
+    }
+  }
+}
+
+# Generate a nmf file for Non-SFI tests
+#
+# Non-SFI tests use a different manifest format from regular Native Client
+# and as such require a different generator.
+#
+# Variables:
+#   executable: Non-SFI .nexe executable to generate nmf for
+#   nmf: the name and the path of the output file
+#   nmfflags: additional flags for the nmf generator
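+#
+# Example (illustrative; paths are hypothetical):
+#   generate_nonsfi_test_nmf("hello_nonsfi_nmf") {
+#     nmf = "$root_out_dir/hello_nonsfi.nmf"
+#     executable = "$root_out_dir/hello_nonsfi.nexe"
+#   }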
+template("generate_nonsfi_test_nmf") {
+  assert(defined(invoker.executable), "Must define executable")
+  assert(defined(invoker.nmf), "Must define nmf")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "data_deps",
+                             "executable",
+                             "nmf",
+                             "testonly",
+                             "public_deps",
+                             "visibility",
+                           ])
+
+    script = "//ppapi/tests/create_nonsfi_test_nmf.py"
+    sources = [ executable ]
+    outputs = [ nmf ]
+
+    # NOTE: We use target_cpu rather than current_cpu on purpose because
+    # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI
+    # .nexe executable is always translated to run on the target machine.
+    if (target_cpu == "x86") {
+      arch = "x86-32"
+    } else if (target_cpu == "x64") {
+      arch = "x86-64"
+    } else {
+      arch = target_cpu
+    }
+    args = [
+      "--program=" + rebase_path(executable, root_build_dir),
+      "--arch=${arch}",
+      "--output=" + rebase_path(nmf, root_build_dir),
+    ]
+    if (defined(invoker.nmfflags)) {
+      args += invoker.nmfflags
+    }
+  }
+}
diff --git a/src/build/config/ozone.gni b/src/build/config/ozone.gni
new file mode 100644
index 0000000..a14eb93
--- /dev/null
+++ b/src/build/config/ozone.gni
@@ -0,0 +1,126 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Ozone extra platforms file path. Can be overridden to build out-of-tree
+  # ozone platforms.
+  ozone_extra_path = "//build/config/ozone_extra.gni"
+
+  # Select platforms automatically. Turn this off for manual control.
+  ozone_auto_platforms = use_ozone
+
+  # TODO(petermcneeley): Backwards compatibility support for VM images.
+  # Remove when deprecated. (https://crbug.com/1122009)
+  ozone_platform_gbm = -1
+
+  # Enable explicit apitrace (https://apitrace.github.io) loading.
+  # This requires an apitrace library with additional bindings.
+  # See ChromeOS package for details:
+  # https://chromium-review.googlesource.com/c/chromiumos/overlays/chromiumos-overlay/+/2659419
+  # Chrome will not start without an apitrace.so library.
+  # The trace will be saved to /tmp/gltrace.dat by default. You can
+  # override it at run time with the TRACE_FILE=<path> environment variable.
+  enable_opengl_apitrace = false
+}
+
+declare_args() {
+  # The platform that will be used at runtime by default. This can be
+  # with the command line flag --ozone-platform=<platform>.
+  ozone_platform = ""
+
+  # Compile the 'cast' platform.
+  ozone_platform_cast = false
+
+  # Compile the 'drm' platform.
+  ozone_platform_drm = false
+
+  # Compile the 'headless' platform.
+  ozone_platform_headless = false
+
+  # Compile the 'scenic' platform.
+  ozone_platform_scenic = false
+
+  # Compile the 'x11' platform.
+  ozone_platform_x11 = false
+
+  # Compile the 'wayland' platform.
+  ozone_platform_wayland = false
+
+  # Compile the 'windows' platform.
+  ozone_platform_windows = false
+
+  if (ozone_auto_platforms) {
+    # Use headless as the default platform unless modified below.
+    ozone_platform = "headless"
+    ozone_platform_headless = true
+
+    if (is_cast_audio_only) {
+      # Just use headless for audio-only Cast platforms.
+    } else if (is_chromecast && !is_fuchsia) {
+      # Enable the Cast ozone platform on all A/V Cast builds except Fuchsia.
+      ozone_platform_cast = true
+
+      # For visual desktop Chromecast builds, override the default "headless"
+      # platform with --ozone-platform=x11.
+      # TODO(halliwell): Create a libcast_graphics implementation for desktop
+      # using X11, and disable this platform.
+      if (is_cast_desktop_build && !is_cast_audio_only) {
+        ozone_platform_x11 = true
+      } else {
+        ozone_platform = "cast"
+      }
+    } else if (is_chromeos_ash) {
+      ozone_platform = "x11"
+      ozone_platform_drm = true
+      ozone_platform_x11 = true
+    } else if (is_linux || is_chromeos_lacros) {
+      ozone_platform = "x11"
+      ozone_platform_wayland = true
+      ozone_platform_x11 = true
+    } else if (is_win) {
+      ozone_platform = "windows"
+      ozone_platform_windows = true
+    } else if (is_fuchsia) {
+      ozone_platform = "scenic"
+      ozone_platform_scenic = true
+    }
+  }
+
+  # TODO(petermcneeley): Backwards compatibility support for VM images.
+  # Remove when deprecated. (https://crbug.com/1122009)
+  if (ozone_platform_gbm != -1) {
+    ozone_platform_drm = ozone_platform_gbm
+  }
+}
+
+import(ozone_extra_path)
+
+_ozone_extra_directory = get_path_info(ozone_extra_path, "dir")
+
+# Extra paths to add to targets visibility list.
+ozone_external_platform_visibility = [ "$_ozone_extra_directory/*" ]
+
+if (is_a_target_toolchain) {
+  assert(use_ozone || !(ozone_platform_cast || ozone_platform_drm ||
+                            ozone_platform_headless || ozone_platform_x11 ||
+                            ozone_platform_wayland || ozone_platform_windows ||
+                            ozone_platform_scenic),
+         "Must set use_ozone to select ozone platforms")
+}
+
+# TODO(petermcneeley): Backwards compatibility support for VM images.
+# Remove when deprecated. (https://crbug.com/1122009)
+
+assert(ozone_platform_gbm == -1 || ozone_platform_drm == ozone_platform_gbm)
+
+ozone_platform_gbm = ozone_platform_drm
+
+if (ozone_platform == "gbm") {
+  ozone_platform = "drm"
+}
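+
+# Example: selecting platforms manually instead of relying on
+# ozone_auto_platforms (a sketch; the out directory is hypothetical):
+#   gn gen out/OzoneWayland --args='use_ozone=true ozone_auto_platforms=false
+#       ozone_platform="wayland" ozone_platform_wayland=true'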
diff --git a/src/build/config/ozone_extra.gni b/src/build/config/ozone_extra.gni
new file mode 100644
index 0000000..57fa791
--- /dev/null
+++ b/src/build/config/ozone_extra.gni
@@ -0,0 +1,24 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This list contains the names of external platforms that are passed to the
+# --ozone-platform command line argument or used for the ozone_platform build
+# config. For example, ozone_external_platforms = [ "foo1", "foo2", ... ]
+ozone_external_platforms = []
+
+# This list contains dependencies for external platforms. Typically, the Ozone
+# implementations are placed into ui/ozone/platform/ and so this will look
+# something like:
+# ozone_external_platform_deps = [ "platform/foo1", "platform/foo2", ... ]
+ozone_external_platform_deps = []
+
+# If a platform has unit tests, the corresponding source_set can be listed here
+# so that they get included into ozone_unittests.
+# ozone_external_platform_test_deps = [ "platform/foo1:foo1_unittests", ... ]
+ozone_external_platform_test_deps = []
+
+# If a platform has test support files for ui, the corresponding source_set can
+# be listed here so that they get included into ui_test_support.
+# ozone_external_platform_ui_test_support_deps = [ "platform/foo1:ui_test_support", ... ]
+ozone_external_platform_ui_test_support_deps = []
diff --git a/src/build/config/pch.gni b/src/build/config/pch.gni
new file mode 100644
index 0000000..3afd639
--- /dev/null
+++ b/src/build/config/pch.gni
@@ -0,0 +1,14 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+
+declare_args() {
+  # Precompiled header file support is available by default, but it is
+  # disabled for distributed build systems (like goma or rbe) and for
+  # official builds.
+  # On Linux it slows down the build, so don't enable it by default.
+  enable_precompiled_headers =
+      !is_official_build && !(use_goma || use_rbe) && !is_linux
+}
diff --git a/src/build/config/posix/BUILD.gn b/src/build/config/posix/BUILD.gn
new file mode 100644
index 0000000..e61554c
--- /dev/null
+++ b/src/build/config/posix/BUILD.gn
@@ -0,0 +1,72 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# This build configuration is used by both Fuchsia and POSIX systems.
+assert(is_posix || is_fuchsia)
+
+group("posix") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Posix-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  asmflags = []
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objc = []
+  cflags_objcc = []
+  defines = []
+  ldflags = []
+
+  if (!is_apple && sysroot != "") {
+    # Pass the sysroot to all C compiler variants, the assembler, and linker.
+    sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+    if (is_linux || is_chromeos) {
+      # This is here so that all files get recompiled after a sysroot roll and
+      # when turning the sysroot on or off. (defines are passed via the command
+      # line, and the build system rebuilds things when their command line
+      # changes). Nothing should ever read this define.
+      sysroot_hash =
+          exec_script("//build/linux/sysroot_scripts/install-sysroot.py",
+                      [ "--print-hash=$current_cpu" ],
+                      "trim string",
+                      [ "//build/linux/sysroot_scripts/sysroots.json" ])
+      defines += [ "CR_SYSROOT_HASH=$sysroot_hash" ]
+    }
+    asmflags += sysroot_flags
+    ldflags += sysroot_flags
+
+    # When use_custom_libcxx=true, some -isystem flags get passed to
+    # cflags_cc to set up libc++ include paths.  We want to make sure
+    # the sysroot includes take lower precedence than the libc++
+    # ones, so they must appear later in the command line.  However,
+    # the gn reference states "These variant-specific versions of
+    # cflags* will be appended on the compiler command line after
+    # 'cflags'."  Because of this, we must set the sysroot flags for
+    # all cflags variants instead of using 'cflags' directly.
+    cflags_c += sysroot_flags
+    cflags_cc += sysroot_flags
+    cflags_objc += sysroot_flags
+    cflags_objcc += sysroot_flags
+
+    # Need to get some linker flags out of the sysroot.
+    ld_paths = exec_script("sysroot_ld_path.py",
+                           [
+                             rebase_path("//build/linux/sysroot_ld_path.sh",
+                                         root_build_dir),
+                             rebase_path(sysroot),
+                           ],
+                           "list lines")
+    foreach(ld_path, ld_paths) {
+      ld_path = rebase_path(ld_path, root_build_dir)
+      ldflags += [ "-L" + ld_path ]
+    }
+  }
+}
diff --git a/src/build/config/posix/sysroot_ld_path.py b/src/build/config/posix/sysroot_ld_path.py
new file mode 100644
index 0000000..b45aa0d
--- /dev/null
+++ b/src/build/config/posix/sysroot_ld_path.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
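+#
+# Example invocation (a sketch; paths are hypothetical):
+#   python sysroot_ld_path.py build/linux/sysroot_ld_path.sh /path/to/sysroot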
+
+from __future__ import print_function
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print("Need two arguments")
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1],
+                                  sys.argv[2]]).strip().decode("utf-8")
+result = result.replace(" ", "\n")
+if result != "":
+  print(result)
diff --git a/src/build/config/profiling/profiling.gni b/src/build/config/profiling/profiling.gni
new file mode 100644
index 0000000..9ca3d4a
--- /dev/null
+++ b/src/build/config/profiling/profiling.gni
@@ -0,0 +1,14 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/coverage/coverage.gni")
+
+declare_args() {
+  use_clang_profiling =
+      is_a_target_toolchain && (use_clang_coverage || chrome_pgo_phase == 1)
+}
+
+assert(!use_clang_profiling || is_clang,
+       "Clang Source-based profiling requires clang.")
diff --git a/src/build/config/python.gni b/src/build/config/python.gni
new file mode 100644
index 0000000..89fa317
--- /dev/null
+++ b/src/build/config/python.gni
@@ -0,0 +1,273 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a group() that lists Python sources as |data|.
+# Having such targets serves two purposes:
+# 1) Causes files to be included in runtime_deps, so that they are uploaded to
+#    swarming when running tests remotely.
+# 2) Causes "gn analyze" to know about all Python inputs so that tests will be
+#    re-run when relevant Python files change.
+#
+# All non-trivial Python scripts should use a "pydeps" file to track their
+# sources. To create a .pydeps file for a target in //example:
+#
+#   build/print_python_deps.py \
+#       --root example \
+#       --output example/$target_name.pydeps \
+#       path/to/your/script.py
+#
+# Keep the .pydeps file up-to-date by adding to //PRESUBMIT.py under one of:
+#     _ANDROID_SPECIFIC_PYDEPS_FILES, _GENERIC_PYDEPS_FILES
+#
+# Variables
+#   pydeps_file: Path to .pydeps file to read sources from (optional).
+#   data: Additional files to include in data. E.g. non-.py files needed by the
+#         library, or .py files that are conditionally / lazily imported.
+#
+# Example
+#   python_library("my_library_py") {
+#      pydeps_file = "my_library.pydeps"
+#      data = [ "foo.dat" ]
+#   }
+template("python_library") {
+  group(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    if (defined(invoker.pydeps_file)) {
+      # Read and filter out comments.
+      _pydeps_lines = read_file(invoker.pydeps_file, "list lines")
+      _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ])
+
+      # Dependencies are listed relative to the pydeps file directory, but
+      # the data parameter expects paths that are relative to the current
+      # BUILD.gn.
+      _script_dir = get_path_info(invoker.pydeps_file, "dir")
+      _rebased_pydeps_entries = rebase_path(_pydeps_entries, ".", _script_dir)
+
+      # Even though the .pydeps file is not used at runtime, it must be added
+      # so that "gn analyze" will mark the target as changed when .py files
+      # are removed but none are added or modified.
+      data = _rebased_pydeps_entries + [ invoker.pydeps_file ]
+    } else {
+      data = []
+    }
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+  }
+}
+
+_is_python2 = exec_script("//build/util/is_python2.py", [], "json")
+
+# This is a wrapper around action() that ensures that the script is
+# run under a Python2 executable, even if the main script_executable is
+# Python3.
+#
+# It supports all of action()'s arguments.
+#
+# TODO(crbug.com/1112471): Remove this once everything runs cleanly under
+# Python3.
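+#
+# Example (a sketch; the script, output, and args are hypothetical):
+#   python2_action("generate_foo") {
+#     script = "//tools/generate_foo.py"
+#     outputs = [ "$target_gen_dir/foo.h" ]
+#     args = [ rebase_path(outputs[0], root_build_dir) ]
+#   }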
+template("python2_action") {
+  action(target_name) {
+    # Forward all variables. Ensure that testonly and visibility are forwarded
+    # explicitly, since this performs recursive scope lookups, which is
+    # required to ensure their definition from scopes above the caller are
+    # properly handled. All other variables are forwarded with "*", which
+    # doesn't perform recursive lookups at all. See https://crbug.com/862232
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+
+    if (!_is_python2) {
+      script = "//build/util/python2_action.py"
+      _rebased_script = rebase_path(invoker.script, root_build_dir)
+      inputs = []
+      inputs = [ invoker.script ]
+      if (defined(invoker.inputs)) {
+        inputs += invoker.inputs
+      }
+      args = []
+      args = [ _rebased_script ]
+      if (defined(invoker.args)) {
+        args += invoker.args
+      }
+    }
+  }
+}
+
+# This is a wrapper around action_foreach() that ensures that the script is
+# run under a Python2 executable, even if the main script_executable is
+# Python3.
+#
+# It supports all of action_foreach()'s arguments.
+#
+# TODO(crbug.com/1112471): Remove this once everything runs cleanly under
+# Python3.
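+#
+# Example (a sketch; the script and sources are hypothetical):
+#   python2_action_foreach("compile_icons") {
+#     script = "//tools/compile_icon.py"
+#     sources = [ "one.svg", "two.svg" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.png" ]
+#     args = [
+#       "{{source}}",
+#       rebase_path(target_gen_dir, root_build_dir) +
+#           "/{{source_name_part}}.png",
+#     ]
+#   }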
+template("python2_action_foreach") {
+  action_foreach(target_name) {
+    # Forward all variables. Ensure that testonly and visibility are forwarded
+    # explicitly, since this performs recursive scope lookups, which is
+    # required to ensure their definition from scopes above the caller are
+    # properly handled. All other variables are forwarded with "*", which
+    # doesn't perform recursive lookups at all. See https://crbug.com/862232
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+
+    if (!_is_python2) {
+      script = "//build/util/python2_action.py"
+      _rebased_script = rebase_path(invoker.script, root_build_dir)
+      inputs = []
+      inputs = [ invoker.script ]
+      if (defined(invoker.inputs)) {
+        inputs += invoker.inputs
+      }
+      args = []
+      args = [ _rebased_script ]
+      if (defined(invoker.args)) {
+        args += invoker.args
+      }
+    }
+  }
+}
+
+# A template used for actions that execute a Python script, which has an
+# associated .pydeps file. In other words:
+#
+# - This is very similar to just an action(), except that |script| must point
+#   to a Python script (e.g. "//build/.../foo.py") that has a corresponding
+#   .pydeps file in the source tree (e.g. "//build/.../foo.pydeps").
+#
+# - The .pydeps file contains a list of python dependencies (imports really)
+#   and is generated _manually_ by using a command like:
+#
+#     build/print_python_deps.py --inplace build/android/gyp/foo.py
+#
+# Example
+#   action_with_pydeps("create_foo") {
+#     script = "myscript.py"
+#     args = [...]
+#   }
+template("action_with_pydeps") {
+  action(target_name) {
+    # Forward all variables except run_under_python2.
+    # Ensure that testonly and visibility are forwarded
+    # explicitly, since this performs recursive scope lookups, which is
+    # required to ensure their definition from scopes above the caller are
+    # properly handled. All other variables are forwarded with "*", which
+    # doesn't perform recursive lookups at all. See https://crbug.com/862232
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "run_under_python2",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    # Read and filter out comments.
+    # Happens every time the template is instantiated, but benchmarking shows no
+    # perceivable impact on overall 'gn gen' speed.
+    _pydeps_file = invoker.script + "deps"
+
+    _pydeps_lines =
+        read_file(_pydeps_file, "list lines")  # https://crbug.com/1102058
+    _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ])
+
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    # Dependencies are listed relative to the script directory, but inputs
+    # expects paths that are relative to the current BUILD.gn
+    _script_dir = get_path_info(_pydeps_file, "dir")
+    inputs += rebase_path(_pydeps_entries, ".", _script_dir)
+
+    if (defined(invoker.run_under_python2) && invoker.run_under_python2 &&
+        !_is_python2) {
+      inputs += [ invoker.script ]
+      _args = args
+      args = []
+      args = [ rebase_path(invoker.script, root_build_dir) ] + _args
+      script = "//build/util/python2_action.py"
+    }
+  }
+}
+
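+# Same as action_with_pydeps(), but wraps action_foreach() instead of
+# action(). A deps_file may be given explicitly when the .pydeps file does
+# not live next to |script|; otherwise |script| + "deps" is assumed.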
+template("action_foreach_with_pydeps") {
+  action_foreach(target_name) {
+    # Forward all variables except run_under_python2.
+    # Ensure that testonly and visibility are forwarded
+    # explicitly, since this performs recursive scope lookups, which is
+    # required to ensure their definition from scopes above the caller are
+    # properly handled. All other variables are forwarded with "*", which
+    # doesn't perform recursive lookups at all. See https://crbug.com/862232
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "run_under_python2",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    # Read and filter out comments.
+    # Happens every time the template is instantiated, but benchmarking shows no
+    # perceivable impact on overall 'gn gen' speed.
+    if (defined(invoker.deps_file)) {
+      _pydeps_file = invoker.deps_file
+    } else {
+      _pydeps_file = invoker.script + "deps"
+    }
+    _pydeps_lines = read_file(_pydeps_file, "list lines")
+    _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ])
+
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    # Dependencies are listed relative to the script directory, but inputs
+    # expects paths that are relative to the current BUILD.gn
+    _script_dir = get_path_info(script, "dir")
+    inputs += rebase_path(_pydeps_entries, ".", _script_dir)
+
+    if (defined(invoker.run_under_python2) && invoker.run_under_python2 &&
+        !_is_python2) {
+      inputs += [ invoker.script ]
+      _args = args
+      args = []
+      args = [ rebase_path(invoker.script, root_build_dir) ] + _args
+      script = "//build/util/python2_action.py"
+    }
+  }
+}
diff --git a/src/build/config/sanitizers/BUILD.gn b/src/build/config/sanitizers/BUILD.gn
new file mode 100644
index 0000000..aaaad02
--- /dev/null
+++ b/src/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,610 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+
+# Contains the dependencies needed for sanitizers to link into executables and
+# shared_libraries.
+group("deps") {
+  if (using_sanitizer) {
+    public_configs = [
+      ":sanitizer_options_link_helper",
+
+      # Even when a target removes default_sanitizer_flags, it may be depending
+      # on a library that did not remove default_sanitizer_flags. Thus, we need
+      # to add the ldflags here as well as in default_sanitizer_flags.
+      ":default_sanitizer_ldflags",
+    ]
+    deps = [ ":options_sources" ]
+    if (is_win) {
+      exe = ".exe"
+    } else {
+      exe = ""
+    }
+    data = [
+      "//tools/valgrind/asan/",
+      "$clang_base_path/bin/llvm-symbolizer${exe}",
+    ]
+    if (use_prebuilt_instrumented_libraries ||
+        use_locally_built_instrumented_libraries) {
+      deps += [ "//third_party/instrumented_libraries:deps" ]
+    }
+  }
+  if (is_asan) {
+    # ASAN is supported on iOS but the runtime library depends on the compiler
+    # used (Chromium version of clang versus Xcode version of clang). Only copy
+    # the ASAN runtime on iOS if building with Chromium clang.
+    if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
+      data_deps = [ ":copy_asan_runtime" ]
+    }
+    if (is_mac || (is_ios && !use_xcode_clang)) {
+      public_deps = [ ":asan_runtime_bundle_data" ]
+    }
+  }
+}
+
+assert(!(is_win && is_asan && current_cpu == "x86"),
+       "ASan is only supported in 64-bit builds on Windows.")
+
+if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && is_asan) {
+  if (is_mac) {
+    _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
+  } else if (is_ios) {
+    _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
+  } else if (is_win && current_cpu == "x64") {
+    _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
+  }
+
+  _clang_rt_dso_full_path =
+      "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
+
+  if (!is_ios) {
+    copy("copy_asan_runtime") {
+      sources = [ _clang_rt_dso_full_path ]
+      outputs = [ "$root_out_dir/{{source_file_part}}" ]
+    }
+  } else {
+    # On iOS, the runtime library needs to be code signed (ad hoc signature)
+    # starting with Xcode 8, so use an action instead of a copy on iOS.
+    action("copy_asan_runtime") {
+      script = "//build/config/ios/codesign.py"
+      sources = [ _clang_rt_dso_full_path ]
+      outputs = [ "$root_out_dir/" + get_path_info(sources[0], "file") ]
+      args = [
+        "code-sign-file",
+        "--identity=" + ios_code_signing_identity,
+        "--output=" + rebase_path(outputs[0], root_build_dir),
+        rebase_path(sources[0], root_build_dir),
+      ]
+    }
+  }
+
+  if (is_apple) {
+    bundle_data("asan_runtime_bundle_data") {
+      sources = get_target_outputs(":copy_asan_runtime")
+      outputs = [ "{{bundle_executable_dir}}/{{source_file_part}}" ]
+      public_deps = [ ":copy_asan_runtime" ]
+    }
+  }
+}
+
+config("sanitizer_options_link_helper") {
+  if (is_apple) {
+    ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
+  } else if (!is_win) {
+    ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+  }
+}
+
+static_library("options_sources") {
+  # This is a static_library instead of a source_set, as it shouldn't be
+  # unconditionally linked into targets.
+  visibility = [
+    ":deps",
+    "//:gn_visibility",
+  ]
+  sources = [ "//build/sanitizers/sanitizer_options.cc" ]
+
+  # Don't compile this target with any sanitizer code. It can be called from
+  # the sanitizer runtimes, so instrumenting these functions could cause
+  # recursive calls into the runtime if there is an error.
+  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+  if (is_asan) {
+    if (!defined(asan_suppressions_file)) {
+      asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
+    }
+    sources += [ asan_suppressions_file ]
+  }
+
+  if (is_lsan) {
+    if (!defined(lsan_suppressions_file)) {
+      lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
+    }
+    sources += [ lsan_suppressions_file ]
+  }
+
+  if (is_tsan) {
+    if (!defined(tsan_suppressions_file)) {
+      tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
+    }
+    sources += [ tsan_suppressions_file ]
+  }
+}
+
+# Applies linker flags necessary when either :deps or :default_sanitizer_flags
+# are used.
+config("default_sanitizer_ldflags") {
+  visibility = [
+    ":default_sanitizer_flags",
+    ":deps",
+
+    # https://crbug.com/360158.
+    "//tools/ipc_fuzzer/fuzzer:ipc_fuzzer",
+  ]
+
+  if (is_posix || is_fuchsia) {
+    ldflags = []
+    if (is_asan) {
+      ldflags += [ "-fsanitize=address" ]
+    }
+    if (is_hwasan) {
+      ldflags += [ "-fsanitize=hwaddress" ]
+    }
+    if (is_lsan) {
+      ldflags += [ "-fsanitize=leak" ]
+    }
+    if (is_tsan) {
+      ldflags += [ "-fsanitize=thread" ]
+    }
+    if (is_msan) {
+      ldflags += [ "-fsanitize=memory" ]
+    }
+    if (is_ubsan || is_ubsan_security) {
+      ldflags += [ "-fsanitize=undefined" ]
+    }
+    if (is_ubsan_null) {
+      ldflags += [ "-fsanitize=null" ]
+    }
+    if (is_ubsan_vptr) {
+      ldflags += [ "-fsanitize=vptr" ]
+    }
+
+    if (use_sanitizer_coverage) {
+      if (use_libfuzzer) {
+        ldflags += [ "-fsanitize=fuzzer-no-link" ]
+        if (is_mac) {
+          # TODO(crbug.com/926588): on macOS, dead code stripping does not work
+          # well with `pc-table` instrumentation enabled by `fuzzer-no-link`.
+          ldflags += [ "-fno-sanitize-coverage=pc-table" ]
+        }
+      } else {
+        ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
+      }
+    }
+
+    if (is_cfi && current_toolchain == default_toolchain) {
+      ldflags += [ "-fsanitize=cfi-vcall" ]
+      if (use_cfi_cast) {
+        ldflags += [
+          "-fsanitize=cfi-derived-cast",
+          "-fsanitize=cfi-unrelated-cast",
+        ]
+      }
+      if (use_cfi_icall) {
+        ldflags += [ "-fsanitize=cfi-icall" ]
+      }
+      if (use_cfi_diag) {
+        ldflags += [ "-fno-sanitize-trap=cfi" ]
+        if (use_cfi_recover) {
+          ldflags += [ "-fsanitize-recover=cfi" ]
+        }
+      }
+    }
+  } else if (is_win) {
+    # Windows directly calls link.exe instead of the compiler driver when
+    # linking.  Hence, pass the runtime libraries instead of -fsanitize=address
+    # or -fsanitize=fuzzer.
+    if (is_asan && is_component_build) {
+      # In the static-library build, ASan libraries are different for
+      # executables and DLLs; see link_executable and link_shared_library
+      # below. The code here handles only the component build.
+      if (current_cpu == "x64") {
+        # Windows 64-bit.
+        libs = [
+          "clang_rt.asan_dynamic-x86_64.lib",
+          "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
+        ]
+      } else {
+        assert(current_cpu == "x86", "WinASan unsupported architecture")
+        libs = [
+          "clang_rt.asan_dynamic-i386.lib",
+          "clang_rt.asan_dynamic_runtime_thunk-i386.lib",
+        ]
+      }
+    }
+    if (use_libfuzzer) {
+      assert(current_cpu == "x64", "LibFuzzer unsupported architecture")
+      assert(!is_component_build,
+             "LibFuzzer only supports non-component builds on Windows")
+
+      # Incremental linking causes padding that messes up SanitizerCoverage.
+      # Don't do it.
+      ldflags = [ "/INCREMENTAL:NO" ]
+    }
+  }
+}
+
+config("common_sanitizer_flags") {
+  cflags = []
+
+  if (using_sanitizer) {
+    assert(is_clang, "sanitizers only supported with clang")
+
+    # Allow non-default toolchains to enable sanitizers in toolchain_args even
+    # in official builds.
+    assert(current_toolchain != default_toolchain || !is_official_build,
+           "sanitizers not supported in official builds")
+
+    cflags += [
+      # Column info in debug data confuses Visual Studio's debugger, so don't
+      # use this by default.  However, clusterfuzz needs it for good
+      # attribution of reports to CLs, so turn it on there.
+      "-gcolumn-info",
+    ]
+
+    # Frame pointers are controlled in //build/config/compiler:default_stack_frames
+  }
+}
+
+config("asan_flags") {
+  cflags = []
+  if (is_asan) {
+    cflags += [ "-fsanitize=address" ]
+    if (is_win) {
+      if (!defined(asan_win_blacklist_path)) {
+        asan_win_blacklist_path =
+            rebase_path("//tools/memory/asan/blocklist_win.txt", root_build_dir)
+      }
+      cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ]
+    }
+  }
+}
+
+config("link_executable") {
+  if (is_asan && is_win && !is_component_build) {
+    if (current_cpu == "x64") {
+      ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ]
+    } else {
+      assert(current_cpu == "x86", "WinASan unsupported architecture")
+      ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ]
+    }
+  }
+}
+
+config("link_shared_library") {
+  if (is_asan && is_win && !is_component_build) {
+    if (current_cpu == "x64") {
+      libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
+    } else {
+      assert(current_cpu == "x86", "WinASan unsupported architecture")
+      libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
+    }
+  }
+}
+
+config("cfi_flags") {
+  cflags = []
+  if (is_cfi && current_toolchain == default_toolchain) {
+    if (!defined(cfi_blacklist_path)) {
+      cfi_blacklist_path =
+          rebase_path("//tools/cfi/ignores.txt", root_build_dir)
+    }
+    cflags += [
+      "-fsanitize=cfi-vcall",
+      "-fsanitize-blacklist=$cfi_blacklist_path",
+    ]
+
+    if (use_cfi_cast) {
+      cflags += [
+        "-fsanitize=cfi-derived-cast",
+        "-fsanitize=cfi-unrelated-cast",
+      ]
+    }
+
+    if (use_cfi_icall) {
+      cflags += [ "-fsanitize=cfi-icall" ]
+    }
+
+    if (use_cfi_diag) {
+      cflags += [ "-fno-sanitize-trap=cfi" ]
+      if (is_win) {
+        cflags += [
+          "/Oy-",
+          "/Ob0",
+        ]
+      } else {
+        cflags += [
+          "-fno-inline-functions",
+          "-fno-inline",
+          "-fno-omit-frame-pointer",
+          "-O1",
+        ]
+      }
+      if (use_cfi_recover) {
+        cflags += [ "-fsanitize-recover=cfi" ]
+      }
+    }
+  }
+}
+
+# crbug.com/785442: Fix cfi-icall failures for code that casts pointer argument
+# types in function pointer type signatures.
+config("cfi_icall_generalize_pointers") {
+  if (is_clang && is_cfi && use_cfi_icall) {
+    cflags = [ "-fsanitize-cfi-icall-generalize-pointers" ]
+  }
+}
+
+config("cfi_icall_disable") {
+  if (is_clang && is_cfi && use_cfi_icall) {
+    cflags = [ "-fno-sanitize=cfi-icall" ]
+  }
+}
+
+config("coverage_flags") {
+  cflags = []
+  if (use_sanitizer_coverage) {
+    # Used by sandboxing code to allow coverage dump to be written on the disk.
+    defines = [ "SANITIZER_COVERAGE" ]
+
+    if (use_libfuzzer) {
+      cflags += [ "-fsanitize=fuzzer-no-link" ]
+      if (is_mac) {
+        # TODO(crbug.com/926588): on macOS, dead code stripping does not work
+        # well with `pc-table` instrumentation enabled by `fuzzer-no-link`.
+        cflags += [ "-fno-sanitize-coverage=pc-table" ]
+      }
+    } else {
+      cflags += [
+        "-fsanitize-coverage=$sanitizer_coverage_flags",
+        "-mllvm",
+        "-sanitizer-coverage-prune-blocks=1",
+      ]
+      if (current_cpu == "arm") {
+        # http://crbug.com/517105
+        cflags += [
+          "-mllvm",
+          "-sanitizer-coverage-block-threshold=0",
+        ]
+      }
+    }
+  }
+}
+
+config("hwasan_flags") {
+  if (is_hwasan) {
+    asmflags = [ "-fsanitize=hwaddress" ]
+    cflags = [ "-fsanitize=hwaddress" ]
+  }
+}
+
+config("lsan_flags") {
+  if (is_lsan) {
+    cflags = [ "-fsanitize=leak" ]
+  }
+}
+
+config("msan_flags") {
+  if (is_msan) {
+    assert(is_linux || is_chromeos,
+           "msan only supported on linux x86_64/ChromeOS")
+    if (!defined(msan_blacklist_path)) {
+      msan_blacklist_path =
+          rebase_path("//tools/msan/blacklist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=memory",
+      "-fsanitize-memory-track-origins=$msan_track_origins",
+      "-fsanitize-blacklist=$msan_blacklist_path",
+    ]
+  }
+}
+
+config("tsan_flags") {
+  if (is_tsan) {
+    assert(is_linux || is_chromeos, "tsan only supported on linux x86_64")
+    if (!defined(tsan_blacklist_path)) {
+      tsan_blacklist_path =
+          rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=thread",
+      "-fsanitize-blacklist=$tsan_blacklist_path",
+    ]
+  }
+}
+
+config("ubsan_flags") {
+  cflags = []
+  if (is_ubsan) {
+    if (!defined(ubsan_blacklist_path)) {
+      ubsan_blacklist_path =
+          rebase_path("//tools/ubsan/ignorelist.txt", root_build_dir)
+    }
+    cflags += [
+      "-fsanitize=bool",
+      "-fsanitize=bounds",
+      "-fsanitize=builtin",
+      "-fsanitize=float-divide-by-zero",
+      "-fsanitize=integer-divide-by-zero",
+      "-fsanitize=null",
+      "-fsanitize=object-size",
+      "-fsanitize=return",
+      "-fsanitize=returns-nonnull-attribute",
+      "-fsanitize=shift-exponent",
+      "-fsanitize=signed-integer-overflow",
+      "-fsanitize=unreachable",
+      "-fsanitize=vla-bound",
+      "-fsanitize-blacklist=$ubsan_blacklist_path",
+    ]
+
+    # Chromecast ubsan builds fail to compile with these
+    # experimental flags, so only add them to non-chromecast ubsan builds.
+    if (!is_chromecast) {
+      cflags += [
+        # Employ the experimental PBQP register allocator to avoid slow
+        # compilation on files with too many basic blocks.
+        # See http://crbug.com/426271.
+        "-mllvm",
+        "-regalloc=pbqp",
+
+        # Speculatively use coalescing to slightly improve the code generated
+        # by PBQP regallocator. May increase compile time.
+        "-mllvm",
+        "-pbqp-coalescing",
+      ]
+    }
+  }
+}
+
+config("ubsan_no_recover") {
+  if (is_ubsan_no_recover) {
+    cflags = [ "-fno-sanitize-recover=undefined" ]
+  }
+}
+
+config("ubsan_security_flags") {
+  if (is_ubsan_security) {
+    if (!defined(ubsan_security_blacklist_path)) {
+      ubsan_security_blacklist_path =
+          rebase_path("//tools/ubsan/security_ignorelist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=function",
+      "-fsanitize=shift",
+      "-fsanitize=signed-integer-overflow",
+      "-fsanitize=vla-bound",
+      "-fsanitize-blacklist=$ubsan_security_blacklist_path",
+    ]
+  }
+}
+
+config("ubsan_null_flags") {
+  if (is_ubsan_null) {
+    cflags = [ "-fsanitize=null" ]
+  }
+}
+
+config("ubsan_vptr_flags") {
+  if (is_ubsan_vptr) {
+    if (!defined(ubsan_vptr_blacklist_path)) {
+      ubsan_vptr_blacklist_path =
+          rebase_path("//tools/ubsan/vptr_ignorelist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=vptr",
+      "-fsanitize-blacklist=$ubsan_vptr_blacklist_path",
+    ]
+  }
+}
+
+config("fuzzing_build_mode") {
+  if (use_fuzzing_engine && optimize_for_fuzzing) {
+    defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
+  }
+}
+
+all_sanitizer_configs = [
+  ":common_sanitizer_flags",
+  ":coverage_flags",
+  ":default_sanitizer_ldflags",
+  ":asan_flags",
+  ":cfi_flags",
+  ":hwasan_flags",
+  ":lsan_flags",
+  ":msan_flags",
+  ":tsan_flags",
+  ":ubsan_flags",
+  ":ubsan_no_recover",
+  ":ubsan_null_flags",
+  ":ubsan_security_flags",
+  ":ubsan_vptr_flags",
+  ":fuzzing_build_mode",
+]
+
+# This config is applied by default to all targets. It sets the compiler flags
+# for sanitizer usage, or, if no sanitizer is set, does nothing.
+#
+# This needs to be in a separate config so that targets can opt out of
+# sanitizers (by removing the config) if they desire. Even if a target
+# removes this config, executables & shared libraries should still depend on
+# :deps if any of their dependencies have not opted out of sanitizers.
+# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
+config("default_sanitizer_flags") {
+  configs = all_sanitizer_configs
+
+  if (use_sanitizer_configs_without_instrumentation) {
+    configs = []
+  }
+}
+
+# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr.
+# This allows ubsan_vptr to be disabled selectively when needed, in
+# particular when some third_party code must be compiled without RTTI,
+# which ubsan_vptr requires.
+config("default_sanitizer_flags_but_ubsan_vptr") {
+  configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
+
+  if (use_sanitizer_configs_without_instrumentation) {
+    configs = []
+  }
+}
+
+config("default_sanitizer_flags_but_coverage") {
+  configs = all_sanitizer_configs - [ ":coverage_flags" ]
+
+  if (use_sanitizer_configs_without_instrumentation) {
+    configs = []
+  }
+}
+
+# This config is used by parts of code that aren't targeted in fuzzers and
+# therefore don't need coverage instrumentation and possibly won't need
+# sanitizer instrumentation either. The config also tells the compiler to
+# perform additional optimizations on the configured code and ensures that
+# it links correctly against the rest of the binary, which is instrumented
+# with sanitizers. The config only does anything if the build is a fuzzing
+# build.
+config("not_fuzzed") {
+  if (use_fuzzing_engine) {
+    # Since we aren't instrumenting with coverage, code size is less of a
+    # concern, so use a more aggressive optimization level than
+    # optimize_for_fuzzing (-O1). When given multiple optimization flags, clang
+    # obeys the last one, so as long as this flag comes after -O1, it should work.
+    # Since this config will always be depended on after
+    # "//build/config/compiler:default_optimization" (which adds -O1 when
+    # optimize_for_fuzzing is true), -O2 should always be the second flag. Even
+    # though this sounds fragile, it isn't a big deal if it breaks, since
+    # proto fuzzers will still work; they will just be slightly slower.
+    cflags = [ "-O2" ]
+
+    # We need to include this config when we remove default_sanitizer_flags or
+    # else there will be linking errors. We would remove default_sanitizer_flags
+    # here as well, but gn doesn't permit this.
+    if (!is_msan) {
+      # We don't actually remove sanitization when MSan is being used, so
+      # there is no need to add default_sanitizer_ldflags in that case.
+      configs = [ ":default_sanitizer_ldflags" ]
+    }
+  }
+}
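+
+# Typical use from a target that should not be instrumented (a sketch; the
+# exact configs removed depend on the lists defined in
+# //build/config/sanitizers/sanitizers.gni, which must be imported first):
+#   configs -= not_fuzzed_remove_configs
+#   configs += [ "//build/config/sanitizers:not_fuzzed" ]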
diff --git a/src/build/config/sanitizers/sanitizers.gni b/src/build/config/sanitizers/sanitizers.gni
new file mode 100644
index 0000000..576bf83
--- /dev/null
+++ b/src/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,296 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/profiling/profiling.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Compile for Address Sanitizer to find memory bugs.
+  is_asan = false
+
+  # Compile for Hardware-Assisted Address Sanitizer to find memory bugs
+  # (android/arm64 only).
+  # See http://clang.llvm.org/docs/HardwareAssistedAddressSanitizerDesign.html
+  is_hwasan = false
+
+  # Compile for Leak Sanitizer to find leaks.
+  is_lsan = false
+
+  # Compile for Memory Sanitizer to find uninitialized reads.
+  is_msan = false
+
+  # Compile for Thread Sanitizer to find threading bugs.
+  is_tsan = false
+
+  # Compile for Undefined Behaviour Sanitizer to find various types of
+  # undefined behaviour (excludes vptr checks).
+  is_ubsan = false
+
+  # Halt the program if a problem is detected.
+  is_ubsan_no_recover = false
+
+  # Compile for Undefined Behaviour Sanitizer's null pointer checks.
+  is_ubsan_null = false
+
+  # Track where uninitialized memory originates from. From fastest to slowest:
+  # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+  # chain of stores leading from allocation site to use site.
+  msan_track_origins = 2
+
+  # Use dynamic libraries instrumented by one of the sanitizers instead of the
+  # standard system libraries. Set this flag to build the libraries from source.
+  use_locally_built_instrumented_libraries = false
+
+  # Compile with Control Flow Integrity to protect virtual calls and casts.
+  # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+  #
+  # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds.
+  #
+  # Disable this on linux-chromeos to avoid using ThinLTO there;
+  # crbug.com/1033839. Similarly, don't use this on ARC builds.
+  #
+  # TODO(crbug.com/1159424): Reassess the validity of the next expression.
+  is_cfi =
+      is_official_build &&
+      (((target_os == "linux" || is_chromeos_lacros) && target_cpu == "x64") ||
+       ((is_chromeos_ash || is_chromeos_lacros) && is_chromeos_device))
+
+  # Enable checks for indirect function calls via a function pointer.
+  # TODO(pcc): remove this when we're ready to add these checks by default.
+  # https://crbug.com/701919
+  #
+  # TODO(crbug.com/1159424): Reassess the validity of the next expression.
+  use_cfi_icall = (target_os == "linux" || is_chromeos_lacros) &&
+                  target_cpu == "x64" && is_official_build
+
+  # Print detailed diagnostics when Control Flow Integrity detects a violation.
+  use_cfi_diag = false
+
+  # Let Control Flow Integrity continue execution instead of crashing when
+  # printing diagnostics (use_cfi_diag = true).
+  use_cfi_recover = false
+
+  # Compile for fuzzing with LLVM LibFuzzer.
+  # See http://www.chromium.org/developers/testing/libfuzzer
+  use_libfuzzer = false
+
+  # Compile for fuzzing with AFL.
+  use_afl = false
+
+  # Compile for fuzzing with an external engine (e.g., Grammarinator).
+  use_external_fuzzing_engine = false
+
+  # Enables core ubsan security features. Will later be removed once it matches
+  # is_ubsan.
+  is_ubsan_security = false
+
+  # Helper variable for testing builds with disabled libfuzzer.
+  # Not for client use.
+  disable_libfuzzer = false
+
+  # Optimize for coverage guided fuzzing (balance between speed and number of
+  # branches). Can be also used to remove non-determinism and other issues.
+  optimize_for_fuzzing = false
+
+  # Value for -fsanitize-coverage flag. Setting this causes
+  # use_sanitizer_coverage to be enabled.
+  # This flag is not used for libFuzzer (use_libfuzzer=true). Instead, we use:
+  #     -fsanitize=fuzzer-no-link
+  # Default value when unset and use_fuzzing_engine=true:
+  #     trace-pc-guard
+  # Default value when unset and use_sanitizer_coverage=true:
+  #     trace-pc-guard,indirect-calls
+  sanitizer_coverage_flags = ""
+
+  # When enabled, only relevant sanitizer defines are set, but compilation
+  # happens with no extra flags. This is useful in component builds when
+  # enabling sanitizers in only some of the components.
+  use_sanitizer_configs_without_instrumentation = false
+
+  # When true, seed corpora archives are built.
+  archive_seed_corpus = true
+}
+
+declare_args() {
+  # Enable checks for bad casts: derived cast and unrelated cast.
+  # TODO(krasin): remove this, when we're ready to add these checks by default.
+  # https://crbug.com/626794
+  use_cfi_cast = is_cfi && (is_chromeos_ash || is_chromeos_lacros)
+
+  # Compile for Undefined Behaviour Sanitizer's vptr checks.
+  is_ubsan_vptr = is_ubsan_security
+}
+
+# Disable sanitizers for non-target toolchains.
+if (!is_a_target_toolchain) {
+  is_asan = false
+  is_cfi = false
+  is_hwasan = false
+  is_lsan = false
+  is_msan = false
+  is_tsan = false
+  is_ubsan = false
+  is_ubsan_null = false
+  is_ubsan_no_recover = false
+  is_ubsan_security = false
+  is_ubsan_vptr = false
+  msan_track_origins = 0
+  sanitizer_coverage_flags = ""
+  use_afl = false
+  use_cfi_diag = false
+  use_cfi_recover = false
+  use_libfuzzer = false
+  use_locally_built_instrumented_libraries = false
+  use_sanitizer_coverage = false
+}
+
+# Use dynamic libraries instrumented by one of the sanitizers instead of the
+# standard system libraries. We have instrumented system libraries for msan,
+# which requires them to prevent false positives.
+# TODO(thakis): Maybe remove this variable.
+use_prebuilt_instrumented_libraries = is_msan
+
+# Whether we are doing a fuzzer build. Normally this should be checked instead
+# of checking "use_libfuzzer || use_afl" because often developers forget to
+# check for "use_afl".
+use_fuzzing_engine = use_libfuzzer || use_afl || use_external_fuzzing_engine
+
+# Args that are in turn dependent on other args must be in a separate
+# declare_args block. User overrides are only applied at the end of a
+# declare_args block.
+declare_args() {
+  # Generates an owners file for each fuzzer test.
+  # TODO(crbug.com/1194183): Remove this arg when finding OWNERS is faster.
+  generate_fuzzer_owners = use_fuzzing_engine
+
+  use_sanitizer_coverage =
+      !use_clang_coverage &&
+      (use_fuzzing_engine || sanitizer_coverage_flags != "")
+
+  # https://crbug.com/1002058: Code coverage works inside the sandbox via the
+  # help of several helper IPCs. Unfortunately, the sandbox-only path does not
+  # work well for fuzzing builds. Since fuzzing builds already disable the
+  # sandbox when dumping coverage, limit the sandbox-only path to non-fuzzing
+  # builds.
+  # Everything is IPC on Fuchsia, so this workaround for code coverage inside
+  # the sandbox does not apply.
+  use_clang_profiling_inside_sandbox =
+      use_clang_profiling && !use_fuzzing_engine && !is_fuchsia
+}
+
+if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard"
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
+}
+
+# Whether we are linking against a sanitizer runtime library. Among other
+# things, this changes the default symbol level and other settings in order to
+# prepare to create stack traces "live" using the sanitizer runtime.
+using_sanitizer = is_asan || is_hwasan || is_lsan || is_tsan || is_msan ||
+                  is_ubsan || is_ubsan_null || is_ubsan_vptr ||
+                  is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
+
+assert(!using_sanitizer || is_clang,
+       "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+assert(!is_cfi || is_clang,
+       "is_cfi requires setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+    is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+if (use_libfuzzer && (is_linux || is_chromeos)) {
+  if (is_asan) {
+    # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
+    # relies on LEAK_SANITIZER define to avoid false positives.
+    is_lsan = true
+  }
+}
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use-case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling
+# unsupported or inadvisable configurations.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
+       "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_hwasan || (is_android && current_cpu == "arm64"),
+       "HWASan only supported on Android ARM64 builds.")
+
+assert(!is_msan || ((is_linux || is_chromeos) && current_cpu == "x64"),
+       "MSan currently only works on 64-bit Linux and ChromeOS builds.")
+
+assert(!is_lsan || is_asan, "is_lsan = true requires is_asan = true also.")
+
+# ASAN build on Windows is not working in debug mode. Intercepting memory
+# allocation functions is hard on Windows and not yet implemented in LLVM.
+assert(!is_win || !is_debug || !is_asan,
+       "ASan on Windows doesn't work in debug (set is_debug=false).")
+
+# libFuzzer targets can fail to build or behave incorrectly when built without
+# ASAN on Windows.
+assert(!is_win || !use_libfuzzer || is_asan,
+       "use_libfuzzer on Windows requires setting is_asan = true")
+
+# Make sure that if we recover on detection (i.e. not crash), diagnostics are
+# printed.
+assert(!use_cfi_recover || use_cfi_diag,
+       "Only use CFI recovery together with diagnostics.")
+
+# TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+# not supported by the Chromium mac_clang_x64 toolchain on iOS distribution.
+# The coverage works with iOS toolchain but it is broken when the mac
+# toolchain is used as a secondary one on iOS distribution. E.g., it should be
+# possible to build the "net" target for iOS with the sanitizer coverage
+# enabled.
+assert(
+    !(use_sanitizer_coverage && is_mac && target_os == "ios"),
+    "crbug.com/753445: use_sanitizer_coverage=true is not supported by the " +
+        "Chromium mac_clang_x64 toolchain on iOS distribution. Please set " +
+        "the argument value to false.")
+
+# Use these lists of configs to disable instrumenting code that is part of a
+# fuzzer, but which isn't being targeted (such as libprotobuf-mutator, *.pb.cc
+# and libprotobuf when they are built as part of a proto fuzzer). Adding or
+# removing these lists does not have any effect if use_libfuzzer or use_afl are
+# not passed as arguments to gn.
+not_fuzzed_remove_configs = []
+not_fuzzed_remove_nonasan_configs = []
+
+if (use_fuzzing_engine) {
+  # Removing coverage should always just work.
+  not_fuzzed_remove_configs += [ "//build/config/coverage:default_coverage" ]
+  not_fuzzed_remove_nonasan_configs +=
+      [ "//build/config/coverage:default_coverage" ]
+
+  if (!is_msan) {
+    # Allow sanitizer instrumentation to be removed if we are not using MSan
+    # since binaries cannot be partially instrumented with MSan.
+    not_fuzzed_remove_configs +=
+        [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+    # Certain parts of binaries must be instrumented with ASan if the rest of
+    # the binary is. For these, only remove non-ASan sanitizer instrumentation.
+    if (!is_asan) {
+      not_fuzzed_remove_nonasan_configs +=
+          [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+      assert(not_fuzzed_remove_nonasan_configs == not_fuzzed_remove_configs)
+    }
+  }
+}
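+
+# Example: a typical libFuzzer + ASan configuration (a sketch; the out
+# directory is hypothetical). Note the asserts above: sanitizers should
+# generally be used with is_debug = false.
+#   gn gen out/LibFuzzer \
+#       --args='use_libfuzzer=true is_asan=true is_debug=false'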
diff --git a/src/build/config/sysroot.gni b/src/build/config/sysroot.gni
new file mode 100644
index 0000000..18d2d57
--- /dev/null
+++ b/src/build/config/sysroot.gni
@@ -0,0 +1,81 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # The path of the sysroot that is applied when compiling using the target
+  # toolchain.
+  target_sysroot = ""
+
+  # The path to directory containing linux sysroot images.
+  target_sysroot_dir = "//build/linux"
+
+  # The path of the sysroot for the current toolchain. If empty, default
+  # sysroot is used.
+  sysroot = ""
+
+  # Controls the default Linux sysroot. If set to true and sysroot is
+  # empty, the default sysroot is calculated.
+  use_sysroot = current_cpu == "x86" || current_cpu == "x64" ||
+                current_cpu == "arm" || current_cpu == "arm64" ||
+                current_cpu == "mipsel" || current_cpu == "mips64el"
+}
+
+if (sysroot == "") {
+  if (current_os == target_os && current_cpu == target_cpu &&
+      target_sysroot != "") {
+    sysroot = target_sysroot
+  } else if (is_android) {
+    import("//build/config/android/config.gni")
+
+    # Android uses unified headers, and thus a single compile time sysroot
+    sysroot = "$android_toolchain_root/sysroot"
+  } else if ((is_linux || is_chromeos) && use_sysroot) {
+    # By default build against a sysroot image downloaded from Cloud Storage
+    # during gclient runhooks.
+    if (current_cpu == "x64") {
+      sysroot = "$target_sysroot_dir/debian_sid_amd64-sysroot"
+    } else if (current_cpu == "x86") {
+      sysroot = "$target_sysroot_dir/debian_sid_i386-sysroot"
+    } else if (current_cpu == "mipsel") {
+      sysroot = "$target_sysroot_dir/debian_sid_mips-sysroot"
+    } else if (current_cpu == "mips64el") {
+      sysroot = "$target_sysroot_dir/debian_sid_mips64el-sysroot"
+    } else if (current_cpu == "arm") {
+      sysroot = "$target_sysroot_dir/debian_sid_arm-sysroot"
+    } else if (current_cpu == "arm64") {
+      sysroot = "$target_sysroot_dir/debian_sid_arm64-sysroot"
+    } else {
+      assert(false, "No linux sysroot for cpu: $current_cpu")
+    }
+
+    if (sysroot != "") {
+      _script_arch = current_cpu
+      if (_script_arch == "x86") {
+        _script_arch = "i386"
+      } else if (_script_arch == "x64") {
+        _script_arch = "amd64"
+      }
+      assert(
+          exec_script("//build/dir_exists.py",
+                      [ rebase_path(sysroot) ],
+                      "string") == "True",
+          "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
+    }
+  } else if (is_mac) {
+    import("//build/config/mac/mac_sdk.gni")
+    sysroot = mac_sdk_path
+  } else if (is_ios) {
+    import("//build/config/ios/ios_sdk.gni")
+    sysroot = ios_sdk_path
+  } else if (is_fuchsia) {
+    if (current_cpu == "arm64" || current_cpu == "x64") {
+      sysroot = "//third_party/fuchsia-sdk/sdk/arch/$current_cpu/sysroot"
+    }
+  }
+}
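
For illustration, a hypothetical `args.gn` that skips the downloaded Debian images and supplies a custom sysroot instead; the path is a placeholder, and note that `target_sysroot` only takes effect when the current toolchain matches the target, per the logic above:

```
use_sysroot = false
target_sysroot = "/opt/my-custom-sysroot"
```
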
diff --git a/src/build/config/ui.gni b/src/build/config/ui.gni
new file mode 100644
index 0000000..1ad352c
--- /dev/null
+++ b/src/build/config/ui.gni
@@ -0,0 +1,66 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# These flags are ui-related so should eventually be moved to various places
+# in //ui/*.
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
+
+declare_args() {
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+  # that does not require X11.
+  use_ozone =
+      is_chromeos || (is_chromecast && !is_android) || is_fuchsia || is_linux
+
+  # Indicates if the UI toolkit depends on X11.
+  # Enabled by default. Can be disabled if an Ozone-only build is required,
+  # and vice versa.
+  use_x11 = is_linux && !is_chromecast && !is_chromeos_lacros
+
+  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+  # of a replacement for GDI or GTK.
+  use_aura = is_win || is_linux || is_chromeos || is_fuchsia
+}
+
+declare_args() {
+  # True means the UI is built using the "views" framework.
+  toolkit_views = is_mac || is_win || is_linux || is_chromeos || is_fuchsia
+
+  # TODO(crbug.com/1171629): Remove is_chromeos_lacros.
+  # Whether we should use glib, a low level C utility library.
+  use_glib = (is_linux || is_chromeos_lacros) && !is_chromecast
+}
+
+# TODO(crbug.com/1171629): Remove is_chromeos_lacros.
+# Make sure glib is not used if building for ChromeOS/Chromecast
+assert(!use_glib || ((is_linux || is_chromeos_lacros) && !is_chromecast))
+
+# TODO(crbug.com/1171629): Remove is_chromeos_lacros.
+# Whether to use atk, the Accessibility ToolKit library
+use_atk = (is_linux || is_chromeos_lacros) && !is_chromecast && use_glib
+
+# Whether using Xvfb to provide a display server for a test might be
+# necessary.
+use_xvfb_in_this_config =
+    is_linux || (is_chromeos_lacros && !is_chromeos_device)
+#
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
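
A short sketch of how these flags are typically consumed from a dependent `BUILD.gn`; the target and file names here are hypothetical:

```
import("//build/config/ui.gni")

source_set("widget") {
  sources = [ "widget.cc" ]
  if (use_x11) {
    sources += [ "widget_x11.cc" ]
  }
  if (use_ozone) {
    sources += [ "widget_ozone.cc" ]
  }
}
```
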
diff --git a/src/build/config/v8_target_cpu.gni b/src/build/config/v8_target_cpu.gni
new file mode 100644
index 0000000..305981f
--- /dev/null
+++ b/src/build/config/v8_target_cpu.gni
@@ -0,0 +1,61 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # This arg is used when we want to tell the JIT-generating v8 code
+  # that we want to have it generate for an architecture that is different
+  # than the architecture that v8 will actually run on; we then run the
+  # code under an emulator. For example, we might run v8 on x86, but
+  # generate arm code and run that under emulation.
+  #
+  # This arg is defined here rather than in the v8 project because we want
+  # some of the common architecture-specific args (like arm_float_abi or
+  # mips_arch_variant) to be set to their defaults either if the current_cpu
+  # applies *or* if the v8_current_cpu applies.
+  #
+  # As described below, you can also specify the v8_target_cpu to use
+  # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
+  # name after the normal toolchain.
+  #
+  # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
+  # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
+  # `custom_toolchain` is more verbose but makes the toolchain that is
+  # (effectively) being used explicit.
+  #
+  # v8_target_cpu can only be used to target one architecture in a build,
+  # so if you wish to build multiple copies of v8 that are targeting
+  # different architectures, you will need to do something more
+  # complicated involving multiple toolchains along the lines of
+  # custom_toolchain, above.
+  v8_target_cpu = ""
+}
+
+if (v8_target_cpu == "") {
+  if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
+    v8_target_cpu = "arm64"
+  } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
+    v8_target_cpu = "arm"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mips64el") {
+    v8_target_cpu = "mips64el"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mipsel") {
+    v8_target_cpu = "mipsel"
+  } else if (is_msan) {
+    # If we're running under a sanitizer, if we configure v8 to generate
+    # code that will be run under a simulator, then the generated code
+    # also gets the benefits of the sanitizer.
+    v8_target_cpu = "arm64"
+  } else {
+    v8_target_cpu = target_cpu
+  }
+}
+
+declare_args() {
+  # This argument is declared here so that it can be overridden in toolchains.
+  # It should never be explicitly set by the user.
+  v8_current_cpu = v8_target_cpu
+}
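
Per the comments above, there are two equivalent ways to simulate arm64 on an x64 Linux host; as `args.gn` sketches:

```
# Directly select the simulated architecture:
v8_target_cpu = "arm64"

# Or, more verbose but explicit, pick it via the toolchain name:
custom_toolchain = "//build/toolchain/linux:clang_x64_v8_arm64"
```
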
diff --git a/src/build/config/win/BUILD.gn b/src/build/config/win/BUILD.gn
new file mode 100644
index 0000000..813c72c
--- /dev/null
+++ b/src/build/config/win/BUILD.gn
@@ -0,0 +1,587 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/control_flow_guard.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/timestamp.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_win)
+
+declare_args() {
+  # Turn this on to have the linker output extra timing information.
+  win_linker_timing = false
+
+  # possible values for target_winuwp_version:
+  #   "10" - Windows UWP 10
+  #   "8.1" - Windows RT 8.1
+  #   "8.0" - Windows RT 8.0
+  target_winuwp_version = "10"
+
+  # possible values:
+  #   "app" - Windows Store Applications
+  #   "phone" - Windows Phone Applications
+  #   "system" - Windows Drivers and Tools
+  #   "server" - Windows Server Applications
+  #   "desktop" - Windows Desktop Applications
+  target_winuwp_family = "app"
+
+  # Set this to use clang-style diagnostics format instead of MSVC-style, which
+  # is useful in e.g. Emacs compilation mode.
+  # E.g.:
+  #  Without this, clang emits a diagnostic message like this:
+  #    foo/bar.cc(12,34): error: something went wrong
+  #  and with this switch, clang emits it like this:
+  #    foo/bar.cc:12:34: error: something went wrong
+  use_clang_diagnostics_format = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Windows-only.
+config("compiler") {
+  if (current_cpu == "x86") {
+    asmflags = [
+      # When /safeseh is specified, the linker will only produce an image if it
+      # can also produce a table of the image's safe exception handlers. This
+      # table specifies for the operating system which exception handlers are
+      # valid for the image. Note that /SAFESEH isn't accepted on the command
+      # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe.
+      "/safeseh",
+    ]
+  }
+
+  cflags = [
+    "/Gy",  # Enable function-level linking.
+    "/FS",  # Preserve previous PDB behavior.
+    "/bigobj",  # Some of our files are bigger than the regular limits.
+    "/utf-8",  # Assume UTF-8 by default to avoid code page dependencies.
+  ]
+
+  if (is_clang) {
+    cflags += [ "/Zc:twoPhase" ]
+  }
+
+  # Force C/C++ mode for the given GN detected file type. This is necessary
+  # for precompiled headers where the same source file is compiled in both
+  # modes.
+  cflags_c = [ "/TC" ]
+  cflags_cc = [ "/TP" ]
+
+  cflags += [
+    # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
+    "/Zc:sizedDealloc-",
+  ]
+
+  if (is_clang) {
+    # Required to make the 19041 SDK compatible with clang-cl.
+    # See https://crbug.com/1089996 issue #2 for details.
+    cflags += [ "/D__WRL_ENABLE_FUNCTION_STATICS__" ]
+
+    # Tell clang which version of MSVC to emulate.
+    cflags += [ "-fmsc-version=1916" ]
+
+    if (is_component_build) {
+      cflags += [
+        # Do not export inline member functions. This makes component builds
+        # faster. This is similar to -fvisibility-inlines-hidden.
+        "/Zc:dllexportInlines-",
+      ]
+    }
+
+    if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+    } else if (current_cpu == "x64") {
+      cflags += [ "-m64" ]
+    } else if (current_cpu == "arm64") {
+      cflags += [ "--target=arm64-windows" ]
+    } else {
+      assert(false, "unknown current_cpu " + current_cpu)
+    }
+
+    # Chrome currently requires SSE3. Clang supports targeting any Intel
+    # microarchitecture. MSVC only supports a subset of architectures, and the
+    # next step after SSE2 will be AVX.
+    if (current_cpu == "x86" || current_cpu == "x64") {
+      cflags += [ "-msse3" ]
+    }
+
+    if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+        "True") {
+      cflags += [
+        # cmd.exe doesn't understand ANSI escape codes by default,
+        # so only enable them if something emulating them is around.
+        "-fansi-escape-codes",
+      ]
+    }
+
+    if (use_clang_diagnostics_format) {
+      cflags += [ "/clang:-fdiagnostics-format=clang" ]
+    }
+  }
+
+  if (use_lld && !use_thin_lto && (is_clang || !use_goma)) {
+    # /Brepro lets the compiler not write the mtime field in the .obj output.
+    # link.exe /incremental relies on this field to work correctly, but lld
+    # never looks at this timestamp, so it's safe to pass this flag with
+    # lld and get more deterministic compiler output in return.
+    # In LTO builds, the compiler doesn't write .obj files containing mtimes,
+    # so /Brepro is ignored there.
+    cflags += [ "/Brepro" ]
+  }
+
+  ldflags = []
+
+  if (use_lld) {
+    # lld defaults to writing the current time in the pe/coff header.
+    # For build reproducibility, pass an explicit timestamp. See
+    # build/compute_build_timestamp.py for how the timestamp is chosen.
+    # (link.exe also writes the current time, but it doesn't have a flag to
+    # override that behavior.)
+    ldflags += [ "/TIMESTAMP:" + build_timestamp ]
+
+    # Don't look for libpaths in %LIB%, similar to /X in cflags above.
+    ldflags += [ "/lldignoreenv" ]
+  }
+
+  if (!is_debug && !is_component_build) {
+    # Enable standard linker optimizations like GC (/OPT:REF) and ICF in static
+    # release builds.
+    # Release builds always want these optimizations, so enable them explicitly.
+    ldflags += [
+      "/OPT:REF",
+      "/OPT:ICF",
+      "/INCREMENTAL:NO",
+      "/FIXED:NO",
+    ]
+
+    if (use_lld) {
+      # String tail merging leads to smaller binaries, but they don't compress
+      # as well, leading to increased mini_installer size (crbug.com/838449).
+      ldflags += [ "/OPT:NOLLDTAILMERGE" ]
+    }
+
+    # TODO(siggi): Is this of any use anymore?
+    # /PROFILE ensures that the PDB file contains FIXUP information (growing the
+    # PDB file by about 5%) but does not otherwise alter the output binary. It
+    # is enabled opportunistically for builds where it is not prohibited (not
+    # supported when incrementally linking, or using /debug:fastlink).
+    ldflags += [ "/PROFILE" ]
+  }
+
+  # arflags apply only to static_libraries. The normal linker configs are only
+  # set for executable and shared library targets so arflags must be set
+  # elsewhere. Since this is relatively contained, we just apply them in this
+  # more general config and they will only have an effect on static libraries.
+  arflags = [
+    # "No public symbols found; archive member will be inaccessible." This
+    # means that one or more object files in the library can never be
+    # pulled in to targets that link to this library. It's just a warning that
+    # the source file is a no-op.
+    "/ignore:4221",
+  ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Windows-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  cflags = []
+  cflags_cc = []
+
+  # Defines that set up the CRT.
+  defines = [
+    "__STD_C",
+    "_CRT_RAND_S",
+    "_CRT_SECURE_NO_DEPRECATE",
+    "_SCL_SECURE_NO_DEPRECATE",
+  ]
+
+  # Defines that set up the Windows SDK.
+  defines += [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "PSAPI_VERSION=2",
+    "WIN32",
+    "_SECURE_ATL",
+  ]
+
+  if (current_os == "winuwp") {
+    # When targeting Windows Runtime, certain compiler/linker flags are
+    # necessary.
+    defines += [
+      "WINUWP",
+      "__WRL_NO_DEFAULT_LIB__",
+    ]
+    if (target_winuwp_family == "app") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP" ]
+    } else if (target_winuwp_family == "phone") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" ]
+    } else if (target_winuwp_family == "system") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SYSTEM" ]
+    } else if (target_winuwp_family == "server") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SERVER" ]
+    } else {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+    }
+    cflags_cc += [ "/EHsc" ]
+
+    # This warning is given because the linker cannot tell the difference
+    # between consuming WinRT APIs and authoring WinRT within static
+    # libraries; as such, this warning is always given by the linker. Since
+    # consuming WinRT APIs within a library is legitimate but authoring
+    # WinRT APIs is not allowed, this warning is disabled to ignore the
+    # legitimate consumption of WinRT APIs within static library builds.
+    arflags = [ "/IGNORE:4264" ]
+
+    if (target_winuwp_version == "10") {
+      defines += [ "WIN10=_WIN32_WINNT_WIN10" ]
+    } else if (target_winuwp_version == "8.1") {
+      defines += [ "WIN8_1=_WIN32_WINNT_WINBLUE" ]
+    } else if (target_winuwp_version == "8.0") {
+      defines += [ "WIN8=_WIN32_WINNT_WIN8" ]
+    }
+  } else {
+    # When not targeting Windows Runtime, make sure the WINAPI family is set
+    # to desktop.
+    defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+  }
+}
+
+# Chromium supports running on Windows 7, but if these constants are set to
+# Windows 7, then newer APIs aren't made available by the Windows SDK.
+# So we set this to Windows 10 and then are careful to check at runtime
+# to only call newer APIs when they're available.
+# Some third-party libraries assume that these defines set what version of
+# Windows is available at runtime. Targets using these libraries need to
+# manually override this config for their compiles.
+config("winver") {
+  defines = [
+    "NTDDI_VERSION=NTDDI_WIN10_VB",
+
+    # We can't say `=_WIN32_WINNT_WIN10` here because some files do
+    # `#if WINVER < 0x0600` without including windows.h before,
+    # and then _WIN32_WINNT_WIN10 isn't yet known to be 0x0A00.
+    "_WIN32_WINNT=0x0A00",
+    "WINVER=0x0A00",
+  ]
+}
+
+# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
+config("sdk_link") {
+  assert(current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm" ||
+             current_cpu == "arm64",
+         "Only supports x64, x86, arm and arm64 CPUs")
+  if (current_cpu == "x64") {
+    ldflags = [ "/MACHINE:X64" ]
+  } else if (current_cpu == "x86") {
+    ldflags = [
+      "/MACHINE:X86",
+      "/SAFESEH",  # Not compatible with x64 so use only for x86.
+      "/largeaddressaware",
+    ]
+  } else if (current_cpu == "arm") {
+    ldflags = [ "/MACHINE:ARM" ]
+  } else if (current_cpu == "arm64") {
+    ldflags = [ "/MACHINE:ARM64" ]
+  }
+
+  vcvars_toolchain_data = exec_script("../../toolchain/win/setup_toolchain.py",
+                                      [
+                                        visual_studio_path,
+                                        windows_sdk_path,
+                                        visual_studio_runtime_dirs,
+                                        current_os,
+                                        current_cpu,
+                                        "none",
+                                      ],
+                                      "scope")
+
+  vc_lib_path = vcvars_toolchain_data.vc_lib_path
+  if (defined(vcvars_toolchain_data.vc_lib_atlmfc_path)) {
+    vc_lib_atlmfc_path = vcvars_toolchain_data.vc_lib_atlmfc_path
+  }
+  vc_lib_um_path = vcvars_toolchain_data.vc_lib_um_path
+
+  lib_dirs = [
+    "$vc_lib_um_path",
+    "$vc_lib_path",
+  ]
+  if (defined(vc_lib_atlmfc_path)) {
+    lib_dirs += [ "$vc_lib_atlmfc_path" ]
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+    "/DYNAMICBASE",
+  ]
+
+  if (win_linker_timing) {
+    ldflags += [
+      "/time",
+      "/verbose:incr",
+    ]
+  }
+}
+
+config("default_cfg_compiler") {
+  # Emit table of address-taken functions for Control-Flow Guard (CFG).
+  # This is needed to allow functions to be called by code that is built
+  # with CFG enabled, such as system libraries.
+  # The CFG guards are only emitted if |win_enable_cfg_guards| is enabled.
+  if (is_clang) {
+    if (win_enable_cfg_guards) {
+      cflags = [ "/guard:cf" ]
+    } else {
+      cflags = [ "/guard:cf,nochecks" ]
+    }
+  }
+}
+
+# To disable CFG guards for a target, remove the "default_cfg_compiler"
+# config, and add "disable_guards_cfg_compiler" config.
+config("disable_guards_cfg_compiler") {
+  # Emit table of address-taken functions for Control-Flow Guard (CFG).
+  # This is needed to allow functions to be called by code that is built
+  # with CFG enabled, such as system libraries.
+  if (is_clang) {
+    cflags = [ "/guard:cf,nochecks" ]
+  }
+}
+
+config("cfi_linker") {
+  # Control Flow Guard (CFG)
+  # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
+  # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG cannot be
+  # turned on either.
+  # ASan and CFG together lead to slow process startup. Chromium's test runner
+  # uses lots of child processes, so this means things are really slow. Disable
+  # CFG for now. https://crbug.com/846966
+  if (!is_debug && !is_asan) {
+    # Turn on CFG bitmap generation and CFG load config.
+    ldflags = [ "/guard:cf" ]
+  }
+}
+
+# This is a superset of all the delayloads needed for chrome.exe, chrome.dll,
+# chrome_child.dll, and chrome_elf.dll. The linker will automatically ignore
+# anything which is not linked to the binary at all.
+# Most of the dlls are simply not required at startup (or at all, depending
+# on how the browser is used). The following dlls are interconnected and need to
+# be delayloaded together to ensure user32 does not load too early or at all,
+# depending on the process type: user32, gdi32, comctl32, comdlg32, cryptui,
+# d3d9, dwmapi, imm32, msi, ole32, oleacc, rstrtmgr, shell32, shlwapi, and
+# uxtheme.
+# There are some exceptions to this list which need to be declared separately.
+# Some dlls cannot be delayloaded by chrome_child.dll due to the sandbox
+# restrictions that prevent them from being loaded properly. Those dlls are
+# specified in the separate config below.
+# This config should also be used for any test binary whose goal is to run
+# tests with the full browser.
+config("delayloads") {
+  ldflags = [
+    "/DELAYLOAD:api-ms-win-core-winrt-error-l1-1-0.dll",
+    "/DELAYLOAD:api-ms-win-core-winrt-l1-1-0.dll",
+    "/DELAYLOAD:api-ms-win-core-winrt-string-l1-1-0.dll",
+    "/DELAYLOAD:comctl32.dll",
+    "/DELAYLOAD:comdlg32.dll",
+    "/DELAYLOAD:credui.dll",
+    "/DELAYLOAD:cryptui.dll",
+    "/DELAYLOAD:d3d11.dll",
+    "/DELAYLOAD:d3d9.dll",
+    "/DELAYLOAD:dwmapi.dll",
+    "/DELAYLOAD:dxgi.dll",
+    "/DELAYLOAD:dxva2.dll",
+    "/DELAYLOAD:esent.dll",
+    "/DELAYLOAD:gdi32.dll",
+    "/DELAYLOAD:hid.dll",
+    "/DELAYLOAD:imagehlp.dll",
+    "/DELAYLOAD:imm32.dll",
+    "/DELAYLOAD:msi.dll",
+    "/DELAYLOAD:netapi32.dll",
+    "/DELAYLOAD:ncrypt.dll",
+    "/DELAYLOAD:ole32.dll",
+    "/DELAYLOAD:oleacc.dll",
+    "/DELAYLOAD:propsys.dll",
+    "/DELAYLOAD:psapi.dll",
+    "/DELAYLOAD:rpcrt4.dll",
+    "/DELAYLOAD:rstrtmgr.dll",
+    "/DELAYLOAD:setupapi.dll",
+    "/DELAYLOAD:shell32.dll",
+    "/DELAYLOAD:shlwapi.dll",
+    "/DELAYLOAD:urlmon.dll",
+    "/DELAYLOAD:user32.dll",
+    "/DELAYLOAD:usp10.dll",
+    "/DELAYLOAD:uxtheme.dll",
+    "/DELAYLOAD:wer.dll",
+    "/DELAYLOAD:wevtapi.dll",
+    "/DELAYLOAD:wininet.dll",
+    "/DELAYLOAD:winusb.dll",
+    "/DELAYLOAD:wsock32.dll",
+    "/DELAYLOAD:wtsapi32.dll",
+  ]
+}
+
+config("delayloads_not_for_child_dll") {
+  ldflags = [
+    "/DELAYLOAD:advapi32.dll",
+    "/DELAYLOAD:crypt32.dll",
+    "/DELAYLOAD:dbghelp.dll",
+    "/DELAYLOAD:dhcpcsvc.dll",
+    "/DELAYLOAD:dwrite.dll",
+    "/DELAYLOAD:iphlpapi.dll",
+    "/DELAYLOAD:oleaut32.dll",
+    "/DELAYLOAD:secur32.dll",
+    "/DELAYLOAD:uiautomationcore.dll",
+    "/DELAYLOAD:userenv.dll",
+    "/DELAYLOAD:winhttp.dll",
+    "/DELAYLOAD:winmm.dll",
+    "/DELAYLOAD:winspool.drv",
+    "/DELAYLOAD:wintrust.dll",
+    "/DELAYLOAD:ws2_32.dll",
+  ]
+}
+
+# CRT --------------------------------------------------------------------------
+
+# Configures how the runtime library (CRT) is going to be used.
+# See https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx for a reference of
+# what each value does.
+config("default_crt") {
+  if (is_component_build) {
+    # Component mode: dynamic CRT. Since the library is shared, it requires
+    # exceptions or will give errors about things not matching, so keep
+    # exceptions on.
+    configs = [ ":dynamic_crt" ]
+  } else {
+    if (current_os == "winuwp") {
+      # https://blogs.msdn.microsoft.com/vcblog/2014/06/10/the-great-c-runtime-crt-refactoring/
+      # contains a detailed explanation of what is happening with the Windows
+      # CRT in Visual Studio releases related to Windows store applications.
+      configs = [ ":dynamic_crt" ]
+    } else {
+      # Desktop Windows: static CRT.
+      configs = [ ":static_crt" ]
+    }
+  }
+}
+
+# Use this to force use of the release CRT when building perf-critical build
+# tools that need to be fully optimized even in debug builds, for those times
+# when the debug CRT is part of the bottleneck. This also avoids *implicitly*
+# defining _DEBUG.
+config("release_crt") {
+  if (is_component_build) {
+    cflags = [ "/MD" ]
+  } else {
+    cflags = [ "/MT" ]
+  }
+}
+
+config("dynamic_crt") {
+  if (is_debug) {
+    # This pulls in the DLL debug CRT and defines _DEBUG
+    cflags = [ "/MDd" ]
+  } else {
+    cflags = [ "/MD" ]
+  }
+}
+
+config("static_crt") {
+  if (is_debug) {
+    # This pulls in the static debug CRT and defines _DEBUG
+    cflags = [ "/MTd" ]
+  } else {
+    cflags = [ "/MT" ]
+  }
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+  # The number after the comma is the minimum required OS version.
+  # 5.02 = Windows Server 2003.
+  subsystem_version_suffix = ",5.02"
+} else if (current_cpu == "arm64") {
+  # Windows ARM64 requires Windows 10.
+  subsystem_version_suffix = ",10.0"
+} else {
+  # 5.01 = Windows XP.
+  subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+  # Enable incremental linking for debug builds and all component builds - any
+  # builds where performance is not job one.
+  # TODO(thakis): Always turn this on with lld, no reason not to.
+  if (is_debug || is_component_build) {
+    ldflags = [ "/INCREMENTAL" ]
+    if (use_lld) {
+      # lld doesn't use ilk files and doesn't really have an incremental link
+      # mode; the only effect of the flag is that the .lib file timestamp isn't
+      # updated if the .lib doesn't change.
+      # TODO(thakis): Why pass /OPT:NOREF for lld, but not otherwise?
+      # TODO(thakis): /INCREMENTAL is on by default in link.exe, but not in
+      # lld.
+      ldflags += [ "/OPT:NOREF" ]
+    }
+  } else {
+    ldflags = [ "/INCREMENTAL:NO" ]
+  }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accommodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [ "NOMINMAX" ]
+}
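
As the comments on `lean_and_mean` and `nominmax` suggest, a target whose code conflicts with those defines can opt out; a hypothetical example, assuming both configs were applied by the default config list:

```
source_set("legacy_win_code") {
  sources = [ "legacy.cc" ]
  configs -= [
    "//build/config/win:lean_and_mean",
    "//build/config/win:nominmax",
  ]
}
```
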
diff --git a/src/build/config/win/console_app.gni b/src/build/config/win/console_app.gni
new file mode 100644
index 0000000..cac2ef5
--- /dev/null
+++ b/src/build/config/win/console_app.gni
@@ -0,0 +1,18 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # If true, builds as a console app (rather than a windowed app), which allows
+  # logging to be printed to the user. This will cause a terminal window to pop
+  # up when the executable is not run from the command line, so should only be
+  # used for development. Only has an effect on Windows builds.
+  win_console_app = false
+}
+
+if (is_win && is_asan) {
+  # AddressSanitizer build should be a console app since it writes to stderr.
+  win_console_app = true
+}
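
A sketch of how an executable could honor this flag when choosing its subsystem config; the wiring here is illustrative, not the actual default logic:

```
import("//build/config/win/console_app.gni")

executable("my_app") {
  sources = [ "main.cc" ]
  if (win_console_app) {
    configs += [ "//build/config/win:console" ]
  } else {
    configs += [ "//build/config/win:windowed" ]
  }
}
```
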
diff --git a/src/build/config/win/control_flow_guard.gni b/src/build/config/win/control_flow_guard.gni
new file mode 100644
index 0000000..bf6a82a
--- /dev/null
+++ b/src/build/config/win/control_flow_guard.gni
@@ -0,0 +1,23 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # Set this to true to enable generation of CFG indirect call dispatch
+  # guards.
+  win_enable_cfg_guards = false
+}
+
+if (win_enable_cfg_guards) {
+  # Control Flow Guard (CFG)
+  # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
+  # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can't be
+  # turned on either.
+  # ASan and CFG together lead to slow process startup. Chromium's test runner
+  # uses lots of child processes, so this means things are really slow. Disable
+  # CFG for now. https://crbug.com/846966
+  assert(!is_debug && !is_asan,
+         "CFG does not work well in debug builds or with ASAN")
+}
diff --git a/src/build/config/win/manifest.gni b/src/build/config/win/manifest.gni
new file mode 100644
index 0000000..e211508
--- /dev/null
+++ b/src/build/config/win/manifest.gni
@@ -0,0 +1,118 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# HOW MANIFESTS WORK IN THE GN BUILD
+#
+# Use the windows_manifest template to declare a manifest generation step.
+# This will combine all listed .manifest files. To link this manifest, just
+# depend on the manifest target from your executable or shared library.
+#
+# This will define an empty placeholder target on non-Windows platforms so
+# the manifest declarations and dependencies do not need to be inside of OS
+# conditionals.
+#
+# A binary can depend on only one manifest target, but the manifest target
+# can depend on many individual .manifest files which will be merged. As a
+# result, only executables and shared libraries should depend on manifest
+# targets. If you want to add a manifest to a component, put the dependency
+# behind a "if (is_component_build)" conditional.
+#
+# Generally you will just want the defaults for the Chrome build. In this case
+# the binary should just depend on one of the targets in //build/win/. There
+# are also individual manifest files in that directory you can reference via
+# the *_manifest variables defined below to pick and choose only some defaults.
+# You might combine these with a custom manifest file to get specific behavior.
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome OS compatibility list.
+default_compatibility_manifest = "//build/win/compatibility.manifest"
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome common controls compatibility.
+common_controls_manifest = "//build/win/common_controls.manifest"
+
+# Reference this manifest to request that Windows not perform any elevation
+# when running your program. Otherwise, it might do some autodetection and
+# request elevated privileges from the user. This is normally what you want.
+as_invoker_manifest = "//build/win/as_invoker.manifest"
+
+# An alternative to as_invoker_manifest when you want the application to always
+# elevate.
+require_administrator_manifest = "//build/win/require_administrator.manifest"
+
+# Request the segment heap. See https://crbug.com/1014701 for details.
+declare_args() {
+  enable_segment_heap = false
+}
+segment_heap_manifest = "//build/win/segment_heap.manifest"
+
+# Construct a target to combine the given manifest files into a .rc file.
+#
+# Variables for the windows_manifest template:
+#
+#   sources: (required)
+#     List of source .manifest files to add.
+#
+#   deps: (optional)
+#   visibility: (optional)
+#     Normal meaning.
+#
+# Example:
+#
+#   windows_manifest("doom_melon_manifest") {
+#     sources = [
+#       "doom_melon.manifest",   # Custom values in here.
+#       default_compatibility_manifest,  # Want the normal OS compat list.
+#     ]
+#   }
+#
+#   executable("doom_melon") {
+#     deps = [ ":doom_melon_manifest" ]
+#     ...
+#   }
+
+if (is_win) {
+  template("windows_manifest") {
+    config_name = "${target_name}__config"
+    source_set_name = target_name
+
+    config(config_name) {
+      visibility = [ ":$source_set_name" ]
+      assert(defined(invoker.sources),
+             "\"sources\" must be defined for a windows_manifest target")
+      manifests = []
+      foreach(i, rebase_path(invoker.sources, root_build_dir)) {
+        manifests += [ "/manifestinput:" + i ]
+      }
+      ldflags = [
+                  "/manifest:embed",
+
+                  # We handle UAC by adding explicit .manifest files instead.
+                  "/manifestuac:no",
+                ] + manifests
+    }
+
+    # This source set only exists to add a dep on the invoker's deps and to
+    # add a public_config that sets ldflags on dependents.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_configs = [ ":$config_name" ]
+
+      # Apply any dependencies from the invoker to this target, since those
+      # dependencies may have created the input manifest files.
+      forward_variables_from(invoker, [ "deps" ])
+    }
+  }
+} else {
+  # Make a no-op group on non-Windows platforms so windows_manifest
+  # instantiations don't need to be inside windows blocks.
+  template("windows_manifest") {
+    group(target_name) {
+      # Prevent unused variable warnings on non-Windows platforms.
+      assert(invoker.sources != "")
+      assert(!defined(invoker.deps) || invoker.deps != "")
+      assert(!defined(invoker.visibility) || invoker.visibility != "")
+    }
+  }
+}
diff --git a/src/build/config/win/visual_studio_version.gni b/src/build/config/win/visual_studio_version.gni
new file mode 100644
index 0000000..982fbe8
--- /dev/null
+++ b/src/build/config/win/visual_studio_version.gni
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used which is to use the
+  # automatic toolchain in depot_tools. If set, you must also set the
+  # visual_studio_version and wdk_path.
+  visual_studio_path = ""
+
+  # Version of Visual Studio pointed to by the visual_studio_path.
+  # Currently always "2015".
+  visual_studio_version = ""
+
+  # Directory of the Windows driver kit. If visual_studio_path is empty, this
+  # will be auto-filled.
+  wdk_path = ""
+
+  # Full path to the Windows SDK, not including a backslash at the end.
+  # This value is the default location, override if you have a different
+  # installation location.
+  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
+}
+
+if (visual_studio_path == "") {
+  toolchain_data =
+      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+  visual_studio_path = toolchain_data.vs_path
+  windows_sdk_path = toolchain_data.sdk_path
+  visual_studio_version = toolchain_data.vs_version
+  wdk_path = toolchain_data.wdk_dir
+  visual_studio_runtime_dirs = toolchain_data.runtime_dirs
+} else {
+  assert(visual_studio_version != "",
+         "You must set the visual_studio_version if you set the path")
+  assert(wdk_path != "",
+         "You must set the wdk_path if you set the visual studio path")
+  visual_studio_runtime_dirs = []
+}
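
When overriding the depot_tools toolchain, the asserts above require the related arguments to be set together; a hypothetical `args.gn` with placeholder paths:

```
visual_studio_path = "C:\Program Files (x86)\Microsoft Visual Studio 14.0"
visual_studio_version = "2015"
wdk_path = "C:\Program Files (x86)\Windows Kits\10"
```
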
diff --git a/src/build/config/x64.gni b/src/build/config/x64.gni
new file mode 100644
index 0000000..9e86979
--- /dev/null
+++ b/src/build/config/x64.gni
@@ -0,0 +1,24 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# These are primarily relevant in current_cpu == "x64" contexts, where
+# X64 code is being compiled.
+if (current_cpu == "x64") {
+  declare_args() {
+    # The micro architecture of x64 cpu. This will be a string like "haswell" or
+    # "skylake". An empty string means to use the default architecture which is
+    # "x86-64".
+    # CPU options for "x86-64" in GCC can be found at
+    # https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html
+    # CPU options for "x86-64" in llvm can be found at
+    # https://github.com/llvm/llvm-project/blob/master/llvm/include/llvm/Support/X86TargetParser.def
+    x64_arch = ""
+  }
+
+  if ((is_posix && !is_apple) || is_fuchsia) {
+    if (x64_arch == "") {
+      x64_arch = "x86-64"
+    }
+  }
+}
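
For example, a hypothetical `args.gn` tuning x64 codegen for a specific micro-architecture:

```
x64_arch = "haswell"
```
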
diff --git a/src/build/config/zip.gni b/src/build/config/zip.gni
new file mode 100644
index 0000000..68bc494
--- /dev/null
+++ b/src/build/config/zip.gni
@@ -0,0 +1,48 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("python.gni")
+
+# Creates a zip archive of the inputs.
+#
+# output (required)
+#     Path to output zip.
+# inputs (required)
+#     List of input files to zip.
+# base_dir (optional)
+#     If provided, the archive paths will be relative to this directory.
+#     Applies only to |inputs|.
+#
+# deps, public_deps, data, data_deps, testonly, visibility
+#     Normal meaning.
+template("zip") {
+  action_with_pydeps(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    script = "//build/android/gyp/zip.py"
+    inputs = invoker.inputs
+    outputs = [ invoker.output ]
+
+    args = [
+      "--output",
+      rebase_path(invoker.output, root_build_dir),
+    ]
+
+    _rebased_inputs = rebase_path(invoker.inputs, root_build_dir)
+    args += [ "--input-files=$_rebased_inputs" ]
+    if (defined(invoker.base_dir)) {
+      args += [
+        "--input-files-base-dir",
+        rebase_path(invoker.base_dir, root_build_dir),
+      ]
+    }
+  }
+}
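
A usage sketch of the template above; the target and file names are hypothetical:

```
zip("test_data_zip") {
  inputs = [
    "data/a.txt",
    "data/b.txt",
  ]
  base_dir = "data"  # Archive entries become a.txt and b.txt.
  output = "$target_gen_dir/test_data.zip"
}
```
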
diff --git a/src/build/copy_test_data_ios.py b/src/build/copy_test_data_ios.py
new file mode 100755
index 0000000..cd23fb2
--- /dev/null
+++ b/src/build/copy_test_data_ios.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+from __future__ import print_function
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+  pass
+
+def EscapePath(path):
+  """Returns a path with spaces escaped."""
+  return path.replace(" ", "\\ ")
+
+def ListFilesForPath(path):
+  """Returns a list of all the files under a given path."""
+  output = []
+  # Ignore revision control metadata directories.
+  if (os.path.basename(path).startswith('.git') or
+      os.path.basename(path).startswith('.svn')):
+    return output
+
+  # Files get returned without modification.
+  if not os.path.isdir(path):
+    output.append(path)
+    return output
+
+  # Directories get recursively expanded.
+  contents = os.listdir(path)
+  for item in contents:
+    full_path = os.path.join(path, item)
+    output.extend(ListFilesForPath(full_path))
+  return output
+
+def CalcInputs(inputs):
+  """Computes the full list of input files for a set of command-line arguments.
+  """
+  # |inputs| is a list of paths, which may be directories.
+  output = []
+  for input in inputs:
+    output.extend(ListFilesForPath(input))
+  return output
+
+def CopyFiles(relative_filenames, output_basedir):
+  """Copies files to the given output directory."""
+  for file in relative_filenames:
+    relative_dirname = os.path.dirname(file)
+    output_dir = os.path.join(output_basedir, relative_dirname)
+    output_filename = os.path.join(output_basedir, file)
+
+    # In cases where a directory has turned into a file or vice versa, delete it
+    # before copying it below.
+    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+      os.remove(output_dir)
+    if os.path.exists(output_filename) and os.path.isdir(output_filename):
+      shutil.rmtree(output_filename)
+
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    shutil.copy(file, output_filename)
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException as e:
+    print(e, file=sys.stderr)
+    return 1
+  if result:
+    print(result)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/cp.py b/src/build/cp.py
new file mode 100755
index 0000000..0f32536
--- /dev/null
+++ b/src/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/src/build/detect_host_arch.py b/src/build/detect_host_arch.py
new file mode 100755
index 0000000..cad0f4b
--- /dev/null
+++ b/src/build/detect_host_arch.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+from __future__ import print_function
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+  elif host_arch.startswith('aarch64'):
+    host_arch = 'arm64'
+  elif host_arch.startswith('mips64'):
+    host_arch = 'mips64'
+  elif host_arch.startswith('mips'):
+    host_arch = 'mips'
+  elif host_arch.startswith('ppc'):
+    host_arch = 'ppc'
+  elif host_arch.startswith('s390'):
+    host_arch = 's390'
+
+  # platform.machine is based on running kernel. It's possible to use 64-bit
+  # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+  # Distinguish between different userland bitness by querying
+  # the python binary.
+  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+    host_arch = 'ia32'
+  if host_arch == 'arm64' and platform.architecture()[0] == '32bit':
+    host_arch = 'arm'
+
+  return host_arch
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return HostArch()
+
+if __name__ == '__main__':
+  print(DoMain([]))
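
One plausible way to consume this from GN (an assumption; it is equally usable as a standalone script or via the `DoMain` gyp hook):

```
host_arch = exec_script("//build/detect_host_arch.py", [], "trim string")
```
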
diff --git a/src/build/dir_exists.py b/src/build/dir_exists.py
new file mode 100755
index 0000000..70d367e
--- /dev/null
+++ b/src/build/dir_exists.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+  sys.stdout.write(_is_dir(sys.argv[1]))
+  return 0
+
+def _is_dir(dir_name):
+  return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return _is_dir(args[0])
+
+if __name__ == '__main__':
+  sys.exit(main())
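
This mirrors the real `exec_script` call in //build/config/sysroot.gni above; the checked path here is just an example:

```
sysroot_exists = exec_script("//build/dir_exists.py",
                             [ rebase_path("//build/linux") ],
                             "string") == "True"
```
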
diff --git a/src/build/docs/debugging_slow_builds.md b/src/build/docs/debugging_slow_builds.md
new file mode 100644
index 0000000..315690c
--- /dev/null
+++ b/src/build/docs/debugging_slow_builds.md
@@ -0,0 +1,19 @@
+# Debugging slow builds
+
+Some tips for debugging slow build times:
+* Use [ninjatracing](https://github.com/nico/ninjatracing) and chrome:tracing to
+  view a timeline of the most recent build.
+  * Many bots output a build trace (look for a `"ninja_log"` link).
+* Use `gn gen --tracelog trace.json` to create a similar trace for `gn gen`.
+* Depot Tools' `autoninja` has logic for summarizing slow steps. Enable it via:
+  * `NINJA_SUMMARIZE_BUILD=1 autoninja -C out/Debug my_target`
+* Many Android templates make use of
+  [`md5_check.py`](https://cs.chromium.org/chromium/src/build/android/gyp/util/md5_check.py)
+  to optimize incremental builds.
+  * Set `PRINT_BUILD_EXPLANATIONS=1` to have these commands log which inputs
+    changed.
+* If you suspect files are being rebuilt unnecessarily during incremental
+  builds:
+  * Use `ninja -n -d explain` to figure out why ninja thinks a target is dirty.
+  * Ensure actions are taking advantage of ninja's `restat=1` feature by not
+    updating timestamps on outputs when their content does not change.
diff --git a/src/build/docs/mac_hermetic_toolchain.md b/src/build/docs/mac_hermetic_toolchain.md
new file mode 100644
index 0000000..d5c88de
--- /dev/null
+++ b/src/build/docs/mac_hermetic_toolchain.md
@@ -0,0 +1,34 @@
+# Mac and iOS hermetic toolchain instructions
+
+The following is a short explanation of why we use the hermetic toolchain
+and instructions on how to roll a new toolchain. This toolchain is only
+available to Googlers and infra bots.
+
+## How to roll a new hermetic toolchain.
+
+1. Download a new version of Xcode, and confirm either mac or ios builds
+   properly with this new version.
+
+2. Create a new CIPD package by moving Xcode.app to the `build/` directory, then
+   follow the instructions in
+   [build/xcode_binaries.yaml](../xcode_binaries.yaml).
+
+   The CIPD package contains the subset of the toolchain necessary for a build.
+
+3. Create a CL with the updated `MAC_BINARIES_TAG` in
+   [mac_toolchain.py](../mac_toolchain.py), using the version created by the
+   previous step.
+
+4. Run the CL through the trybots to confirm the roll works.
+
+## Why we use a hermetic toolchain.
+
+Building Chrome Mac currently requires many binaries that come bundled with
+Xcode, as well as the macOS and iPhoneOS SDKs (also bundled with Xcode). Note that
+Chrome ships its own version of clang (compiler), but is dependent on Xcode
+for these other binaries. Using a hermetic toolchain has two main benefits:
+
+1. Build Chrome with a well-defined toolchain (rather than whatever happens to
+   be installed on the machine).
+
+2. Easily roll/update the toolchain.
diff --git a/src/build/docs/writing_gn_templates.md b/src/build/docs/writing_gn_templates.md
new file mode 100644
index 0000000..39f7bed
--- /dev/null
+++ b/src/build/docs/writing_gn_templates.md
@@ -0,0 +1,337 @@
+# Writing GN Templates
+GN and Ninja are documented here:
+* GN: https://gn.googlesource.com/gn/+/master/docs/
+* Ninja: https://ninja-build.org/manual.html
+
+[TOC]
+
+## Things to Consider When Writing Templates
+### Inputs and Depfiles
+List all files read (or executed) by an action as `inputs`.
+ * It is not enough to have inputs listed by dependent targets. They must be
+   listed directly by targets that use them, or added by a depfile.
+ * Non-system Python imports are inputs! For scripts that import such modules,
+   use [`action_with_pydeps`] to ensure all dependent Python files are captured
+   as inputs.
+
+[`action_with_pydeps`]: https://cs.chromium.org/chromium/src/build/config/python.gni?rcl=320ee4295eb7fabaa112f08d1aacc88efd1444e5&l=75
+
+To understand *why* actions must list all inputs directly, you need to
+understand ninja's "restat" directive, which is used for all GN `action()`s.
+
+From https://ninja-build.org/manual.html:
+
+> if present, causes Ninja to re-stat the command’s outputs after execution of
+> the command. Each output whose modification time the command did not change
+> will be treated as though it had never needed to be built. This may cause the
+> output’s reverse dependencies to be removed from the list of pending build
+> actions.
+
+So, if your action depends on target "X", and "X" does not change its outputs
+when rebuilt, then ninja will not bother to rebuild your target.
+
+For action inputs that are not computable during "gn gen", actions can write
+depfiles (.d files) to add additional input files as dependencies for
+subsequent builds; a sketch follows the list below. Depfiles are relevant only
+for incremental builds since they won't exist for the initial build.
+ * Depfiles should not list files that GN already lists as `inputs`.
+   * Besides being redundant, listing them also makes it harder to remove
+     inputs, since removing them from GN does not immediately remove them from
+     depfiles.
+   * Stale paths in depfiles can cause ninja to complain of circular
+     dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042).
+
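+A minimal sketch of an action that writes a depfile (all names are
+illustrative):
+
+```python
+action("gen_from_template") {
+  script = "process_template.py"
+  inputs = [ "input.template" ]
+  outputs = [ "$target_gen_dir/output.cc" ]
+
+  # The script writes a Makefile-style .d file listing any extra files it
+  # read while expanding the template.
+  depfile = "$target_gen_dir/$target_name.d"
+  args = [
+    "--template",
+    rebase_path("input.template", root_build_dir),
+    "--output",
+    rebase_path(outputs[0], root_build_dir),
+    "--depfile",
+    rebase_path(depfile, root_build_dir),
+  ]
+}
+```
+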
+### Ensuring "gn analyze" Knows About your Inputs
+"gn analyze" is used by bots to run only affected tests and build only affected
+targets. Try it out locally via:
+```bash
+echo "compute_inputs_for_analyze = true" >> out/Debug/args.gn
+gn analyze //out/Debug <(echo '{
+    "files": ["//BUILD.gn"],
+    "test_targets": ["//base"],
+    "additional_compile_targets":[]}') result.txt; cat result.txt
+```
+* For analyze to work properly, GN must know about all inputs.
+* Inputs added by depfiles are *not available* to "gn analyze".
+  * When paths listed in a target's depfile are listed as `inputs` to a
+    dependent target, analyze will be correct.
+    * Example: An `AndroidManifest.xml` file is an input to an
+      `android_library()` and is included in an `android_apk()`'s depfile.
+      `gn analyze` will know that a change to the file will require the APK
+      to be rebuilt, because the file is marked as an input to the library, and
+      the library is a dep of the APK.
+  * When paths listed in a target's depfile are *not* listed as `inputs` to a
+    dependent target, a few options exist:
+    * Rather than putting the inputs in a depfile, force users of your template
+      to list them, and then have your action re-compute them and assert that
+      they were correct.
+      * `jinja_template()` does this.
+    * Rather than putting the inputs in a depfile, compute them beforehand and
+      save them to a text file. Have your template use `read_file()` to read
+      them in.
+      * `action_with_pydeps()` does this.
+    * Continue using a depfile, but use an `exec_script()` to compute them when
+      [`compute_inputs_for_analyze`](https://cs.chromium.org/chromium/src/build/config/compute_inputs_for_analyze.gni)
+      is set.
+      * `grit()` does this.
+
+### Outputs
+#### What to List as Outputs
+Do not list files as `outputs` unless they are important. Outputs are important
+if they are:
+  * used as an input by another target, or
+  * are roots in the dependency graph (e.g. binaries, apks, etc).
+
+Example:
+* An action runs a binary that creates an output as well as a log file. Do not
+  list the log file as an output.
+
+#### Where to Place Outputs
+**Option 1:** To make outputs visible in codesearch (e.g. generated sources):
+* use `$target_gen_dir/$target_name.$EXTENSION`.
+
+**Option 2:** Otherwise (for binary files):
+* use `$target_out_dir/$target_name.$EXTENSION`.
+
+**Option 3:** For outputs that are required at runtime
+(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/master/docs/reference.md#runtime_deps)),
+options 1 & 2 do not work because they are not archived in builder/tester bot
+configurations. In this case:
+* use `$root_out_dir/gen.runtime` or `$root_out_dir/obj.runtime`.
+
+Example:
+```python
+# This .json file is used at runtime and thus cannot go in target_gen_dir.
+_target_dir_name = rebase_path(get_label_info(":$target_name", "dir"), "//")
+_output_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.json"
+```
+
+**Option 4:** For outputs that map 1:1 with executables, and whose paths cannot
+be derived at runtime:
+* use `$root_build_dir/YOUR_NAME_HERE/$target_name`.
+
+Examples:
+```python
+# Wrapper scripts for apks:
+_output_path = "$root_build_dir/bin/$target_name"
+# Metadata for apks. Used by binary size tools.
+_output_path = "$root_build_dir/size-info/${invoker.name}.apk.jar.info"
+```
+
+## Best Practices for Python Actions
+Outputs should be atomic and take advantage of `restat=1`.
+* Make outputs atomic by writing to temporary files and then moving them to
+  their final location.
+  * Rationale: An interrupted write can leave a file with an updated timestamp
+    and corrupt contents. Ninja looks only at timestamps.
+* Do not overwrite an existing output with identical contents.
+  * Rationale: `restat=1` is a ninja feature enabled for all actions that
+    short-circuits a build when output timestamps do not change. This feature is
+    the reason that the total number of build steps sometimes decreases when
+    building.
+* Use [`build_utils.AtomicOutput()`](https://cs.chromium.org/chromium/src/build/android/gyp/util/build_utils.py?rcl=7d6ba28e92bec865a7b7876c35b4621d56fb37d8&l=128)
+  to perform both of these techniques.
+
+Actions should be deterministic in order to avoid hard-to-reproduce bugs.
+Given identical inputs, they should produce byte-for-byte identical outputs.
+* Some common mistakes:
+  * Depending on filesystem iteration order.
+  * Writing timestamps in files (or in zip entries).
+  * Writing absolute paths in outputs.
+
+## Style Guide
+Chromium GN files follow
+[GN's Style Guide](https://gn.googlesource.com/gn/+/master/docs/style_guide.md)
+with a few additions.
+
+### Action Granularity
+ * Prefer writing new Python scripts that do what you want over
+   composing multiple separate actions within a template.
+   * Fewer targets makes for a simpler build graph.
+   * GN logic and build logic winds up much simpler.
+
+Bad:
+```python
+template("generate_zipped_sources") {
+  generate_files("${target_name}__gen") {
+    ...
+    outputs = [ "$target_gen_dir/$target_name.temp" ]
+  }
+  zip(target_name) {
+    deps = [ ":${target_name}__gen" ]
+    inputs = [ "$target_gen_dir/$target_name.temp" ]
+    outputs = [ invoker.output_zip ]
+  }
+}
+```
+
+Good:
+```python
+template("generate_zipped_sources") {
+  action(target_name) {
+    script = "generate_and_zip.py"
+    ...
+    outputs = [ invoker.output_zip ]
+  }
+}
+```
+
+### Naming for Intermediate Targets
+Targets that are not relevant to users of your template should be named as:
+`${target_name}__$something`.
+
+Example:
+```python
+template("my_template") {
+  action("${target_name}__helper") {
+    ...
+  }
+  action(target_name) {
+    deps = [ ":${target_name}__helper" ]
+    ...
+  }
+}
+```
+
+This scheme ensures that subtargets defined in templates do not conflict with
+top-level targets.
+
+### Visibility for Intermediate Targets
+
+You can restrict which targets can depend on one another using [visibility].
+When writing templates with multiple intermediate targets, `visibility` should
+be applied only to the final target (the one named `target_name`). Applying it
+only to the final target ensures that the invoker-provided visibility does not
+prevent intermediate targets from depending on each other.
+
+[visibility]: https://gn.googlesource.com/gn/+/master/docs/reference.md#var_visibility
+
+Example:
+```python
+template("my_template") {
+  # Do not forward visibility here.
+  action("${target_name}__helper") {
+    # Do not forward visibility here.
+    ...
+  }
+  action(target_name) {
+    # Forward visibility here.
+    forward_variables_from(invoker, [ "visibility" ])
+    deps = [ ":${target_name}__helper" ]
+    ...
+  }
+}
+```
+
+### Variables
+Prefix variables within templates and targets with an underscore. For example:
+
+```python
+template("example") {
+  _outer_sources = invoker.extra_sources
+
+  source_set(target_name) {
+    _inner_sources = invoker.sources
+    sources = _outer_sources + _inner_sources
+  }
+}
+```
+
+This convention conveys that `sources` is relevant to `source_set`, while
+`_outer_sources` and `_inner_sources` are not.
+
+### Passing Arguments to Targets
+Pass arguments to targets by assigning them directly within target definitions.
+
+When a GN template goes to resolve `invoker.FOO`, GN will look in all enclosing
+scopes of the target's definition. It is hard to figure out where `invoker.FOO`
+is coming from when it is not assigned directly within the target definition.
+
+Bad:
+```python
+template("hello") {
+  script = "..."
+  action(target_name) {
+    # This action will see "script" from the enclosing scope.
+  }
+}
+```
+
+Good:
+```python
+template("hello") {
+  action(target_name) {
+    script = "..."  # This is equivalent, but much more clear.
+  }
+}
+```
+
+**Exception:** `testonly` and `visibility` can be set in the outer scope so that
+they are implicitly passed to all targets within a template.
+
+This is okay:
+```python
+template("hello") {
+  testonly = true  # Applies to all nested targets.
+  action(target_name) {
+    script = "..."
+  }
+}
+```
+
+### Using forward_variables_from()
+Using [forward_variables_from()] is encouraged, but special care needs to be
+taken when forwarding `"*"`. The variables `testonly` and `visibility` should
+always be listed explicitly in case they are assigned in an enclosing
+scope.
+See [this bug] for a full example.
+
+To make this easier, `//build/config/BUILDCONFIG.gn` defines:
+```python
+TESTONLY_AND_VISIBILITY = [ "testonly", "visibility" ]
+```
+
+Example usage:
+```python
+template("action_wrapper") {
+  action(target_name) {
+    forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    ...
+  }
+}
+```
+
+If your template defines multiple targets, be careful to apply `testonly` to
+both, but `visibility` only to the primary one (so that the primary one is not
+prevented from depending on the other ones).
+
+Example:
+```python
+template("template_with_multiple_targets") {
+  action("${target_name}__helper) {
+    forward_variables_from(invoker, [ "testonly" ])
+    ...
+  }
+  action(target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    ...
+  }
+}
+```
+
+An alternative would be to explicitly set `visibility` on all inner targets,
+but doing so tends to be tedious and has little benefit.
+
+[this bug]: https://bugs.chromium.org/p/chromium/issues/detail?id=862232
+[forward_variables_from()]: https://gn.googlesource.com/gn/+/master/docs/reference.md#func_forward_variables_from
+
+## Useful Ninja Flags
+Useful ninja flags when developing build rules:
+* `ninja -v` - log the full command-line of every target.
+* `ninja -v -n` - log the full command-line of every target without having
+  to wait for a build.
+* `ninja -w dupbuild=err` - fail if multiple targets have the same output.
+* `ninja -d keeprsp` - prevent ninja from deleting response files.
+* `ninja -n -d explain` - print why ninja thinks a target is dirty.
+* `ninja -j1` - execute only one command at a time.
diff --git a/src/build/dotfile_settings.gni b/src/build/dotfile_settings.gni
new file mode 100644
index 0000000..3d869b3
--- /dev/null
+++ b/src/build/dotfile_settings.gni
@@ -0,0 +1,43 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains variables that can be imported into a repo's dotfile (.gn)
+# to make it easier to roll new versions of //build in.
+
+build_dotfile_settings = {
+  exec_script_whitelist = [
+    "//build/config/android/config.gni",
+    "//build/config/android/internal_rules.gni",
+    "//build/config/android/rules.gni",
+    "//build/config/chromeos/rules.gni",
+    "//build/config/compiler/BUILD.gn",
+    "//build/config/compiler/pgo/BUILD.gn",
+    "//build/config/gcc/gcc_version.gni",
+    "//build/config/host_byteorder.gni",
+    "//build/config/ios/ios_sdk.gni",
+    "//build/config/ios/rules.gni",
+    "//build/config/linux/atk/BUILD.gn",
+    "//build/config/linux/atspi2/BUILD.gn",
+    "//build/config/linux/BUILD.gn",
+    "//build/config/linux/dri/BUILD.gn",
+    "//build/config/linux/pkg_config.gni",
+    "//build/config/mac/mac_sdk.gni",
+    "//build/config/mac/rules.gni",
+    "//build/config/posix/BUILD.gn",
+    "//build/config/python.gni",
+    "//build/config/sysroot.gni",
+    "//build/config/win/BUILD.gn",
+    "//build/config/win/visual_studio_version.gni",
+    "//build/timestamp.gni",
+    "//build/toolchain/apple/toolchain.gni",
+    "//build/toolchain/BUILD.gn",
+    "//build/toolchain/concurrent_links.gni",
+    "//build/toolchain/goma.gni",
+    "//build/toolchain/nacl/BUILD.gn",
+    "//build/toolchain/toolchain.gni",
+    "//build/toolchain/win/BUILD.gn",
+    "//build/util/branding.gni",
+    "//build/util/version.gni",
+  ]
+}
diff --git a/src/build/download_nacl_toolchains.py b/src/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..286a92a
--- /dev/null
+++ b/src/build/download_nacl_toolchains.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import sys
+
+
+def Main(args):
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print("Can't find '%s'" % package_version)
+    print('Presumably you are intentionally building without NativeClient.')
+    print('Skipping NativeClient toolchain download.')
+    sys.exit(0)
+  sys.path.insert(0, package_version_dir)
+  import package_version
+
+  # BUG:
+  # We remove this --optional-pnacl argument, and instead replace it with
+  # --no-pnacl for most cases.  However, if the bot name is an sdk
+  # bot then we will go ahead and download it.  This prevents increasing the
+  # gclient sync time for developers, or standard Chrome bots.
+  if '--optional-pnacl' in args:
+    args.remove('--optional-pnacl')
+    use_pnacl = False
+    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+      use_pnacl = True
+    if use_pnacl:
+      print('\n*** DOWNLOADING PNACL TOOLCHAIN ***\n')
+    else:
+      args = ['--exclude', 'pnacl_newlib'] + args
+
+  # Only download the ARM gcc toolchain if we are building for ARM
+  # TODO(olonho): we need to invent more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+    args = ['--exclude', 'nacl_arm_newlib'] + args
+
+  return package_version.main(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/src/build/env_dump.py b/src/build/env_dump.py
new file mode 100755
index 0000000..3f82173
--- /dev/null
+++ b/src/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes it
+# makes, or simply dump the current environment as JSON into a file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
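+    # Source the given setup script in a bash subshell (discarding its
+    # stdout), then re-invoke this script with -d to dump the resulting
+    # environment as JSON on stdout.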
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception as e:
+      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+    env_diff = {}
+    new_env = json.loads(output)
+    for k, val in new_env.items():
+      if k == '_' or (k in os.environ and os.environ[k] == val):
+        continue
+      env_diff[k] = val
+    with open(options.output_json, 'w') as f:
+      json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/extract_from_cab.py b/src/build/extract_from_cab.py
new file mode 100755
index 0000000..9695b96
--- /dev/null
+++ b/src/build/extract_from_cab.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+  """Run 'expand' suppressing noisy output. Returns returncode from process."""
+  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+  out, _ = popen.communicate()
+  if popen.returncode:
+    # expand emits errors to stdout, so if we fail, then print that out.
+    print(out)
+  return popen.returncode
+
+def main():
+  if len(sys.argv) != 4:
+    print('Usage: extract_from_cab.py cab_path archived_file output_dir')
+    return 1
+
+  [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+  # Expand.exe does its work in a fixed-named temporary directory created within
+  # the given output directory. This is a problem for concurrent extractions, so
+  # create a unique temp dir within the desired output directory to work around
+  # this limitation.
+  temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+  try:
+    # Invoke the Windows expand utility to extract the file.
+    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+    if level == 0:
+      # Move the output file into place, preserving expand.exe's behavior of
+      # paving over any preexisting file.
+      output_file = os.path.join(output_dir, archived_file)
+      try:
+        os.remove(output_file)
+      except OSError:
+        pass
+      os.rename(os.path.join(temp_dir, archived_file), output_file)
+  finally:
+    shutil.rmtree(temp_dir, True)
+
+  if level != 0:
+    return level
+
+  # The expand utility preserves the modification date and time of the archived
+  # file. Touch the extracted file. This helps build systems that compare the
+  # modification times of input and output files to determine whether to do an
+  # action.
+  os.utime(os.path.join(output_dir, archived_file), None)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/extract_partition.py b/src/build/extract_partition.py
new file mode 100755
index 0000000..4b2f064
--- /dev/null
+++ b/src/build/extract_partition.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Extracts an LLD partition from an ELF file."""
+
+import argparse
+import subprocess
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument(
+      '--partition',
+      help='Name of partition if not the main partition',
+      metavar='PART')
+  parser.add_argument(
+      '--objcopy',
+      required=True,
+      help='Path to llvm-objcopy binary',
+      metavar='FILE')
+  parser.add_argument(
+      '--unstripped-output',
+      required=True,
+      help='Unstripped output file',
+      metavar='FILE')
+  parser.add_argument(
+      '--stripped-output',
+      required=True,
+      help='Stripped output file',
+      metavar='FILE')
+  parser.add_argument('--dwp', help='Path to dwp binary', metavar='FILE')
+  parser.add_argument('input', help='Input file')
+  args = parser.parse_args()
+
+  objcopy_args = [args.objcopy]
+  if args.partition:
+    objcopy_args += ['--extract-partition', args.partition]
+  else:
+    objcopy_args += ['--extract-main-partition']
+  objcopy_args += [args.input, args.unstripped_output]
+  subprocess.check_call(objcopy_args)
+
+  objcopy_args = [
+      args.objcopy, '--strip-all', args.unstripped_output, args.stripped_output
+  ]
+  subprocess.check_call(objcopy_args)
+
+  if args.dwp:
+    dwp_args = [
+        args.dwp, '-e', args.unstripped_output, '-o',
+        args.unstripped_output + '.dwp'
+    ]
+    # Suppress output here because it doesn't seem to be useful. The most
+    # common error is a segfault, which will happen if files are missing.
+    subprocess.check_output(dwp_args, stderr=subprocess.STDOUT)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/find_depot_tools.py b/src/build/find_depot_tools.py
new file mode 100755
index 0000000..49a9138
--- /dev/null
+++ b/src/build/find_depot_tools.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Small utility function to find depot_tools and add it to the python path.
+
+Will throw an ImportError exception if depot_tools can't be found since it
+imports breakpad.
+
+This can also be used as a standalone script to print out the depot_tools
+directory location.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+
+# Path to //src
+SRC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def IsRealDepotTools(path):
+  expanded_path = os.path.expanduser(path)
+  return os.path.isfile(os.path.join(expanded_path, 'gclient.py'))
+
+
+def add_depot_tools_to_path():
+  """Search for depot_tools and add it to sys.path."""
+  # First, check if we have a DEPS'd in "depot_tools".
+  deps_depot_tools = os.path.join(SRC, 'third_party', 'depot_tools')
+  if IsRealDepotTools(deps_depot_tools):
+    # Put the pinned version at the start of the sys.path, in case there
+    # are other non-pinned versions already on the sys.path.
+    sys.path.insert(0, deps_depot_tools)
+    return deps_depot_tools
+
+  # Then look if depot_tools is already in PYTHONPATH.
+  for i in sys.path:
+    if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i):
+      return i
+  # Then look if depot_tools is in PATH, common case.
+  for i in os.environ['PATH'].split(os.pathsep):
+    if IsRealDepotTools(i):
+      sys.path.append(i.rstrip(os.sep))
+      return i
+  # Rare case: it's not even in PATH, so look upward, up to the root.
+  root_dir = os.path.dirname(os.path.abspath(__file__))
+  previous_dir = os.path.abspath(__file__)
+  while root_dir and root_dir != previous_dir:
+    i = os.path.join(root_dir, 'depot_tools')
+    if IsRealDepotTools(i):
+      sys.path.append(i)
+      return i
+    previous_dir = root_dir
+    root_dir = os.path.dirname(root_dir)
+  print('Failed to find depot_tools', file=sys.stderr)
+  return None
+
+DEPOT_TOOLS_PATH = add_depot_tools_to_path()
+
+# pylint: disable=W0611
+import breakpad
+
+
+def main():
+  if DEPOT_TOOLS_PATH is None:
+    return 1
+  print(DEPOT_TOOLS_PATH)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/fix_gn_headers.py b/src/build/fix_gn_headers.py
new file mode 100755
index 0000000..71fb332
--- /dev/null
+++ b/src/build/fix_gn_headers.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fix header files missing in GN.
+
+This script takes the missing header files from check_gn_headers.py and
+tries to fix them by adding them to the GN files.
+Manual cleanup is likely required afterwards.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+
+def GitGrep(pattern):
+  p = subprocess.Popen(
+      ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'],
+      stdout=subprocess.PIPE)
+  out, _ = p.communicate()
+  return out, p.returncode
+
+
+def ValidMatches(basename, cc, grep_lines):
+  """Filter out 'git grep' matches with header files already."""
+  matches = []
+  for line in grep_lines:
+    gnfile, linenr, contents = line.split(':')
+    linenr = int(linenr)
+    new = re.sub(cc, basename, contents)
+    lines = open(gnfile).read().splitlines()
+    assert contents in lines[linenr - 1]
+    # Skip if it's already there. It could be before or after the match.
+    if lines[linenr] == new:
+      continue
+    if lines[linenr - 2] == new:
+      continue
+    print('    ', gnfile, linenr, new)
+    matches.append((gnfile, linenr, new))
+  return matches
+
+
+def AddHeadersNextToCC(headers, skip_ambiguous=True):
+  """Add header files next to the corresponding .cc files in GN files.
+
+  When skip_ambiguous is True, skip if multiple .cc files are found.
+  Returns unhandled headers.
+
+  Manual cleanup is likely required, especially if not skip_ambiguous.
+  """
+  edits = {}
+  unhandled = []
+  for filename in headers:
+    filename = filename.strip()
+    if not (filename.endswith('.h') or filename.endswith('.hh')):
+      continue
+    basename = os.path.basename(filename)
+    print(filename)
+    cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b'
+    out, returncode = GitGrep('(/|")' + cc + '"')
+    if returncode != 0 or not out:
+      unhandled.append(filename)
+      continue
+
+    matches = ValidMatches(basename, cc, out.splitlines())
+
+    if len(matches) == 0:
+      continue
+    if len(matches) > 1:
+      print('\n[WARNING] Ambiguous matching for', filename)
+      for i in enumerate(matches, 1):
+        print('%d: %s' % (i[0], i[1]))
+      print()
+      if skip_ambiguous:
+        continue
+
+      picked = raw_input('Pick the matches ("2,3" for multiple): ')
+      try:
+        matches = [matches[int(i) - 1] for i in picked.split(',')]
+      except (ValueError, IndexError):
+        continue
+
+    for match in matches:
+      gnfile, linenr, new = match
+      print('  ', gnfile, linenr, new)
+      edits.setdefault(gnfile, {})[linenr] = new
+
+  for gnfile in edits:
+    lines = open(gnfile).read().splitlines()
+    for l in sorted(edits[gnfile].keys(), reverse=True):
+      lines.insert(l, edits[gnfile][l])
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+  return unhandled
+
+
+def AddHeadersToSources(headers, skip_ambiguous=True):
+  """Add header files to the sources list in the first GN file.
+
+  The target GN file is the first one up the parent directories.
+  This usually does the wrong thing for _test files if the test and the main
+  target are in the same .gn file.
+  When skip_ambiguous is True, skip if multiple sources arrays are found.
+
+  "git cl format" afterwards is required. Manually cleaning up duplicated items
+  is likely required.
+  """
+  for filename in headers:
+    filename = filename.strip()
+    print(filename)
+    dirname = os.path.dirname(filename)
+    while not os.path.exists(os.path.join(dirname, 'BUILD.gn')):
+      dirname = os.path.dirname(dirname)
+    rel = filename[len(dirname) + 1:]
+    gnfile = os.path.join(dirname, 'BUILD.gn')
+
+    lines = open(gnfile).read().splitlines()
+    matched = [i for i, l in enumerate(lines) if ' sources = [' in l]
+    if skip_ambiguous and len(matched) > 1:
+      print('[WARNING] Multiple sources in', gnfile)
+      continue
+
+    if len(matched) < 1:
+      continue
+    print('  ', gnfile, rel)
+    index = matched[0]
+    lines.insert(index + 1, '"%s",' % rel)
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+
+def RemoveHeader(headers, skip_ambiguous=True):
+  """Remove non-existing headers in GN files.
+
+  When skip_ambiguous is True, skip if multiple matches are found.
+  """
+  edits = {}
+  unhandled = []
+  for filename in headers:
+    filename = filename.strip()
+    if not (filename.endswith('.h') or filename.endswith('.hh')):
+      continue
+    basename = os.path.basename(filename)
+    print(filename)
+    out, returncode = GitGrep('(/|")' + basename + '"')
+    if returncode != 0 or not out:
+      unhandled.append(filename)
+      print('  Not found')
+      continue
+
+    grep_lines = out.splitlines()
+    matches = []
+    for line in grep_lines:
+      gnfile, linenr, contents = line.split(':')
+      print('    ', gnfile, linenr, contents)
+      linenr = int(linenr)
+      lines = open(gnfile).read().splitlines()
+      assert contents in lines[linenr - 1]
+      matches.append((gnfile, linenr, contents))
+
+    if len(matches) == 0:
+      continue
+    if len(matches) > 1:
+      print('\n[WARNING] Ambiguous matching for', filename)
+      for i in enumerate(matches, 1):
+        print('%d: %s' % (i[0], i[1]))
+      print()
+      if skip_ambiguous:
+        continue
+
+      picked = raw_input('Pick the matches ("2,3" for multiple): ')
+      try:
+        matches = [matches[int(i) - 1] for i in picked.split(',')]
+      except (ValueError, IndexError):
+        continue
+
+    for match in matches:
+      gnfile, linenr, contents = match
+      print('  ', gnfile, linenr, contents)
+      edits.setdefault(gnfile, set()).add(linenr)
+
+  for gnfile in edits:
+    lines = open(gnfile).read().splitlines()
+    for l in sorted(edits[gnfile], reverse=True):
+      lines.pop(l - 1)
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+  return unhandled
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('input_file', help="missing or non-existing headers, "
+                      "output of check_gn_headers.py")
+  parser.add_argument('--prefix',
+                      help="only handle path name with this prefix")
+  parser.add_argument('--remove', action='store_true',
+                      help="treat input_file as non-existing headers")
+
+  args, _extras = parser.parse_known_args()
+
+  headers = open(args.input_file).readlines()
+
+  if args.prefix:
+    headers = [i for i in headers if i.startswith(args.prefix)]
+
+  if args.remove:
+    RemoveHeader(headers, False)
+  else:
+    unhandled = AddHeadersNextToCC(headers)
+    AddHeadersToSources(unhandled)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/fuchsia/DIR_METADATA b/src/build/fuchsia/DIR_METADATA
new file mode 100644
index 0000000..fe8198a
--- /dev/null
+++ b/src/build/fuchsia/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Fuchsia"
+}
+
+team_email: "cr-fuchsia@chromium.org"
diff --git a/src/build/fuchsia/__init__.py b/src/build/fuchsia/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/fuchsia/__init__.py
diff --git a/src/build/fuchsia/aemu_target.py b/src/build/fuchsia/aemu_target.py
new file mode 100644
index 0000000..321feb5
--- /dev/null
+++ b/src/build/fuchsia/aemu_target.py
@@ -0,0 +1,127 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running and interacting with Fuchsia on AEMU."""
+
+import emu_target
+import os
+import platform
+import qemu_target
+import logging
+
+from common import GetEmuRootForPlatform
+
+
+def GetTargetType():
+  return AemuTarget
+
+
+class AemuTarget(qemu_target.QemuTarget):
+  EMULATOR_NAME = 'aemu'
+
+  def __init__(self,
+               out_dir,
+               target_cpu,
+               system_log_file,
+               cpu_cores,
+               require_kvm,
+               ram_size_mb,
+               enable_graphics,
+               hardware_gpu,
+               fuchsia_out_dir=None):
+    super(AemuTarget,
+          self).__init__(out_dir, target_cpu, system_log_file, cpu_cores,
+                         require_kvm, ram_size_mb, fuchsia_out_dir)
+
+    # TODO(crbug.com/1000907): Enable AEMU for arm64.
+    if platform.machine() == 'aarch64':
+      raise Exception('AEMU does not support arm64 hosts.')
+    self._enable_graphics = enable_graphics
+    self._hardware_gpu = hardware_gpu
+
+  @staticmethod
+  def CreateFromArgs(args):
+    return AemuTarget(args.out_dir, args.target_cpu, args.system_log_file,
+                      args.cpu_cores, args.require_kvm, args.ram_size_mb,
+                      args.enable_graphics, args.hardware_gpu,
+                      args.fuchsia_out_dir)
+
+  @staticmethod
+  def RegisterArgs(arg_parser):
+    aemu_args = arg_parser.add_argument_group('aemu', 'AEMU arguments')
+    aemu_args.add_argument('--enable-graphics',
+                           action='store_true',
+                           default=False,
+                           help='Start AEMU with graphics instead of '\
+                                'headless.')
+    aemu_args.add_argument('--hardware-gpu',
+                           action='store_true',
+                           default=False,
+                           help='Use local GPU hardware instead of '\
+                                'Swiftshader.')
+
+  def _EnsureEmulatorExists(self, path):
+    assert os.path.exists(path), \
+          'This checkout is missing %s.' % (self.EMULATOR_NAME)
+
+  def _BuildCommand(self):
+    aemu_folder = GetEmuRootForPlatform(self.EMULATOR_NAME)
+
+    self._EnsureEmulatorExists(aemu_folder)
+    aemu_path = os.path.join(aemu_folder, 'emulator')
+
+    # `VirtioInput` is needed for touch input device support on Fuchsia.
+    # `RefCountPipe` is needed for proper cleanup of resources when a process
+    # that uses Vulkan dies inside the guest.
+    aemu_features = 'VirtioInput,RefCountPipe'
+
+    # Configure the CPU to emulate.
+    # On Linux, we can enable lightweight virtualization (KVM) if the host and
+    # guest architectures are the same.
+    if self._IsKvmEnabled():
+      aemu_features += ',KVM,GLDirectMem,Vulkan'
+    else:
+      if self._target_cpu != 'arm64':
+        aemu_features += ',-GLDirectMem'
+
+    # Use Swiftshader for Vulkan if requested
+    gpu_target = 'swiftshader_indirect'
+    if self._hardware_gpu:
+      gpu_target = 'host'
+
+    aemu_command = [aemu_path]
+    if not self._enable_graphics:
+      aemu_command.append('-no-window')
+    # All args after the -fuchsia flag get passed to QEMU.
+    aemu_command.extend([
+        '-feature', aemu_features, '-window-size', '1024x600', '-gpu',
+        gpu_target, '-verbose', '-fuchsia'
+    ])
+
+    aemu_command.extend(self._BuildQemuConfig())
+
+    aemu_command.extend([
+      '-vga', 'none',
+      '-device', 'isa-debug-exit,iobase=0xf4,iosize=0x04',
+      '-device', 'virtio-keyboard-pci',
+      '-device', 'virtio_input_multi_touch_pci_1',
+      '-device', 'ich9-ahci,id=ahci'])
+    logging.info(' '.join(aemu_command))
+    return aemu_command
+
+  def _GetVulkanIcdFile(self):
+    return os.path.join(GetEmuRootForPlatform(self.EMULATOR_NAME), 'lib64',
+                        'vulkan', 'vk_swiftshader_icd.json')
+
+  def _SetEnv(self):
+    env = os.environ.copy()
+    aemu_logging_env = {
+        "ANDROID_EMU_VK_NO_CLEANUP": "1",
+        "ANDROID_EMUGL_LOG_PRINT": "1",
+        "ANDROID_EMUGL_VERBOSE": "1",
+        "VK_ICD_FILENAMES": self._GetVulkanIcdFile(),
+        "VK_LOADER_DEBUG": "info,error",
+    }
+    env.update(aemu_logging_env)
+    return env
diff --git a/src/build/fuchsia/amber_repo.py b/src/build/fuchsia/amber_repo.py
new file mode 100644
index 0000000..3b057fd
--- /dev/null
+++ b/src/build/fuchsia/amber_repo.py
@@ -0,0 +1,172 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import json
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+import time
+
+from six.moves import urllib
+
+
+# Maximum amount of time to block while waiting for "pm serve" to come up.
+_PM_SERVE_LIVENESS_TIMEOUT_SECS = 10
+
+_MANAGED_REPO_NAME = 'chrome_runner'
+
+
+class AmberRepo(object):
+  """Abstract interface for a repository used to serve packages to devices."""
+
+  def __init__(self, target):
+    self._target = target
+
+  def PublishPackage(self, package_path):
+    pm_tool = common.GetHostToolPathFromPlatform('pm')
+    subprocess.check_call(
+        [pm_tool, 'publish', '-a', '-f', package_path, '-r', self.GetPath(),
+         '-vt', '-v'],
+        stderr=subprocess.STDOUT)
+
+  def GetPath(self):
+    pass
+
+
+class ManagedAmberRepo(AmberRepo):
+  """Creates and serves packages from an ephemeral repository."""
+
+  def __init__(self, target):
+    AmberRepo.__init__(self, target)
+    self._with_count = 0
+
+    self._amber_root = tempfile.mkdtemp()
+    pm_tool = common.GetHostToolPathFromPlatform('pm')
+    subprocess.check_call([pm_tool, 'newrepo', '-repo', self._amber_root])
+    logging.info('Creating and serving temporary Amber root: {}.'.format(
+        self._amber_root))
+
+    serve_port = common.GetAvailableTcpPort()
+    self._pm_serve_task = subprocess.Popen(
+        [pm_tool, 'serve', '-d', os.path.join(self._amber_root, 'repository'),
+         '-l', ':%d' % serve_port, '-q'])
+
+    # Block until "pm serve" starts serving HTTP traffic at |serve_port|.
+    timeout = time.time() + _PM_SERVE_LIVENESS_TIMEOUT_SECS
+    while True:
+      try:
+        urllib.request.urlopen('http://localhost:%d' % serve_port,
+                               timeout=1).read()
+        break
+      except urllib.error.URLError:
+        logging.info('Waiting until \'pm serve\' is up...')
+
+      if time.time() >= timeout:
+        raise Exception('Timed out while waiting for \'pm serve\'.')
+
+      time.sleep(1)
+
+    remote_port = common.ConnectPortForwardingTask(target, serve_port, 0)
+    self._RegisterAmberRepository(self._amber_root, remote_port)
+
+  def __enter__(self):
+    self._with_count += 1
+    return self
+
+  def __exit__(self, type, value, tb):
+    """Allows the repository to delete itself when it leaves the scope of a
+    'with' block."""
+    self._with_count -= 1
+    if self._with_count > 0:
+      return
+
+    logging.info('Cleaning up Amber root: ' + self._amber_root)
+    shutil.rmtree(self._amber_root)
+    self._amber_root = None
+
+    self._UnregisterAmberRepository()
+    self._pm_serve_task.kill()
+    self._pm_serve_task = None
+
+  def GetPath(self):
+    return self._amber_root
+
+  def _RegisterAmberRepository(self, tuf_repo, remote_port):
+    """Configures a device to use a local TUF repository as an installation
+    source for packages.
+    |tuf_repo|: The host filesystem path to the TUF repository.
+    |remote_port|: The reverse-forwarded port used to connect to instance of
+                   `pm serve` that is serving the contents of |tuf_repo|."""
+
+    # Extract the public signing key for inclusion in the config file.
+    root_keys = []
+    root_json_path = os.path.join(tuf_repo, 'repository', 'root.json')
+    root_json = json.load(open(root_json_path, 'r'))
+    for root_key_id in root_json['signed']['roles']['root']['keyids']:
+      root_keys.append({
+          'Type': root_json['signed']['keys'][root_key_id]['keytype'],
+          'Value': root_json['signed']['keys'][root_key_id]['keyval']['public']
+      })
+
+    # "pm serve" can automatically generate a "config.json" file at query time,
+    # but the file is unusable because it specifies URLs with port
+    # numbers that are unreachable from across the port forwarding boundary.
+    # So we generate our own config file with the forwarded port
+    # numbers instead.
+    config_file = open(os.path.join(tuf_repo, 'repository', 'repo_config.json'),
+                       'w')
+    json.dump({
+        'ID': _MANAGED_REPO_NAME,
+        'RepoURL': "http://127.0.0.1:%d" % remote_port,
+        'BlobRepoURL': "http://127.0.0.1:%d/blobs" % remote_port,
+        'RatePeriod': 10,
+        'RootKeys': root_keys,
+        'StatusConfig': {
+            'Enabled': True
+        },
+        'Auto': True
+    }, config_file)
+    config_file.close()
+
+    # Register the repo.
+    return_code = self._target.RunCommand(
+        [('amberctl rm_src -n %s; ' +
+          'amberctl add_src -f http://127.0.0.1:%d/repo_config.json')
+         % (_MANAGED_REPO_NAME, remote_port)])
+    if return_code != 0:
+      raise Exception('Error code %d when running amberctl.' % return_code)
+
+
+  def _UnregisterAmberRepository(self):
+    """Unregisters the Amber repository."""
+
+    logging.debug('Unregistering Amber repository.')
+    self._target.RunCommand(['amberctl', 'rm_src', '-n', _MANAGED_REPO_NAME])
+
+    # Re-enable 'devhost' repo if it's present. This is useful for devices that
+    # were booted with 'fx serve'.
+    self._target.RunCommand(['amberctl', 'enable_src', '-n', 'devhost'],
+                            silent=True)
+
+
+class ExternalAmberRepo(AmberRepo):
+  """Publishes packages to an Amber repository located and served externally
+  (i.e. located under a Fuchsia build directory and served by "fx serve")."""
+
+  def __init__(self, amber_root):
+    self._amber_root = amber_root
+    logging.info('Using existing Amber root: {}'.format(amber_root))
+    logging.info('Ensure that "fx serve" is running.')
+
+  def GetPath(self):
+    return self._amber_root
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, tb):
+    pass
diff --git a/src/build/fuchsia/binary_sizes.py b/src/build/fuchsia/binary_sizes.py
new file mode 100755
index 0000000..da874bb
--- /dev/null
+++ b/src/build/fuchsia/binary_sizes.py
@@ -0,0 +1,556 @@
+#!/usr/bin/env python2
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+'''Implements Chrome-Fuchsia package binary size checks.'''
+
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import collections
+import copy
+import json
+import logging
+import math
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+import traceback
+import uuid
+
+from common import GetHostToolPathFromPlatform, GetHostArchFromPlatform
+from common import SDK_ROOT, DIR_SOURCE_ROOT
+
+# Structure representing the compressed and uncompressed sizes for a Fuchsia
+# package.
+PackageSizes = collections.namedtuple('PackageSizes',
+                                      ['compressed', 'uncompressed'])
+
+# Structure representing a Fuchsia package blob and its compressed and
+# uncompressed sizes.
+Blob = collections.namedtuple(
+    'Blob', ['name', 'hash', 'compressed', 'uncompressed', 'is_counted'])
+
+
+def CreateSizesExternalDiagnostic(sizes_guid):
+  """Creates a histogram external sizes diagnostic."""
+
+  benchmark_diagnostic = {
+      'type': 'GenericSet',
+      'guid': str(sizes_guid),
+      'values': ['sizes'],
+  }
+
+  return benchmark_diagnostic
+
+
+def CreateSizesHistogramItem(name, size, sizes_guid):
+  """Create a performance dashboard histogram from the histogram template and
+  binary size data."""
+
+  # Chromium performance dashboard histogram containing binary size data.
+  histogram = {
+      'name': name,
+      'unit': 'sizeInBytes_smallerIsBetter',
+      'diagnostics': {
+          'benchmarks': str(sizes_guid),
+      },
+      'sampleValues': [size],
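+      # Catapult "running statistics" for the single sample, in the order
+      # [count, max, meanlogs, mean, min, sum, variance].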
+      'running': [1, size, math.log(size), size, size, size, 0],
+      'description': 'chrome-fuchsia package binary sizes',
+      'summaryOptions': {
+          'avg': True,
+          'count': False,
+          'max': False,
+          'min': False,
+          'std': False,
+          'sum': False,
+      },
+  }
+
+  return histogram
+
+
+def CreateSizesHistogram(package_sizes):
+  """Create a performance dashboard histogram from binary size data."""
+
+  sizes_guid = uuid.uuid1()
+  histogram = [CreateSizesExternalDiagnostic(sizes_guid)]
+  for name, size in package_sizes.items():
+    histogram.append(
+        CreateSizesHistogramItem('%s_%s' % (name, 'compressed'),
+                                 size.compressed, sizes_guid))
+    histogram.append(
+        CreateSizesHistogramItem('%s_%s' % (name, 'uncompressed'),
+                                 size.uncompressed, sizes_guid))
+  return histogram
+
+
+def CreateTestResults(test_status, timestamp):
+  """Create test results data to write to JSON test results file.
+
+  The JSON data format is defined in
+  https://chromium.googlesource.com/chromium/src/+/master/docs/testing/json_test_results_format.md
+  """
+
+  results = {
+      'tests': {},
+      'interrupted': False,
+      'path_delimiter': '.',
+      'version': 3,
+      'seconds_since_epoch': timestamp,
+  }
+
+  num_failures_by_type = {result: 0 for result in ['FAIL', 'PASS', 'CRASH']}
+  for metric in test_status:
+    actual_status = test_status[metric]
+    num_failures_by_type[actual_status] += 1
+    results['tests'][metric] = {
+        'expected': 'PASS',
+        'actual': actual_status,
+    }
+  results['num_failures_by_type'] = num_failures_by_type
+
+  return results
+
+
+def GetTestStatus(package_sizes, sizes_config, test_completed):
+  """Checks package sizes against size limits.
+
+  Returns a tuple of overall test pass/fail status and a dictionary mapping size
+  limit checks to PASS/FAIL/CRASH status."""
+
+  if not test_completed:
+    test_status = {'binary_sizes': 'CRASH'}
+  else:
+    test_status = {}
+    for metric, limit in sizes_config['size_limits'].items():
+      # Strip the "_compressed" suffix from |metric| if it exists.
+      match = re.match(r'(?P<name>\w+)_compressed', metric)
+      package_name = match.group('name') if match else metric
+      if package_name not in package_sizes:
+        raise Exception('package "%s" not in sizes "%s"' %
+                        (package_name, str(package_sizes)))
+      if package_sizes[package_name].compressed <= limit:
+        test_status[metric] = 'PASS'
+      else:
+        test_status[metric] = 'FAIL'
+
+  all_tests_passed = all(status == 'PASS' for status in test_status.values())
+
+  return all_tests_passed, test_status
+
+
+def WriteSimpleTestResults(results_path, test_completed):
+  """Writes simplified test results file.
+
+  Used when test status is not available.
+  """
+
+  simple_isolated_script_output = {
+      'valid': test_completed,
+      'failures': [],
+      'version': 'simplified',
+  }
+  with open(results_path, 'w') as output_file:
+    json.dump(simple_isolated_script_output, output_file)
+
+
+def WriteTestResults(results_path, test_completed, test_status, timestamp):
+  """Writes test results file containing test PASS/FAIL/CRASH statuses."""
+
+  if test_status:
+    test_results = CreateTestResults(test_status, timestamp)
+    with open(results_path, 'w') as results_file:
+      json.dump(test_results, results_file)
+  else:
+    WriteSimpleTestResults(results_path, test_completed)
+
+
+def WriteGerritPluginSizeData(output_path, package_sizes):
+  """Writes a package size dictionary in json format for the Gerrit binary
+  sizes plugin."""
+
+  with open(output_path, 'w') as sizes_file:
+    sizes_data = {name: size.compressed for name, size in package_sizes.items()}
+    json.dump(sizes_data, sizes_file)
+
+
+def WritePackageBlobsJson(json_path, package_blobs):
+  """Writes package blob information in human-readable JSON format.
+
+  The json data is an array of objects containing these keys:
+    'path': string giving blob location in the local file system
+    'merkle': the blob's Merkle hash
+    'bytes': the number of uncompressed bytes in the blob
+    'size': the size of the compressed blob in bytes.  A multiple of the blobfs
+        block size (8192)
+    'is_counted': true if the blob counts towards the package budget, or false
+        if not (for ICU blobs or blobs distributed in the SDK)"""
+
+  formatted_blob_stats_per_package = {}
+  for package in package_blobs:
+    blob_data = []
+    for blob_name in package_blobs[package]:
+      blob = package_blobs[package][blob_name]
+      blob_data.append({
+          'path': blob.name,
+          'merkle': blob.hash,
+          'bytes': blob.uncompressed,
+          'size': blob.compressed,
+          'is_counted': blob.is_counted
+      })
+    formatted_blob_stats_per_package[package] = blob_data
+
+  with (open(json_path, 'w')) as json_file:
+    json.dump(formatted_blob_stats_per_package, json_file, indent=2)
+
+
+def GetCompressedSize(file_path):
+  """Measures file size after blobfs compression."""
+
+  compressor_path = GetHostToolPathFromPlatform('blobfs-compression')
+  try:
+    temp_dir = tempfile.mkdtemp()
+    compressed_file_path = os.path.join(temp_dir, os.path.basename(file_path))
+    compressor_cmd = [
+        compressor_path,
+        '--source_file=%s' % file_path,
+        '--compressed_file=%s' % compressed_file_path
+    ]
+    proc = subprocess.Popen(compressor_cmd,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.STDOUT)
+    proc.wait()
+    compressor_output = proc.stdout.read()
+    if proc.returncode != 0:
+      print(compressor_output, file=sys.stderr)
+      raise Exception('Error while running %s' % compressor_path)
+  finally:
+    shutil.rmtree(temp_dir)
+
+  # Match a compressed bytes total from blobfs-compression output like
+  # Wrote 360830 bytes (40% compression)
+  blobfs_compressed_bytes_re = r'Wrote\s+(?P<bytes>\d+)\s+bytes'
+
+  match = re.search(blobfs_compressed_bytes_re, compressor_output)
+  if not match:
+    print(compressor_output, file=sys.stderr)
+    raise Exception('Could not get compressed bytes for %s' % file_path)
+
+  # Round the compressed file size up to an integer number of blobfs blocks.
+  BLOBFS_BLOCK_SIZE = 8192  # Fuchsia's blobfs file system uses 8KiB blocks.
+  blob_bytes = int(match.group('bytes'))
+  return int(math.ceil(blob_bytes / BLOBFS_BLOCK_SIZE)) * BLOBFS_BLOCK_SIZE
+
+
+def ExtractFarFile(file_path, extract_dir):
+  """Extracts contents of a Fuchsia archive file to the specified directory."""
+
+  far_tool = GetHostToolPathFromPlatform('far')
+
+  if not os.path.isfile(far_tool):
+    raise Exception('Could not find FAR host tool "%s".' % far_tool)
+  if not os.path.isfile(file_path):
+    raise Exception('Could not find FAR file "%s".' % file_path)
+
+  subprocess.check_call([
+      far_tool, 'extract',
+      '--archive=%s' % file_path,
+      '--output=%s' % extract_dir
+  ])
+
+
+def GetBlobNameHashes(meta_dir):
+  """Returns mapping from Fuchsia pkgfs paths to blob hashes.  The mapping is
+  read from the extracted meta.far archive contained in an extracted package
+  archive."""
+
+  blob_name_hashes = {}
+  contents_path = os.path.join(meta_dir, 'meta', 'contents')
+  with open(contents_path) as lines:
+    for line in lines:
+      (pkgfs_path, blob_hash) = line.strip().split('=')
+      blob_name_hashes[pkgfs_path] = blob_hash
+  return blob_name_hashes
+
+
+# Compiled regular expression matching strings like *.so, *.so.1, *.so.2, ...
+SO_FILENAME_REGEXP = re.compile(r'\.so(\.\d+)?$')
+
+
+def GetSdkModules():
+  """Finds shared objects (.so) under the Fuchsia SDK arch directory in dist or
+  lib subdirectories.
+
+  Returns a set of shared objects' filenames.
+  """
+
+  # Fuchsia SDK arch directory path (contains all shared object files).
+  sdk_arch_dir = os.path.join(SDK_ROOT, 'arch')
+  # Leaf subdirectories containing shared object files.
+  sdk_so_leaf_dirs = ['dist', 'lib']
+  # Match a shared object file name.
+  sdk_so_filename_re = r'\.so(\.\d+)?$'
+
+  lib_names = set()
+  for dirpath, _, file_names in os.walk(sdk_arch_dir):
+    if os.path.basename(dirpath) in sdk_so_leaf_dirs:
+      for name in file_names:
+        if SO_FILENAME_REGEXP.search(name):
+          lib_names.add(name)
+  return lib_names
+
+
+def FarBaseName(name):
+  _, name = os.path.split(name)
+  name = re.sub(r'\.far$', '', name)
+  return name
+
+
+def GetPackageMerkleRoot(far_file_path):
+  """Returns a package's Merkle digest."""
+
+  # The digest is the first word on the first line of the merkle tool's output.
+  merkle_tool = GetHostToolPathFromPlatform('merkleroot')
+  output = subprocess.check_output([merkle_tool, far_file_path])
+  return output.splitlines()[0].split()[0]
+
+
+def GetBlobs(far_file, build_out_dir):
+  """Calculates compressed and uncompressed blob sizes for specified FAR file.
+  Marks ICU blobs and blobs from SDK libraries as not counted."""
+
+  base_name = FarBaseName(far_file)
+
+  extract_dir = tempfile.mkdtemp()
+
+  # Extract files and blobs from the specified Fuchsia archive.
+  far_file_path = os.path.join(build_out_dir, far_file)
+  far_extract_dir = os.path.join(extract_dir, base_name)
+  ExtractFarFile(far_file_path, far_extract_dir)
+
+  # Extract the meta.far archive contained in the specified Fuchsia archive.
+  meta_far_file_path = os.path.join(far_extract_dir, 'meta.far')
+  meta_far_extract_dir = os.path.join(extract_dir, '%s_meta' % base_name)
+  ExtractFarFile(meta_far_file_path, meta_far_extract_dir)
+
+  # Map Linux filesystem blob names to blob hashes.
+  blob_name_hashes = GetBlobNameHashes(meta_far_extract_dir)
+
+  # "System" files whose sizes are not charged against component size budgets.
+  # Fuchsia SDK modules and the ICU icudtl.dat file sizes are not counted.
+  system_files = GetSdkModules() | set(['icudtl.dat'])
+
+  # Add the meta.far file blob.
+  blobs = {}
+  meta_name = 'meta.far'
+  meta_hash = GetPackageMerkleRoot(meta_far_file_path)
+  compressed = GetCompressedSize(meta_far_file_path)
+  uncompressed = os.path.getsize(meta_far_file_path)
+  blobs[meta_name] = Blob(meta_name, meta_hash, compressed, uncompressed, True)
+
+  # Add package blobs.
+  for blob_name, blob_hash in blob_name_hashes.items():
+    extracted_blob_path = os.path.join(far_extract_dir, blob_hash)
+    compressed = GetCompressedSize(extracted_blob_path)
+    uncompressed = os.path.getsize(extracted_blob_path)
+    is_counted = os.path.basename(blob_name) not in system_files
+    blobs[blob_name] = Blob(blob_name, blob_hash, compressed, uncompressed,
+                            is_counted)
+
+  shutil.rmtree(extract_dir)
+
+  return blobs
+
+
+def GetPackageBlobs(far_files, build_out_dir):
+  """Returns dictionary mapping package names to blobs contained in the package.
+
+  Prints package blob size statistics."""
+
+  package_blobs = {}
+  for far_file in far_files:
+    package_name = FarBaseName(far_file)
+    if package_name in package_blobs:
+      raise Exception('Duplicate FAR file base name "%s".' % package_name)
+    package_blobs[package_name] = GetBlobs(far_file, build_out_dir)
+
+  # Print package blob sizes (does not count sharing).
+  for package_name in sorted(package_blobs.keys()):
+    print('Package blob sizes: %s' % package_name)
+    print('%-64s %12s %12s %s' %
+          ('blob hash', 'compressed', 'uncompressed', 'path'))
+    print('%s %s %s %s' % (64 * '-', 12 * '-', 12 * '-', 20 * '-'))
+    for blob_name in sorted(package_blobs[package_name].keys()):
+      blob = package_blobs[package_name][blob_name]
+      if blob.is_counted:
+        print('%64s %12d %12d %s' %
+              (blob.hash, blob.compressed, blob.uncompressed, blob.name))
+
+  return package_blobs
+
+
+def GetPackageSizes(package_blobs):
+  """Calculates compressed and uncompressed package sizes from blob sizes."""
+
+  # TODO(crbug.com/1126177): Use partial sizes for blobs shared by
+  # non Chrome-Fuchsia packages.
+
+  # Count number of packages sharing blobs (a count of 1 is not shared).
+  blob_counts = collections.defaultdict(int)
+  for package_name in package_blobs:
+    for blob_name in package_blobs[package_name]:
+      blob_counts[blob_name] += 1
+
+  # Package sizes are the sum of blob sizes divided by their share counts.
+  package_sizes = {}
+  for package_name in package_blobs:
+    compressed_total = 0
+    uncompressed_total = 0
+    for blob_name in package_blobs[package_name]:
+      blob = package_blobs[package_name][blob_name]
+      if blob.is_counted:
+        count = blob_counts[blob_name]
+        compressed_total += blob.compressed // count
+        uncompressed_total += blob.uncompressed // count
+    package_sizes[package_name] = PackageSizes(compressed_total,
+                                               uncompressed_total)
+
+  return package_sizes
+
+
+def GetBinarySizesAndBlobs(args, sizes_config):
+  """Get binary size data and contained blobs for packages specified in args.
+
+  If "total_size_name" is set, then computes a synthetic package size which is
+  the aggregated sizes across all packages."""
+
+  # Calculate compressed and uncompressed package sizes.
+  package_blobs = GetPackageBlobs(sizes_config['far_files'], args.build_out_dir)
+  package_sizes = GetPackageSizes(package_blobs)
+
+  # Optionally calculate total compressed and uncompressed package sizes.
+  if 'far_total_name' in sizes_config:
+    compressed = sum([a.compressed for a in package_sizes.values()])
+    uncompressed = sum([a.uncompressed for a in package_sizes.values()])
+    package_sizes[sizes_config['far_total_name']] = PackageSizes(
+        compressed, uncompressed)
+
+  for name, size in package_sizes.items():
+    print('%s: compressed size %d, uncompressed size %d' %
+          (name, size.compressed, size.uncompressed))
+
+  return package_sizes, package_blobs
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--build-out-dir',
+      '--output-directory',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build artifacts.',
+  )
+  parser.add_argument(
+      '--isolated-script-test-output',
+      type=os.path.realpath,
+      help='File to which simplified JSON results will be written.')
+  parser.add_argument(
+      '--size-plugin-json-path',
+      help='Optional path for json size data for the Gerrit binary size plugin',
+  )
+  parser.add_argument(
+      '--sizes-path',
+      default=os.path.join('fuchsia', 'release', 'size_tests',
+                           'fyi_sizes.json'),
+      help='path to package size limits json file.  The path is relative to '
+      'the workspace src directory')
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable verbose output')
+  # Accepted to conform to the isolated script interface, but ignored.
+  parser.add_argument('--isolated-script-test-filter', help=argparse.SUPPRESS)
+  parser.add_argument('--isolated-script-test-perf-output',
+                      help=argparse.SUPPRESS)
+  args = parser.parse_args()
+
+  if args.verbose:
+    print('Fuchsia binary sizes')
+    print('Working directory', os.getcwd())
+    print('Args:')
+    for var in vars(args):
+      print('  {}: {}'.format(var, getattr(args, var) or ''))
+
+  if not os.path.isdir(args.build_out_dir):
+    raise Exception('Could not find build output directory "%s".' %
+                    args.build_out_dir)
+
+  with open(os.path.join(DIR_SOURCE_ROOT, args.sizes_path)) as sizes_file:
+    sizes_config = json.load(sizes_file)
+
+  if args.verbose:
+    print('Sizes Config:')
+    print(json.dumps(sizes_config))
+
+  for far_rel_path in sizes_config['far_files']:
+    far_abs_path = os.path.join(args.build_out_dir, far_rel_path)
+    if not os.path.isfile(far_abs_path):
+      raise Exception('Could not find FAR file "%s".' % far_abs_path)
+
+  test_name = 'sizes'
+  timestamp = time.time()
+  test_completed = False
+  all_tests_passed = False
+  test_status = {}
+  package_sizes = {}
+  package_blobs = {}
+  sizes_histogram = []
+
+  results_directory = None
+  if args.isolated_script_test_output:
+    results_directory = os.path.join(
+        os.path.dirname(args.isolated_script_test_output), test_name)
+    if not os.path.exists(results_directory):
+      os.makedirs(results_directory)
+
+  try:
+    package_sizes, package_blobs = GetBinarySizesAndBlobs(args, sizes_config)
+    sizes_histogram = CreateSizesHistogram(package_sizes)
+    test_completed = True
+  except:
+    _, value, trace = sys.exc_info()
+    traceback.print_tb(trace)
+    print(str(value))
+  finally:
+    all_tests_passed, test_status = GetTestStatus(package_sizes, sizes_config,
+                                                  test_completed)
+
+    if results_directory:
+      WriteTestResults(os.path.join(results_directory, 'test_results.json'),
+                       test_completed, test_status, timestamp)
+      with open(os.path.join(results_directory, 'perf_results.json'), 'w') as f:
+        json.dump(sizes_histogram, f)
+      WritePackageBlobsJson(
+          os.path.join(results_directory, 'package_blobs.json'), package_blobs)
+
+    if args.isolated_script_test_output:
+      WriteTestResults(args.isolated_script_test_output, test_completed,
+                       test_status, timestamp)
+
+    if args.size_plugin_json_path:
+      WriteGerritPluginSizeData(args.size_plugin_json_path, package_sizes)
+
+    return 0 if all_tests_passed else 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/fuchsia/binary_sizes_test.py b/src/build/fuchsia/binary_sizes_test.py
new file mode 100755
index 0000000..962e4c9
--- /dev/null
+++ b/src/build/fuchsia/binary_sizes_test.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import math
+import os
+import shutil
+import subprocess
+import tempfile
+import time
+import unittest
+
+import binary_sizes
+
+from common import DIR_SOURCE_ROOT
+
+
+class TestBinarySizes(unittest.TestCase):
+  tmpdir = None
+
+  @classmethod
+  def setUpClass(cls):
+    cls.tmpdir = tempfile.mkdtemp()
+
+  @classmethod
+  def tearDownClass(cls):
+    shutil.rmtree(cls.tmpdir)
+
+  # TODO(crbug.com/1145648): Add tests covering FAR file input and histogram
+  # output.
+
+  def testCommitFromBuildProperty(self):
+    commit_position = binary_sizes.CommitPositionFromBuildProperty(
+        'refs/heads/master@{#819458}')
+    self.assertEqual(commit_position, 819458)
+
+  def testCompressedSize(self):
+    """Verifies that the compressed file size can be extracted from the
+    blobfs-compression output."""
+
+    uncompressed_file = tempfile.NamedTemporaryFile(delete=False)
+    for _ in range(200):
+      uncompressed_file.write(
+          'Lorem ipsum dolor sit amet, consectetur adipiscing elit. '
+          'Sed eleifend')
+    uncompressed_file.close()
+    compressed_path = uncompressed_file.name + '.compressed'
+    compressor_path = os.path.join(DIR_SOURCE_ROOT, 'third_party',
+                                   'fuchsia-sdk', 'sdk', 'tools', 'x64',
+                                   'blobfs-compression')
+    subprocess.call([compressor_path, uncompressed_file.name, compressed_path])
+    self.assertEqual(binary_sizes.CompressedSize(uncompressed_file.name),
+                     os.path.getsize(compressed_path))
+    os.remove(uncompressed_file.name)
+    os.remove(compressed_path)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/fuchsia/boot_data.py b/src/build/fuchsia/boot_data.py
new file mode 100644
index 0000000..bac60ec
--- /dev/null
+++ b/src/build/fuchsia/boot_data.py
@@ -0,0 +1,114 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions used to provision Fuchsia boot images."""
+
+import common
+import logging
+import os
+import subprocess
+import tempfile
+import time
+import uuid
+
+_SSH_CONFIG_TEMPLATE = """
+Host *
+  CheckHostIP no
+  StrictHostKeyChecking no
+  ForwardAgent no
+  ForwardX11 no
+  UserKnownHostsFile {known_hosts}
+  User fuchsia
+  IdentitiesOnly yes
+  IdentityFile {identity}
+  ServerAliveInterval 2
+  ServerAliveCountMax 5
+  ControlMaster auto
+  ControlPersist 1m
+  ControlPath /tmp/ssh-%r@%h:%p
+  ConnectTimeout 5
+  """
+
+# Specifies boot files intended for use by an emulator.
+TARGET_TYPE_QEMU = 'qemu'
+
+# Specifies boot files intended for use by anything (incl. physical devices).
+TARGET_TYPE_GENERIC = 'generic'
+
+def _GetPubKeyPath(output_dir):
+  """Returns a path to the generated SSH public key."""
+
+  return os.path.join(output_dir, 'id_ed25519.pub')
+
+
+def ProvisionSSH(output_dir):
+  """Generates a keypair and config file for SSH."""
+
+  host_key_path = os.path.join(output_dir, 'ssh_key')
+  host_pubkey_path = host_key_path + '.pub'
+  id_key_path = os.path.join(output_dir, 'id_ed25519')
+  id_pubkey_path = _GetPubKeyPath(output_dir)
+  known_hosts_path = os.path.join(output_dir, 'known_hosts')
+  ssh_config_path = os.path.join(output_dir, 'ssh_config')
+
+  logging.debug('Generating SSH credentials.')
+  if not os.path.isfile(host_key_path):
+    subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-h', '-f',
+                           host_key_path, '-P', '', '-N', ''],
+                          stdout=open(os.devnull, 'w'))
+  if not os.path.isfile(id_key_path):
+    subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', id_key_path,
+                           '-P', '', '-N', ''],
+                          stdout=open(os.devnull, 'w'))
+
+  with open(ssh_config_path, "w") as ssh_config:
+    ssh_config.write(
+        _SSH_CONFIG_TEMPLATE.format(identity=id_key_path,
+                                    known_hosts=known_hosts_path))
+
+  if os.path.exists(known_hosts_path):
+    os.remove(known_hosts_path)
+
+
+def GetTargetFile(filename, target_arch, target_type):
+  """Computes a path to |filename| in the Fuchsia boot image directory specific
+  to |target_type| and |target_arch|."""
+
+  assert target_type == TARGET_TYPE_QEMU or target_type == TARGET_TYPE_GENERIC
+
+  return os.path.join(common.IMAGES_ROOT, target_arch, target_type, filename)
+
+
+def GetSSHConfigPath(output_dir):
+  return os.path.join(output_dir, 'ssh_config')
+
+
+def GetBootImage(output_dir, target_arch, target_type):
+  """"Gets a path to the Zircon boot image, with the SSH client public key
+  added."""
+
+  ProvisionSSH(output_dir)
+  pubkey_path = _GetPubKeyPath(output_dir)
+  zbi_tool = common.GetHostToolPathFromPlatform('zbi')
+  image_source_path = GetTargetFile('zircon-a.zbi', target_arch, target_type)
+  image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi')
+
+  cmd = [zbi_tool, '-o', image_dest_path, image_source_path,
+         '-e', 'data/ssh/authorized_keys=' + pubkey_path]
+  subprocess.check_call(cmd)
+
+  return image_dest_path
+
+
+def GetKernelArgs(output_dir):
+  return ['devmgr.epoch=%d' % time.time()]
+
+
+def AssertBootImagesExist(arch, platform):
+  assert os.path.exists(GetTargetFile('zircon-a.zbi', arch, platform)), \
+      'This checkout is missing the files necessary for\n' \
+      'booting this configuration of Fuchsia.\n' \
+      'To check out the files, add this entry to the "custom_vars"\n' \
+      'section of your .gclient file:\n\n' \
+      '    "checkout_fuchsia_boot_images": "%s.%s"\n\n' % \
+           (platform, arch)
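+
+
+# Usage sketch (the output directory is illustrative):
+#
+#   ProvisionSSH('/tmp/fuchsia-out')
+#   zbi = GetBootImage('/tmp/fuchsia-out', 'x64', TARGET_TYPE_QEMU)
+#
+# ProvisionSSH() only generates keys that do not already exist, so repeated
+# calls reuse the same identity; the ssh_config file is rewritten each time.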
diff --git a/src/build/fuchsia/common.py b/src/build/fuchsia/common.py
new file mode 100644
index 0000000..99ced81
--- /dev/null
+++ b/src/build/fuchsia/common.py
@@ -0,0 +1,140 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import platform
+import signal
+import socket
+import subprocess
+import sys
+import time
+import threading
+
+DIR_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+IMAGES_ROOT = os.path.join(
+    DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'images')
+SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'sdk')
+
+def EnsurePathExists(path):
+  """Checks that the file |path| exists on the filesystem and returns the path
+  if it does, raising an exception otherwise."""
+
+  if not os.path.exists(path):
+    raise IOError('Missing file: ' + path)
+
+  return path
+
+def GetHostOsFromPlatform():
+  host_platform = sys.platform
+  if host_platform.startswith('linux'):
+    return 'linux'
+  elif host_platform.startswith('darwin'):
+    return 'mac'
+  raise Exception('Unsupported host platform: %s' % host_platform)
+
+def GetHostArchFromPlatform():
+  host_arch = platform.machine()
+  if host_arch == 'x86_64':
+    return 'x64'
+  elif host_arch == 'aarch64':
+    return 'arm64'
+  raise Exception('Unsupported host architecture: %s' % host_arch)
+
+def GetHostToolPathFromPlatform(tool):
+  return os.path.join(SDK_ROOT, 'tools', GetHostArchFromPlatform(), tool)
+
+
+def GetEmuRootForPlatform(emulator):
+  return os.path.join(
+      DIR_SOURCE_ROOT, 'third_party', '{0}-{1}-{2}'.format(
+          emulator, GetHostOsFromPlatform(), GetHostArchFromPlatform()))
+
+
+def ConnectPortForwardingTask(target, local_port, remote_port=0):
+  """Establishes a port forwarding SSH task to a localhost TCP endpoint hosted
+  at port |local_port|. Blocks until port forwarding is established.
+
+  Returns the remote port number."""
+
+  forwarding_flags = ['-O', 'forward',  # Send SSH mux control signal.
+                      '-R', '%d:localhost:%d' % (remote_port, local_port),
+                      '-v',   # Get forwarded port info from stderr.
+                      '-NT']  # Don't execute command; don't allocate terminal.
+
+  if remote_port != 0:
+    # Forward to a known remote port.
+    task = target.RunCommand([], ssh_args=forwarding_flags)
+    if task.returncode != 0:
+      raise Exception('Could not establish a port forwarding connection.')
+    return
+
+  task = target.RunCommandPiped([],
+                                ssh_args=forwarding_flags,
+                                stdout=subprocess.PIPE,
+                                stderr=open(os.devnull, 'w'))
+  output = task.stdout.readlines()
+  task.wait()
+  if task.returncode != 0:
+    raise Exception('Got an error code when requesting port forwarding: %d' %
+                    task.returncode)
+
+  parsed_port = int(output[0].strip())
+  logging.debug('Port forwarding established (local=%d, device=%d)' %
+                (local_port, parsed_port))
+  return parsed_port
+
+
+def GetAvailableTcpPort():
+  """Finds a (probably) open port by opening and closing a listen socket."""
+  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+  sock.bind(("", 0))
+  port = sock.getsockname()[1]
+  sock.close()
+  return port
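+
+# Note: the port returned by GetAvailableTcpPort() is only *probably* free;
+# another process can claim it between the close() above and the moment the
+# caller binds it. Callers that hit bind failures should simply ask for a
+# fresh port and retry.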
+
+
+def SubprocessCallWithTimeout(command, silent=False, timeout_secs=None):
+  """Helper function for running a command.
+
+  Args:
+    command: The command to run.
+    silent: If true, stdout and stderr of the command will not be printed.
+    timeout_secs: Maximum amount of time allowed for the command to finish.
+
+  Returns:
+    A tuple of (return code, stdout, stderr) of the command. Raises
+    an exception if the subprocess times out.
+  """
+
+  if silent:
+    devnull = open(os.devnull, 'w')
+    process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+  else:
+    process = subprocess.Popen(command,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+  timeout_timer = None
+  if timeout_secs:
+
+    def interrupt_process():
+      process.send_signal(signal.SIGKILL)
+
+    timeout_timer = threading.Timer(timeout_secs, interrupt_process)
+
+    # Ensure that keyboard interrupts are handled properly (crbug/1198113).
+    timeout_timer.daemon = True
+
+    timeout_timer.start()
+
+  out, err = process.communicate()
+  if timeout_timer:
+    timeout_timer.cancel()
+
+  if process.returncode == -9:
+    raise Exception('Timeout when executing \"%s\".' % ' '.join(command))
+
+  return process.returncode, out, err
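+
+
+# Usage sketch for SubprocessCallWithTimeout (the command is illustrative):
+#
+#   returncode, out, err = SubprocessCallWithTimeout(
+#       ['uname', '-a'], silent=False, timeout_secs=10)
+#
+# A timed-out child is killed with SIGKILL, which surfaces as returncode -9
+# and is converted into an exception above rather than being returned.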
diff --git a/src/build/fuchsia/common_args.py b/src/build/fuchsia/common_args.py
new file mode 100644
index 0000000..877beaa
--- /dev/null
+++ b/src/build/fuchsia/common_args.py
@@ -0,0 +1,173 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import importlib
+import logging
+import os
+import sys
+
+from common import GetHostArchFromPlatform
+
+BUILTIN_TARGET_NAMES = ['aemu', 'qemu', 'device']
+
+
+def _AddTargetSpecificationArgs(arg_parser):
+  """Returns a parser that handles the target type used for the test run."""
+
+  device_args = arg_parser.add_argument_group(
+      'target',
+      'Arguments specifying the Fuchsia target type. To see a list of '
+      'arguments available for a specific target type, specify the desired '
+      'target to use and add the --help flag.')
+  device_args.add_argument('--target-cpu',
+                           default=GetHostArchFromPlatform(),
+                           help='GN target_cpu setting for the build. Defaults '
+                           'to the same architecture as the host CPU.')
+  device_args.add_argument('--device',
+                           default=None,
+                           choices=BUILTIN_TARGET_NAMES + ['custom'],
+                           help='Choose to run on aemu|qemu|device. '
+                           'By default, Fuchsia will run on AEMU on x64 '
+                           'hosts and QEMU on arm64 hosts. Alternatively, '
+                           'setting to custom will require specifying the '
+                           'subclass of Target class used via the '
+                           '--custom-device-target flag.')
+  device_args.add_argument('-d',
+                           action='store_const',
+                           dest='device',
+                           const='device',
+                           help='Run on device instead of emulator.')
+  device_args.add_argument('--custom-device-target',
+                           default=None,
+                           help='Specify path to file that contains the '
+                           'subclass of Target that will be used. Only '
+                           'needed if device-specific operations such as '
+                           'paving are required.')
+
+
+def _GetPathToBuiltinTarget(target_name):
+  return '%s_target' % target_name
+
+
+def _LoadTargetClass(target_path):
+  try:
+    loaded_target = importlib.import_module(target_path)
+  except ImportError:
+    logging.error(
+        'Cannot import from %s. Make sure that --custom-device-target '
+        'is pointing to a file containing a target '
+        'module.' % target_path)
+    raise
+  return loaded_target.GetTargetType()
+
+
+def AddCommonArgs(arg_parser):
+  """Adds command line arguments to |arg_parser| for options which are shared
+  across test and executable target types.
+
+  Args:
+    arg_parser: an ArgumentParser object."""
+
+  common_args = arg_parser.add_argument_group('common', 'Common arguments')
+  common_args.add_argument('--runner-logs-dir',
+                           help='Directory to write test runner logs to.')
+  common_args.add_argument('--exclude-system-logs',
+                           action='store_false',
+                           dest='include_system_logs',
+                           help='Do not show system log data.')
+  common_args.add_argument('--verbose',
+                           '-v',
+                           default=False,
+                           action='store_true',
+                           help='Enable debug-level logging.')
+  common_args.add_argument(
+      '--out-dir',
+      type=os.path.realpath,
+      help=('Path to the directory in which build files are located. '
+            'Defaults to current directory.'))
+  common_args.add_argument('--system-log-file',
+                           help='File to write system logs to. Specify '
+                           '\'-\' to log to stdout.')
+  common_args.add_argument('--fuchsia-out-dir',
+                           help='Path to a Fuchsia build output directory. '
+                           'Setting the GN arg '
+                           '"default_fuchsia_build_dir_for_installation" '
+                           'will cause it to be passed here.')
+
+  package_args = arg_parser.add_argument_group('package', 'Fuchsia Packages')
+  package_args.add_argument(
+      '--package',
+      action='append',
+      help='Paths of the packages to install, including '
+      'all dependencies.')
+  package_args.add_argument(
+      '--package-name',
+      help='Name of the package to execute, defined in package metadata.')
+
+  emu_args = arg_parser.add_argument_group('emu', 'General emulator arguments')
+  emu_args.add_argument('--cpu-cores',
+                        type=int,
+                        default=4,
+                        help='Sets the number of CPU cores to provide.')
+  emu_args.add_argument('--ram-size-mb',
+                        type=int,
+                        default=2048,
+                        help='Sets the emulated RAM size (MB).')
+  emu_args.add_argument('--allow-no-kvm',
+                        action='store_false',
+                        dest='require_kvm',
+                        default=True,
+                        help='Do not require KVM acceleration for '
+                        'emulators.')
+
+
+# Register the arguments for all known target types and the optional custom
+# target type (specified on the commandline).
+def AddTargetSpecificArgs(arg_parser):
+  # Parse the minimal set of arguments to determine if custom targets need to
+  # be loaded so that their arguments can be registered.
+  target_spec_parser = argparse.ArgumentParser(add_help=False)
+  _AddTargetSpecificationArgs(target_spec_parser)
+  target_spec_args, _ = target_spec_parser.parse_known_args()
+  _AddTargetSpecificationArgs(arg_parser)
+
+  for target in BUILTIN_TARGET_NAMES:
+    _LoadTargetClass(_GetPathToBuiltinTarget(target)).RegisterArgs(arg_parser)
+  if target_spec_args.custom_device_target:
+    _LoadTargetClass(
+        target_spec_args.custom_device_target).RegisterArgs(arg_parser)
+
+
+def ConfigureLogging(args):
+  """Configures the logging level based on command line |args|."""
+
+  logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO),
+                      format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
+
+  # The test server spawner is too noisy with INFO level logging, so tweak
+  # its verbosity a bit by adjusting its logging level.
+  logging.getLogger('chrome_test_server_spawner').setLevel(
+      logging.DEBUG if args.verbose else logging.WARN)
+
+  # Verbose SCP output can be useful at times but oftentimes is just too noisy.
+  # Only enable it if -vv is passed.
+  logging.getLogger('ssh').setLevel(
+      logging.DEBUG if args.verbose else logging.WARN)
+
+
+def GetDeploymentTargetForArgs(args):
+  """Constructs a deployment target object using command line arguments.
+     If needed, an additional_args dict can be used to supplement the
+     command line arguments."""
+
+  if args.device == 'custom':
+    return _LoadTargetClass(args.custom_device_target).CreateFromArgs(args)
+
+  if args.device:
+    device = args.device
+  else:
+    device = 'aemu' if args.target_cpu == 'x64' else 'qemu'
+
+  return _LoadTargetClass(_GetPathToBuiltinTarget(device)).CreateFromArgs(args)
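+
+
+# Typical wiring for a runner script (sketch only; the parser setup is
+# illustrative):
+#
+#   parser = argparse.ArgumentParser()
+#   AddCommonArgs(parser)
+#   AddTargetSpecificArgs(parser)
+#   args = parser.parse_args()
+#   ConfigureLogging(args)
+#   target = GetDeploymentTargetForArgs(args)
+#   target.Start()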
diff --git a/src/build/fuchsia/deploy_to_amber_repo.py b/src/build/fuchsia/deploy_to_amber_repo.py
new file mode 100755
index 0000000..80ac2fe
--- /dev/null
+++ b/src/build/fuchsia/deploy_to_amber_repo.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Deploys Fuchsia packages to an Amber repository in a Fuchsia
+build output directory."""
+
+import amber_repo
+import argparse
+import os
+import sys
+
+
+# Populates the GDB-standard symbol directory structure |build_ids_path| with
+# the files and build IDs specified in |ids_txt_path|.
+def InstallSymbols(ids_txt_path, build_ids_path):
+  for entry in open(ids_txt_path, 'r'):
+    build_id, binary_relpath = entry.strip().split(' ')
+    binary_abspath = os.path.abspath(os.path.join(os.path.dirname(ids_txt_path),
+                                                  binary_relpath))
+    symbol_dir = os.path.join(build_ids_path, build_id[:2])
+    symbol_file = os.path.join(symbol_dir, build_id[2:] + '.debug')
+
+    if not os.path.exists(symbol_dir):
+      os.makedirs(symbol_dir)
+
+    if os.path.islink(symbol_file) or os.path.exists(symbol_file):
+      # Clobber the existing entry to ensure that the symlink's target is
+      # up to date.
+      os.unlink(symbol_file)
+
+    os.symlink(os.path.relpath(binary_abspath, symbol_dir), symbol_file)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--package', action='append', required=True,
+                      help='Paths to packages to install.')
+  parser.add_argument('--fuchsia-out-dir',
+                      required=True,
+                      help='Path to a Fuchsia build output directory. '
+                      'Setting the GN arg '
+                      '"default_fuchsia_build_dir_for_installation" '
+                      'will cause it to be passed here.')
+  args = parser.parse_args()
+  assert args.package
+
+  fuchsia_out_dir = os.path.expanduser(args.fuchsia_out_dir)
+  repo = amber_repo.ExternalAmberRepo(
+      os.path.join(fuchsia_out_dir, 'amber-files'))
+  print('Installing packages and symbols in Amber repo %s...' % repo.GetPath())
+
+  for package in args.package:
+    repo.PublishPackage(package)
+    InstallSymbols(os.path.join(os.path.dirname(package), 'ids.txt'),
+                   os.path.join(fuchsia_out_dir, '.build-id'))
+
+  print('Installation complete.')
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
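+
+
+# Example invocation (paths are hypothetical):
+#
+#   deploy_to_amber_repo.py --package out/fuchsia/gen/foo/foo.far \
+#       --fuchsia-out-dir ~/fuchsia/out/default
+#
+# Symbols are mirrored into <fuchsia-out-dir>/.build-id using the ids.txt
+# file that sits next to each package.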
diff --git a/src/build/fuchsia/device_target.py b/src/build/fuchsia/device_target.py
new file mode 100644
index 0000000..cb0fe50
--- /dev/null
+++ b/src/build/fuchsia/device_target.py
@@ -0,0 +1,280 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running and interacting with Fuchsia on devices."""
+
+from __future__ import print_function
+
+import amber_repo
+import boot_data
+import filecmp
+import logging
+import os
+import re
+import subprocess
+import sys
+import target
+import tempfile
+import time
+import uuid
+
+from common import SDK_ROOT, EnsurePathExists, GetHostToolPathFromPlatform
+
+# The maximum times to attempt mDNS resolution when connecting to a freshly
+# booted Fuchsia instance before aborting.
+BOOT_DISCOVERY_ATTEMPTS = 30
+
+# Number of failed connection attempts before redirecting system logs to stdout.
+CONNECT_RETRY_COUNT_BEFORE_LOGGING = 10
+
+# Number of seconds to wait for device discovery.
+BOOT_DISCOVERY_TIMEOUT_SECS = 2 * 60
+
+# The timeout limit for one call to the device-finder tool.
+_DEVICE_FINDER_TIMEOUT_LIMIT_SECS = \
+    BOOT_DISCOVERY_TIMEOUT_SECS / BOOT_DISCOVERY_ATTEMPTS
+
+# Time between when a reboot command is issued and when connection attempts
+# from the host begin.
+_REBOOT_SLEEP_PERIOD = 20
+
+
+def GetTargetType():
+  return DeviceTarget
+
+
+class DeviceTarget(target.Target):
+  """Prepares a device to be used as a deployment target. Depending on the
+  command line parameters, it automatically handles a number of preparatory
+  steps relating to address resolution.
+
+  If |_node_name| is unset:
+    If there is one running device, use it for deployment and execution.
+
+    If more than one device is running, then abort and instruct the
+    user to re-run the command with |_node_name| set.
+
+  If |_node_name| is set:
+    If there is a running device with a matching nodename, then it is used
+    for deployment and execution.
+
+  If |_host| is set:
+    Deploy to a device at the host IP address as-is."""
+
+  def __init__(self,
+               out_dir,
+               target_cpu,
+               host=None,
+               node_name=None,
+               port=None,
+               ssh_config=None,
+               fuchsia_out_dir=None,
+               os_check='update',
+               system_log_file=None):
+    """out_dir: The directory which will contain the files that are
+                   generated to support the deployment.
+    target_cpu: The CPU architecture of the deployment target. Can be
+                "x64" or "arm64".
+    host: The address of the deployment target device.
+    node_name: The node name of the deployment target device.
+    port: The port of the SSH service on the deployment target device.
+    ssh_config: The path to SSH configuration data.
+    fuchsia_out_dir: The path to a Fuchsia build output directory, for
+                     deployments to devices paved with local Fuchsia builds.
+    os_check: If 'check', the target's SDK version must match.
+              If 'update', the target will be repaved if the SDK versions
+                  mismatch.
+              If 'ignore', the target's SDK version is ignored."""
+
+    super(DeviceTarget, self).__init__(out_dir, target_cpu)
+
+    self._system_log_file = system_log_file
+    self._host = host
+    self._port = port
+    self._fuchsia_out_dir = None
+    self._node_name = node_name
+    self._os_check = os_check
+    self._amber_repo = None
+
+    if self._host and self._node_name:
+      raise Exception('Only one of "--host" or "--name" can be specified.')
+
+    if fuchsia_out_dir:
+      if ssh_config:
+        raise Exception('Only one of "--fuchsia-out-dir" or "--ssh_config" can '
+                        'be specified.')
+
+      self._fuchsia_out_dir = os.path.expanduser(fuchsia_out_dir)
+      # Use SSH keys from the Fuchsia output directory.
+      self._ssh_config_path = os.path.join(self._fuchsia_out_dir, 'ssh-keys',
+                                           'ssh_config')
+      self._os_check = 'ignore'
+
+    elif ssh_config:
+      # Use the SSH config provided via the commandline.
+      self._ssh_config_path = os.path.expanduser(ssh_config)
+
+    else:
+      # Default to using an automatically generated SSH config and keys.
+      boot_data.ProvisionSSH(out_dir)
+      self._ssh_config_path = boot_data.GetSSHConfigPath(out_dir)
+
+  @staticmethod
+  def CreateFromArgs(args):
+    return DeviceTarget(args.out_dir, args.target_cpu, args.host,
+                        args.node_name, args.port, args.ssh_config,
+                        args.fuchsia_out_dir, args.os_check,
+                        args.system_log_file)
+
+  @staticmethod
+  def RegisterArgs(arg_parser):
+    device_args = arg_parser.add_argument_group(
+        'device', 'External device deployment arguments')
+    device_args.add_argument('--host',
+                             help='The IP of the target device. Optional.')
+    device_args.add_argument('--node-name',
+                             help='The node-name of the device to boot or '
+                             'deploy to. Optional, will use the first '
+                             'discovered device if omitted.')
+    device_args.add_argument('--port',
+                             '-p',
+                             type=int,
+                             default=None,
+                             help='The port of the SSH service running on the '
+                             'device. Optional.')
+    device_args.add_argument('--ssh-config',
+                             '-F',
+                             help='The path to the SSH configuration used for '
+                             'connecting to the target device.')
+    device_args.add_argument(
+        '--os-check',
+        choices=['check', 'update', 'ignore'],
+        default='update',
+        help="Sets the OS version enforcement policy. If 'check', then the "
+        "deployment process will halt if the target\'s version doesn\'t "
+        "match. If 'update', then the target device will automatically "
+        "be repaved. If 'ignore', then the OS version won\'t be checked.")
+
+  def _ProvisionDeviceIfNecessary(self):
+    if self._Discover():
+      self._WaitUntilReady()
+    else:
+      raise Exception('Could not find device. If the device is connected '
+                      'to the host remotely, make sure that the --host flag '
+                      'is set and that remote serving is set up.')
+
+  def _Discover(self):
+    """Queries mDNS for the IP address of a booted Fuchsia instance whose name
+    matches |_node_name| on the local area network. If |_node_name| isn't
+    specified, and there is only one device on the network, then returns the
+    IP address of that device.
+
+    Sets |_host| (and |_node_name|, if it had to be discovered) and returns
+    True if the device was found, or returns False if the device couldn't be
+    found before the discovery timeout expired."""
+
+    dev_finder_path = GetHostToolPathFromPlatform('device-finder')
+
+    if self._node_name:
+      command = [
+          dev_finder_path,
+          'resolve',
+          '-timeout',
+          "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS,
+          '-device-limit',
+          '1',  # Exit early as soon as a host is found.
+          self._node_name
+      ]
+    else:
+      command = [
+          dev_finder_path, 'list', '-full', '-timeout',
+          "%ds" % _DEVICE_FINDER_TIMEOUT_LIMIT_SECS
+      ]
+
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=open(os.devnull, 'w'))
+
+    output = set(proc.communicate()[0].strip().split('\n'))
+    if proc.returncode != 0:
+      return False
+
+    if self._node_name:
+      # Handle the result of "device-finder resolve".
+      self._host = output.pop().strip()
+
+    else:
+      name_host_pairs = [x.strip().split(' ') for x in output]
+
+      # Handle the output of "device-finder list".
+      if len(name_host_pairs) > 1:
+        print('More than one device was discovered on the network.')
+        print('Use --node-name <name> to specify the device to use.')
+        print('\nList of devices:')
+        for pair in name_host_pairs:
+          print('  ' + pair[1])
+        print()
+        raise Exception('Ambiguous target device specification.')
+
+      assert len(name_host_pairs) == 1
+      self._host, self._node_name = name_host_pairs[0]
+
+    logging.info('Found device "%s" at address %s.' % (self._node_name,
+                                                       self._host))
+
+    return True
+
+  def Start(self):
+    if self._host:
+      self._WaitUntilReady()
+    else:
+      self._ProvisionDeviceIfNecessary()
+
+  def GetAmberRepo(self):
+    if not self._amber_repo:
+      if self._fuchsia_out_dir:
+        # Deploy to an already-booted device running a local Fuchsia build.
+        self._amber_repo = amber_repo.ExternalAmberRepo(
+            os.path.join(self._fuchsia_out_dir, 'amber-files'))
+      else:
+        # Create an ephemeral Amber repo, then start both "pm serve" as well as
+        # the bootserver.
+        self._amber_repo = amber_repo.ManagedAmberRepo(self)
+
+    return self._amber_repo
+
+  def _ParseNodename(self, output):
+    # Parse the nodename from bootserver stdout.
+    m = re.search(r'.*Proceeding with nodename (?P<nodename>.*)$', output,
+                  re.MULTILINE)
+    if not m:
+      raise Exception('Couldn\'t parse nodename from bootserver output.')
+    self._node_name = m.groupdict()['nodename']
+    logging.info('Booted device "%s".' % self._node_name)
+
+    # Repeatedly query mDNS until we find the device, or we hit the timeout
+    # of BOOT_DISCOVERY_TIMEOUT_SECS.
+    logging.info('Waiting for device to join network.')
+    for _ in range(BOOT_DISCOVERY_ATTEMPTS):
+      if self._Discover():
+        break
+
+    if not self._host:
+      raise Exception('Device %s couldn\'t be discovered via mDNS.' %
+                      self._node_name)
+
+    self._WaitUntilReady()
+
+  def _GetEndpoint(self):
+    return (self._host, self._port)
+
+  def _GetSshConfigPath(self):
+    return self._ssh_config_path
+
+  def Restart(self):
+    """Restart the device."""
+
+    self.RunCommandPiped('dm reboot')
+    time.sleep(_REBOOT_SLEEP_PERIOD)
+    self.Start()
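+
+
+# Construction sketch (the address and paths are illustrative): deploy to a
+# device at a known address with an explicit SSH config, skipping the OS
+# version check.
+#
+#   target = DeviceTarget('/tmp/out', 'x64', host='192.168.42.64',
+#                         ssh_config='~/.ssh/fuchsia_config',
+#                         os_check='ignore')
+#   target.Start()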
diff --git a/src/build/fuchsia/emu_target.py b/src/build/fuchsia/emu_target.py
new file mode 100644
index 0000000..4f8ddbb
--- /dev/null
+++ b/src/build/fuchsia/emu_target.py
@@ -0,0 +1,142 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running/interacting with Fuchsia on an emulator."""
+
+import amber_repo
+import boot_data
+import logging
+import os
+import runner_logs
+import subprocess
+import sys
+import target
+import tempfile
+
+
+class EmuTarget(target.Target):
+  def __init__(self, out_dir, target_cpu, system_log_file, fuchsia_out_dir):
+    """out_dir: The directory which will contain the files that are
+                   generated to support the emulator deployment.
+    target_cpu: The emulated target CPU architecture.
+                Can be 'x64' or 'arm64'."""
+
+    # fuchsia_out_dir is unused by emulator targets.
+    del fuchsia_out_dir
+
+    super(EmuTarget, self).__init__(out_dir, target_cpu)
+    self._emu_process = None
+    self._system_log_file = system_log_file
+    self._amber_repo = None
+
+  def __enter__(self):
+    return self
+
+  def _BuildCommand(self):
+    """Builds the command that will be run to start Fuchsia in the emulator.
+    Must be overridden by each concrete emulator target."""
+    raise NotImplementedError()
+
+  def _SetEnv(self):
+    return os.environ.copy()
+
+  # Used by the context manager to ensure that the emulator is killed when
+  # the Python process exits.
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.Shutdown()
+
+  def Start(self):
+    emu_command = self._BuildCommand()
+
+    # We pass a separate stdin stream. Sharing stdin across processes
+    # leads to flakiness due to the OS prematurely killing the stream and the
+    # Python script panicking and aborting.
+    # The precise root cause is still nebulous, but this fix works.
+    # See crbug.com/741194.
+    logging.debug('Launching %s.' % (self.EMULATOR_NAME))
+    logging.debug(' '.join(emu_command))
+
+    # Zircon sends debug logs to serial port (see kernel.serial=legacy flag
+    # above). Serial port is redirected to a file through emulator stdout.
+    # If runner_logs are not enabled, we output the kernel serial log
+    # to a temporary file, and print that out if we are unable to connect to
+    # the emulator guest, to make it easier to diagnose connectivity issues.
+    temporary_log_file = None
+    if runner_logs.IsEnabled():
+      stdout = runner_logs.FileStreamFor('serial_log')
+    else:
+      temporary_log_file = tempfile.NamedTemporaryFile('w')
+      stdout = temporary_log_file
+
+    # TODO(crbug.com/1100402): Delete when no longer needed for debug info.
+    # Log system statistics at the start of the emulator run.
+    _LogSystemStatistics('system_start_statistics_log')
+
+    self._emu_process = subprocess.Popen(emu_command,
+                                         stdin=open(os.devnull),
+                                         stdout=stdout,
+                                         stderr=subprocess.STDOUT,
+                                         env=self._SetEnv())
+
+    try:
+      self._WaitUntilReady()
+    except target.FuchsiaTargetException:
+      if temporary_log_file:
+        logging.info('Kernel logs:\n' +
+                     open(temporary_log_file.name, 'r').read())
+      raise
+
+  def GetAmberRepo(self):
+    if not self._amber_repo:
+      self._amber_repo = amber_repo.ManagedAmberRepo(self)
+
+    return self._amber_repo
+
+  def Shutdown(self):
+    if not self._emu_process:
+      logging.error('%s did not start' % (self.EMULATOR_NAME))
+      return
+    returncode = self._emu_process.poll()
+    if returncode is None:
+      logging.info('Shutting down %s' % (self.EMULATOR_NAME))
+      self._emu_process.kill()
+    elif returncode == 0:
+      logging.info('%s quit unexpectedly without errors' % self.EMULATOR_NAME)
+    elif returncode < 0:
+      logging.error('%s was terminated by signal %d' %
+                    (self.EMULATOR_NAME, -returncode))
+    else:
+      logging.error('%s quit unexpectedly with exit code %d' %
+                    (self.EMULATOR_NAME, returncode))
+
+    # TODO(crbug.com/1100402): Delete when no longer needed for debug info.
+    # Log system statistics at the end of the emulator run.
+    _LogSystemStatistics('system_end_statistics_log')
+
+
+  def _IsEmuStillRunning(self):
+    if not self._emu_process:
+      return False
+    return os.waitpid(self._emu_process.pid, os.WNOHANG)[0] == 0
+
+  def _GetEndpoint(self):
+    if not self._IsEmuStillRunning():
+      raise Exception('%s quit unexpectedly.' % (self.EMULATOR_NAME))
+    return ('localhost', self._host_ssh_port)
+
+  def _GetSshConfigPath(self):
+    return boot_data.GetSSHConfigPath(self._out_dir)
+
+
+# TODO(crbug.com/1100402): Delete when no longer needed for debug info.
+def _LogSystemStatistics(log_file_name):
+  statistics_log = runner_logs.FileStreamFor(log_file_name)
+  # Log the cpu load and process information.
+  subprocess.call(['top', '-b', '-n', '1'],
+                  stdin=open(os.devnull),
+                  stdout=statistics_log,
+                  stderr=subprocess.STDOUT)
+  subprocess.call(['ps', '-ax'],
+                  stdin=open(os.devnull),
+                  stdout=statistics_log,
+                  stderr=subprocess.STDOUT)
diff --git a/src/build/fuchsia/generic_x64_target.py b/src/build/fuchsia/generic_x64_target.py
new file mode 100644
index 0000000..5fece12
--- /dev/null
+++ b/src/build/fuchsia/generic_x64_target.py
@@ -0,0 +1,99 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implements commands for running and interacting with Fuchsia generic
+build on devices."""
+
+import boot_data
+import device_target
+import filecmp
+import logging
+import os
+import subprocess
+import tempfile
+
+from common import SDK_ROOT, EnsurePathExists, \
+                   GetHostToolPathFromPlatform, SubprocessCallWithTimeout
+
+
+def GetTargetType():
+  return GenericX64PavedDeviceTarget
+
+
+class GenericX64PavedDeviceTarget(device_target.DeviceTarget):
+  """In addition to the functionality provided by DeviceTarget, this class
+  automatically handles paving of x64 devices that use generic Fuchsia build.
+
+  If there are no running devices, then search for a device running Zedboot
+  and pave it.
+
+  If there's only one running device, or |_node_name| is set, then the
+  device's SDK version is checked unless --os-check=ignore is set.
+  If --os-check=update is set, then the target device is repaved if the SDK
+  version doesn't match."""
+
+  TARGET_HASH_FILE_PATH = '/data/.hash'
+
+  def _SDKHashMatches(self):
+    """Checks if /data/.hash on the device matches SDK_ROOT/.hash.
+
+    Returns True if the files are identical, or False otherwise.
+    """
+
+    with tempfile.NamedTemporaryFile() as tmp:
+      # TODO: Avoid using an exception for when file is unretrievable.
+      try:
+        self.GetFile(self.TARGET_HASH_FILE_PATH, tmp.name)
+      except subprocess.CalledProcessError:
+        # If the file is unretrievable for whatever reason, assume mismatch.
+        return False
+
+      return filecmp.cmp(tmp.name, os.path.join(SDK_ROOT, '.hash'), False)
+
+  def _ProvisionDeviceIfNecessary(self):
+    should_provision = False
+
+    if self._Discover():
+      self._WaitUntilReady()
+
+      if self._os_check != 'ignore':
+        if not self._SDKHashMatches():
+          if self._os_check == 'update':
+            logging.info('SDK hash does not match; rebooting and repaving.')
+            self.RunCommand(['dm', 'reboot'])
+            should_provision = True
+          elif self._os_check == 'check':
+            raise Exception('Target device SDK version does not match.')
+    else:
+      should_provision = True
+
+    if should_provision:
+      self._ProvisionDevice()
+
+  def _ProvisionDevice(self):
+    """Pave a device with a generic image of Fuchsia."""
+
+    bootserver_path = GetHostToolPathFromPlatform('bootserver')
+    bootserver_command = [
+        bootserver_path, '-1', '--fvm',
+        EnsurePathExists(
+            boot_data.GetTargetFile('storage-sparse.blk',
+                                    self._GetTargetSdkArch(),
+                                    boot_data.TARGET_TYPE_GENERIC)),
+        EnsurePathExists(
+            boot_data.GetBootImage(self._out_dir, self._GetTargetSdkArch(),
+                                   boot_data.TARGET_TYPE_GENERIC))
+    ]
+
+    if self._node_name:
+      bootserver_command += ['-n', self._node_name]
+
+    bootserver_command += ['--']
+    bootserver_command += boot_data.GetKernelArgs(self._out_dir)
+
+    logging.debug(' '.join(bootserver_command))
+    _, stdout, _ = SubprocessCallWithTimeout(bootserver_command,
+                                             silent=False,
+                                             timeout_secs=300)
+
+    self._ParseNodename(stdout)
+
+    # Update the target's hash to match the current tree's.
+    self.PutFile(os.path.join(SDK_ROOT, '.hash'), self.TARGET_HASH_FILE_PATH)
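+
+
+# Provisioning handshake, in short: SDK_ROOT/.hash on the host is compared
+# against TARGET_HASH_FILE_PATH on the device to decide whether a repave is
+# needed, and the device-side copy is rewritten after a successful pave so
+# that the next run can skip provisioning entirely.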
diff --git a/src/build/fuchsia/linux.sdk.sha1 b/src/build/fuchsia/linux.sdk.sha1
new file mode 100644
index 0000000..7cb92c4
--- /dev/null
+++ b/src/build/fuchsia/linux.sdk.sha1
@@ -0,0 +1 @@
+4.20210430.1.1
diff --git a/src/build/fuchsia/mac.sdk.sha1 b/src/build/fuchsia/mac.sdk.sha1
new file mode 100644
index 0000000..7cb92c4
--- /dev/null
+++ b/src/build/fuchsia/mac.sdk.sha1
@@ -0,0 +1 @@
+4.20210430.1.1
diff --git a/src/build/fuchsia/net_test_server.py b/src/build/fuchsia/net_test_server.py
new file mode 100644
index 0000000..56005cf
--- /dev/null
+++ b/src/build/fuchsia/net_test_server.py
@@ -0,0 +1,90 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import json
+import logging
+import os
+import re
+import socket
+import sys
+import subprocess
+import tempfile
+
+DIR_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import chrome_test_server_spawner
+
+
+# Implementation of chrome_test_server_spawner.PortForwarder that uses SSH's
+# remote port forwarding feature to forward ports.
+class SSHPortForwarder(chrome_test_server_spawner.PortForwarder):
+  def __init__(self, target):
+    self._target = target
+
+    # Maps the host (server) port to the device port number.
+    self._port_mapping = {}
+
+  def Map(self, port_pairs):
+    for p in port_pairs:
+      _, host_port = p
+      self._port_mapping[host_port] = \
+          common.ConnectPortForwardingTask(self._target, host_port)
+
+  def GetDevicePortForHostPort(self, host_port):
+    return self._port_mapping[host_port]
+
+  def Unmap(self, device_port):
+    for host_port, entry in self._port_mapping.iteritems():
+      if entry == device_port:
+        forwarding_args = [
+            '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port]
+        task = self._target.RunCommandPiped([],
+                                            ssh_args=forwarding_args,
+                                            stdout=open(os.devnull, 'w'),
+                                            stderr=subprocess.PIPE)
+        task.wait()
+        if task.returncode != 0:
+          raise Exception(
+              'Error %d when unmapping port %d' % (task.returncode,
+                                                   device_port))
+        del self._port_mapping[host_port]
+        return
+
+    raise Exception('Unmap called for unknown port: %d' % device_port)
+
+
+def SetupTestServer(target, test_concurrency, for_package, for_realms=()):
+  """Provisions a forwarding test server and configures |target| to use it.
+
+  Returns a Popen object for the test server process."""
+
+  logging.debug('Starting test server.')
+  # The TestLauncher can launch more jobs than the limit specified with
+  # --test-launcher-jobs so the max number of spawned test servers is set to
+  # twice that limit here. See https://crbug.com/913156#c19.
+  spawning_server = chrome_test_server_spawner.SpawningServer(
+      0, SSHPortForwarder(target), test_concurrency * 2)
+  forwarded_port = common.ConnectPortForwardingTask(
+      target, spawning_server.server_port)
+  spawning_server.Start()
+
+  logging.debug('Test server listening for connections (port=%d)' %
+                spawning_server.server_port)
+  logging.debug('Forwarded port is %d' % forwarded_port)
+
+  config_file = tempfile.NamedTemporaryFile(delete=True)
+
+  config_file.write(json.dumps({
+    'spawner_url_base': 'http://localhost:%d' % forwarded_port
+  }))
+
+  config_file.flush()
+  target.PutFile(config_file.name,
+                 '/tmp/net-test-server-config',
+                 for_package=for_package,
+                 for_realms=for_realms)
+
+  return spawning_server
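+
+
+# Usage sketch (package name and concurrency are illustrative; Stop() is
+# provided by chrome_test_server_spawner.SpawningServer):
+#
+#   spawner = SetupTestServer(target, 4, 'base_unittests')
+#   try:
+#     ...  # run tests that talk to the forwarded test server
+#   finally:
+#     spawner.Stop()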
diff --git a/src/build/fuchsia/qemu_image.py b/src/build/fuchsia/qemu_image.py
new file mode 100644
index 0000000..ab5e040
--- /dev/null
+++ b/src/build/fuchsia/qemu_image.py
@@ -0,0 +1,75 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Workaround for qemu-img bug on arm64 platforms with multiple cores.
+
+Runs qemu-img command with timeout and retries the command if it hangs.
+
+See:
+crbug.com/1046861 QEMU is out of date; current version of qemu-img
+is unstable
+
+https://bugs.launchpad.net/qemu/+bug/1805256 qemu-img hangs on
+rcu_call_ready_event logic in Aarch64 when converting images
+
+TODO(crbug.com/1046861): Remove this workaround when the bug is fixed.
+"""
+
+import logging
+import subprocess
+import tempfile
+import time
+
+
+# qemu-img p99 run time on Cavium ThunderX2 servers is 26 seconds.
+# Using 2x the p99 time as the timeout.
+QEMU_IMG_TIMEOUT_SEC = 52
+
+
+def _ExecQemuImgWithTimeout(command):
+  """Execute qemu-img command in subprocess with timeout.
+
+  Returns: None if command timed out or return code if command completed.
+  """
+
+  logging.info('qemu-img starting')
+  command_output_file = tempfile.NamedTemporaryFile('w')
+  p = subprocess.Popen(command, stdout=command_output_file,
+                       stderr=subprocess.STDOUT)
+  start_sec = time.time()
+  while p.poll() is None and time.time() - start_sec < QEMU_IMG_TIMEOUT_SEC:
+    time.sleep(1)
+  stop_sec = time.time()
+  logging.info('qemu-img duration: %f' % float(stop_sec - start_sec))
+
+  if p.poll() is None:
+    returncode = None
+    p.kill()
+    p.wait()
+  else:
+    returncode = p.returncode
+
+  log_level = logging.WARN if returncode else logging.DEBUG
+  for line in open(command_output_file.name, 'r'):
+    logging.log(log_level, 'qemu-img stdout: ' + line.strip())
+
+  return returncode
+
+
+def ExecQemuImgWithRetry(command):
+  """ Execute qemu-img command in subprocess with 2 retries.
+
+  Raises CalledProcessError if command does not complete successfully.
+  """
+
+  tries = 0
+  status = None
+  while status is None and tries <= 2:
+    tries += 1
+    status = _ExecQemuImgWithTimeout(command)
+
+  if status is None:
+    raise subprocess.CalledProcessError(-1, command)
+  if status:
+    raise subprocess.CalledProcessError(status, command)
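+
+
+# Usage sketch (file names are illustrative):
+#
+#   ExecQemuImgWithRetry(['qemu-img', 'convert', '-f', 'raw', '-O', 'qcow2',
+#                         'in.blk', 'out.qcow2'])
+#
+# A hung qemu-img is killed after QEMU_IMG_TIMEOUT_SEC and retried; if every
+# attempt hangs, CalledProcessError(-1, command) is raised.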
diff --git a/src/build/fuchsia/qemu_target.py b/src/build/fuchsia/qemu_target.py
new file mode 100644
index 0000000..236c41d
--- /dev/null
+++ b/src/build/fuchsia/qemu_target.py
@@ -0,0 +1,255 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running and interacting with Fuchsia on QEMU."""
+
+import boot_data
+import common
+import emu_target
+import hashlib
+import logging
+import os
+import platform
+import qemu_image
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from common import GetHostArchFromPlatform, GetEmuRootForPlatform
+from common import EnsurePathExists
+from qemu_image import ExecQemuImgWithRetry
+from target import FuchsiaTargetException
+
+
+# Virtual networking configuration data for QEMU.
+GUEST_NET = '192.168.3.0/24'
+GUEST_IP_ADDRESS = '192.168.3.9'
+HOST_IP_ADDRESS = '192.168.3.2'
+GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b'
+
+# Capacity of the system's blobstore volume.
+EXTENDED_BLOBSTORE_SIZE = 1073741824  # 1GB
+
+
+def GetTargetType():
+  return QemuTarget
+
+
+class QemuTarget(emu_target.EmuTarget):
+  EMULATOR_NAME = 'qemu'
+
+  def __init__(self,
+               out_dir,
+               target_cpu,
+               system_log_file,
+               cpu_cores,
+               require_kvm,
+               ram_size_mb,
+               fuchsia_out_dir=None):
+    super(QemuTarget, self).__init__(out_dir, target_cpu, system_log_file,
+                                     fuchsia_out_dir)
+    self._cpu_cores = cpu_cores
+    self._require_kvm = require_kvm
+    self._ram_size_mb = ram_size_mb
+
+  @staticmethod
+  def CreateFromArgs(args):
+    return QemuTarget(args.out_dir, args.target_cpu, args.system_log_file,
+                      args.cpu_cores, args.require_kvm, args.ram_size_mb,
+                      args.fuchsia_out_dir)
+
+  def _IsKvmEnabled(self):
+    kvm_supported = sys.platform.startswith('linux') and \
+                    os.access('/dev/kvm', os.R_OK | os.W_OK)
+    same_arch = \
+        (self._target_cpu == 'arm64' and platform.machine() == 'aarch64') or \
+        (self._target_cpu == 'x64' and platform.machine() == 'x86_64')
+    if kvm_supported and same_arch:
+      return True
+    elif self._require_kvm:
+      if same_arch:
+        if not os.path.exists('/dev/kvm'):
+          kvm_error = 'File /dev/kvm does not exist. Please install KVM first.'
+        else:
+          kvm_error = 'To use KVM acceleration, add user to the kvm group '\
+                      'with "sudo usermod -a -G kvm $USER". Log out and back '\
+                      'in for the change to take effect.'
+        raise FuchsiaTargetException(kvm_error)
+      else:
+        raise FuchsiaTargetException('KVM unavailable when CPU architecture '\
+                                     'of host is different from that of'\
+                                     ' target. See --allow-no-kvm.')
+    else:
+      return False
+
+  def _BuildQemuConfig(self):
+    boot_data.AssertBootImagesExist(self._GetTargetSdkArch(), 'qemu')
+
+    emu_command = [
+        '-kernel',
+        EnsurePathExists(
+            boot_data.GetTargetFile('qemu-kernel.kernel',
+                                    self._GetTargetSdkArch(),
+                                    boot_data.TARGET_TYPE_QEMU)),
+        '-initrd',
+        EnsurePathExists(
+            boot_data.GetBootImage(self._out_dir, self._GetTargetSdkArch(),
+                                   boot_data.TARGET_TYPE_QEMU)),
+        '-m',
+        str(self._ram_size_mb),
+        '-smp',
+        str(self._cpu_cores),
+
+        # Attach the blobstore and data volumes. Use snapshot mode to discard
+        # any changes.
+        '-snapshot',
+        '-drive',
+        'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on' %
+        _EnsureBlobstoreQcowAndReturnPath(self._out_dir,
+                                          self._GetTargetSdkArch()),
+        '-device',
+        'virtio-blk-pci,drive=blobstore',
+
+        # Use stdio for the guest OS only; don't attach the QEMU interactive
+        # monitor.
+        '-serial',
+        'stdio',
+        '-monitor',
+        'none',
+    ]
+
+    # Configure the machine to emulate, based on the target architecture.
+    if self._target_cpu == 'arm64':
+      emu_command.extend([
+          '-machine', 'virt,gic_version=3',
+      ])
+    else:
+      emu_command.extend([
+          '-machine', 'q35',
+      ])
+
+    # Configure virtual network. It is used in the tests to connect to
+    # testserver running on the host.
+    netdev_type = 'virtio-net-pci'
+    netdev_config = 'user,id=net0,net=%s,dhcpstart=%s,host=%s' % \
+            (GUEST_NET, GUEST_IP_ADDRESS, HOST_IP_ADDRESS)
+
+    self._host_ssh_port = common.GetAvailableTcpPort()
+    netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port
+    emu_command.extend([
+      '-netdev', netdev_config,
+      '-device', '%s,netdev=net0,mac=%s' % (netdev_type, GUEST_MAC_ADDRESS),
+    ])
+
+    # Configure the CPU to emulate.
+    # On Linux, we can enable lightweight virtualization (KVM) if the host and
+    # guest architectures are the same.
+    if self._IsKvmEnabled():
+      kvm_command = ['-enable-kvm', '-cpu']
+      if self._target_cpu == 'arm64':
+        kvm_command.append('host')
+      else:
+        kvm_command.append('host,migratable=no,+invtsc')
+    else:
+      logging.warning('Unable to launch %s with KVM acceleration. '
+                      'The guest VM will be slow.' % (self.EMULATOR_NAME))
+      if self._target_cpu == 'arm64':
+        kvm_command = ['-cpu', 'cortex-a53']
+      else:
+        kvm_command = ['-cpu', 'Haswell,+smap,-check,-fsgsbase']
+
+    emu_command.extend(kvm_command)
+
+    kernel_args = boot_data.GetKernelArgs(self._out_dir)
+
+    # TERM=dumb tells the guest OS to not emit ANSI commands that trigger
+    # noisy ANSI spew from the user's terminal emulator.
+    kernel_args.append('TERM=dumb')
+
+    # Construct kernel cmd line
+    kernel_args.append('kernel.serial=legacy')
+
+    # Don't 'reboot' the emulator if the kernel crashes
+    kernel_args.append('kernel.halt-on-panic=true')
+
+    emu_command.extend(['-append', ' '.join(kernel_args)])
+
+    return emu_command
+
+  def _BuildCommand(self):
+    if self._target_cpu == 'arm64':
+      qemu_exec = 'qemu-system-aarch64'
+    elif self._target_cpu == 'x64':
+      qemu_exec = 'qemu-system-x86_64'
+    else:
+      raise Exception('Unknown target_cpu: %s' % self._target_cpu)
+
+    qemu_command = [
+        os.path.join(GetEmuRootForPlatform(self.EMULATOR_NAME), 'bin',
+                     qemu_exec)
+    ]
+    qemu_command.extend(self._BuildQemuConfig())
+    qemu_command.append('-nographic')
+    return qemu_command
+
+
+def _ComputeFileHash(filename):
+  hasher = hashlib.md5()
+  with open(filename, 'rb') as f:
+    buf = f.read(4096)
+    while buf:
+      hasher.update(buf)
+      buf = f.read(4096)
+
+  return hasher.hexdigest()
+
+
+def _EnsureBlobstoreQcowAndReturnPath(out_dir, target_arch):
+  """Returns a file containing the Fuchsia blobstore in a QCOW format,
+  with extra buffer space added for growth."""
+
+  qimg_tool = os.path.join(common.GetEmuRootForPlatform('qemu'),
+                           'bin', 'qemu-img')
+  fvm_tool = common.GetHostToolPathFromPlatform('fvm')
+  blobstore_path = boot_data.GetTargetFile('storage-full.blk', target_arch,
+                                           'qemu')
+  qcow_path = os.path.join(out_dir, 'gen', 'blobstore.qcow')
+
+  # Check a hash of the blobstore to determine if we can re-use an existing
+  # extended version of it.
+  blobstore_hash_path = os.path.join(out_dir, 'gen', 'blobstore.hash')
+  current_blobstore_hash = _ComputeFileHash(blobstore_path)
+
+  if os.path.exists(blobstore_hash_path) and os.path.exists(qcow_path):
+    if current_blobstore_hash == open(blobstore_hash_path, 'r').read():
+      return qcow_path
+
+  # Add some extra room for growth to the Blobstore volume.
+  # Fuchsia is unable to automatically extend FVM volumes at runtime so the
+  # volume enlargement must be performed prior to QEMU startup.
+
+  # The 'fvm' tool only supports extending volumes in-place, so make a
+  # temporary copy of 'blobstore.bin' before it's mutated.
+  extended_blobstore = tempfile.NamedTemporaryFile()
+  shutil.copyfile(blobstore_path, extended_blobstore.name)
+  subprocess.check_call([fvm_tool, extended_blobstore.name, 'extend',
+                         '--length', str(EXTENDED_BLOBSTORE_SIZE),
+                         blobstore_path])
+
+  # Construct a QCOW image from the extended, temporary FVM volume.
+  # The result will be retained in the build output directory for re-use.
+  qemu_img_cmd = [qimg_tool, 'convert', '-f', 'raw', '-O', 'qcow2',
+                  '-c', extended_blobstore.name, qcow_path]
+  # TODO(crbug.com/1046861): Remove arm64 call with retries when bug is fixed.
+  if common.GetHostArchFromPlatform() == 'arm64':
+    qemu_image.ExecQemuImgWithRetry(qemu_img_cmd)
+  else:
+    subprocess.check_call(qemu_img_cmd)
+
+  # Write out a hash of the original blobstore file, so that subsequent runs
+  # can trivially check if a cached extended FVM volume is available for reuse.
+  with open(blobstore_hash_path, 'w') as blobstore_hash_file:
+    blobstore_hash_file.write(current_blobstore_hash)
+
+  return qcow_path
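+
+
+# Roughly, _EnsureBlobstoreQcowAndReturnPath() performs the shell equivalent
+# of (tool and file names are illustrative):
+#
+#   cp storage-full.blk /tmp/blobstore.tmp
+#   fvm /tmp/blobstore.tmp extend --length 1073741824
+#   qemu-img convert -f raw -O qcow2 -c /tmp/blobstore.tmp gen/blobstore.qcow
+#
+# plus an MD5-based cache so that the conversion is skipped whenever the
+# source blobstore is unchanged between runs.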
diff --git a/src/build/fuchsia/qemu_target_test.py b/src/build/fuchsia/qemu_target_test.py
new file mode 100755
index 0000000..44b3802
--- /dev/null
+++ b/src/build/fuchsia/qemu_target_test.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python2
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import qemu_target
+import shutil
+import subprocess
+import tempfile
+import time
+import unittest
+
+TEST_PAYLOAD = "Let's get this payload across the finish line!"
+
+tmpdir = tempfile.mkdtemp()
+
+# Register the target with the context manager so that it always gets
+# torn down on process exit. Otherwise there might be lingering QEMU instances
+# if Python crashes or is interrupted.
+# Note: QemuTarget's constructor also takes system_log_file, cpu_cores,
+# require_kvm and ram_size_mb; the values below mirror the common_args.py
+# defaults.
+with qemu_target.QemuTarget(tmpdir, 'x64', None, 4, True, 2048) as target:
+  class TestQemuTarget(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+      target.Start()
+
+    @classmethod
+    def tearDownClass(cls):
+      target.Shutdown()
+      shutil.rmtree(tmpdir)
+
+    def testCopyBidirectional(self):
+      tmp_path = tmpdir + "/payload"
+      with open(tmp_path, "w") as tmpfile:
+        tmpfile.write(TEST_PAYLOAD)
+      target.PutFile(tmp_path, '/tmp/payload')
+
+      tmp_path_roundtrip = tmp_path + ".roundtrip"
+      target.GetFile('/tmp/payload', tmp_path_roundtrip)
+      with open(tmp_path_roundtrip) as roundtrip:
+        self.assertEqual(TEST_PAYLOAD, roundtrip.read())
+
+    def testRunCommand(self):
+      self.assertEqual(0, target.RunCommand(['true']))
+      self.assertEqual(1, target.RunCommand(['false']))
+
+    def testRunCommandPiped(self):
+      proc = target.RunCommandPiped(['cat'],
+                                    stdin=subprocess.PIPE,
+                                    stdout=subprocess.PIPE)
+      proc.stdin.write(TEST_PAYLOAD)
+      proc.stdin.flush()
+      proc.stdin.close()
+      self.assertEqual(TEST_PAYLOAD, proc.stdout.readline())
+      proc.kill()
+
+
+  if __name__ == '__main__':
+    unittest.main()
diff --git a/src/build/fuchsia/remote_cmd.py b/src/build/fuchsia/remote_cmd.py
new file mode 100644
index 0000000..56aa8b1
--- /dev/null
+++ b/src/build/fuchsia/remote_cmd.py
@@ -0,0 +1,131 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import threading
+
+from common import SubprocessCallWithTimeout
+
+_SSH = ['ssh']
+_SCP = ['scp', '-C']  # Use gzip compression.
+_SSH_LOGGER = logging.getLogger('ssh')
+
+COPY_TO_TARGET = 0
+COPY_FROM_TARGET = 1
+
+
+def _IsLinkLocalIPv6(hostname):
+  return hostname.startswith('fe80::')
+
+def _EscapeIfIPv6Address(address):
+  if ':' in address:
+    return '[' + address + ']'
+  else:
+    return address
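+
+# For example: _EscapeIfIPv6Address('fe80::1') returns '[fe80::1]', while
+# hostnames and IPv4 addresses pass through unchanged.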
+
+class CommandRunner(object):
+  """Helper class used to execute commands on a remote host over SSH."""
+
+  def __init__(self, config_path, host, port):
+    """Creates a CommandRunner that connects to the specified |host| and |port|
+    using the ssh config at the specified |config_path|.
+
+    config_path: Full path to SSH configuration.
+    host: The hostname or IP address of the remote host.
+    port: The port to connect to."""
+
+    self._config_path = config_path
+    self._host = host
+    self._port = port
+
+  def _GetSshCommandLinePrefix(self):
+    cmd_prefix = _SSH + ['-F', self._config_path, self._host]
+    if self._port:
+      cmd_prefix += ['-p', str(self._port)]
+    return cmd_prefix
+
+  def RunCommand(self, command, silent, timeout_secs=None):
+    """Executes an SSH command on the remote host and blocks until completion.
+
+    command: A list of strings containing the command and its arguments.
+    silent: If true, suppresses all output from 'ssh'.
+    timeout_secs: If set, limits the amount of time that |command| may run.
+                  Commands which exceed the timeout are killed.
+
+    Returns the exit code from the remote command."""
+
+    ssh_command = self._GetSshCommandLinePrefix() + command
+    logging.warning(ssh_command)
+    _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command))
+    retval, _, _ = SubprocessCallWithTimeout(ssh_command, silent, timeout_secs)
+    return retval
+
+
+  def RunCommandPiped(self, command, stdout, stderr, ssh_args = None, **kwargs):
+    """Executes an SSH command on the remote host and returns a process object
+    with access to the command's stdio streams. Does not block.
+
+    command: A list of strings containing the command and its arguments.
+    stdout: subprocess stdout.  Must not be None.
+    stderr: subprocess stderr.  Must not be None.
+    ssh_args: Arguments that will be passed to SSH.
+    kwargs: A dictionary of parameters to be passed to subprocess.Popen().
+            The parameters can be used to override stdin and stdout, for
+            example.
+
+    Returns a Popen object for the command."""
+
+    if not stdout or not stderr:
+      raise Exception('Stdout/stderr must be specified explicitly')
+
+    if not ssh_args:
+      ssh_args = []
+
+    ssh_command = self._GetSshCommandLinePrefix() + ssh_args + ['--'] + command
+    logging.warning(ssh_command)
+    _SSH_LOGGER.debug(' '.join(ssh_command))
+    return subprocess.Popen(ssh_command, stdout=stdout, stderr=stderr, **kwargs)
+
+
+  def RunScp(self, sources, dest, direction, recursive=False):
+    """Copies a file to or from a remote host using SCP and blocks until
+    completion.
+
+    sources: Paths of the files to be copied.
+    dest: The path that |source| will be copied to.
+    direction: Indicates whether the file should be copied to
+               or from the remote side.
+               Valid values are COPY_TO_TARGET or COPY_FROM_TARGET.
+    recursive: If true, performs a recursive copy.
+
+    Raises subprocess.CalledProcessError if the copy fails."""
+
+    scp_command = _SCP[:]
+    if _SSH_LOGGER.getEffectiveLevel() == logging.DEBUG:
+      scp_command.append('-v')
+    if recursive:
+      scp_command.append('-r')
+
+    host = _EscapeIfIPv6Address(self._host)
+
+    if direction == COPY_TO_TARGET:
+      dest = "%s:%s" % (host, dest)
+    else:
+      sources = ["%s:%s" % (host, source) for source in sources]
+
+    scp_command += ['-F', self._config_path]
+    if self._port:
+      scp_command += ['-P', str(self._port)]
+    scp_command += sources
+    scp_command += [dest]
+
+    _SSH_LOGGER.debug(' '.join(scp_command))
+    try:
+      scp_output = subprocess.check_output(scp_command,
+                                           stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as error:
+      _SSH_LOGGER.info(error.output)
+      raise
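+
+# Usage sketch (all values hypothetical):
+#
+#   runner = CommandRunner('/path/to/ssh_config', '192.168.42.64', 22)
+#   exit_code = runner.RunCommand(['ls', '/tmp'], silent=False)
+#   runner.RunScp(['/tmp/test.log'], '/host/logs', COPY_FROM_TARGET)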
diff --git a/src/build/fuchsia/run_test_package.py b/src/build/fuchsia/run_test_package.py
new file mode 100644
index 0000000..eb4b0e1
--- /dev/null
+++ b/src/build/fuchsia/run_test_package.py
@@ -0,0 +1,280 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Contains a helper function for deploying and executing a packaged
+executable on a Target."""
+
+from __future__ import print_function
+
+import common
+import hashlib
+import logging
+import multiprocessing
+import os
+import re
+import select
+import subprocess
+import sys
+import threading
+import uuid
+
+from symbolizer import BuildIdsPaths, RunSymbolizer, SymbolizerFilter
+
+FAR = common.GetHostToolPathFromPlatform('far')
+
+# Amount of time to wait for the termination of the system log output thread.
+_JOIN_TIMEOUT_SECS = 5
+
+
+def _AttachKernelLogReader(target):
+  """Attaches a kernel log reader as a long-running SSH task."""
+
+  logging.info('Attaching kernel logger.')
+  return target.RunCommandPiped(['dlog', '-f'],
+                                stdin=open(os.devnull, 'r'),
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.STDOUT)
+
+
+class SystemLogReader(object):
+  """Collects and symbolizes Fuchsia system log to a file."""
+
+  def __init__(self):
+    self._listener_proc = None
+    self._symbolizer_proc = None
+    self._system_log = None
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Stops the system logging processes and closes the output file."""
+    if self._symbolizer_proc:
+      self._symbolizer_proc.kill()
+    if self._listener_proc:
+      self._listener_proc.kill()
+    if self._system_log:
+      self._system_log.close()
+
+  def Start(self, target, package_paths, system_log_file):
+    """Start a system log reader as a long-running SSH task."""
+    logging.debug('Writing Fuchsia system log to %s' % system_log_file)
+
+    self._listener_proc = target.RunCommandPiped(['log_listener'],
+                                                 stdout=subprocess.PIPE,
+                                                 stderr=subprocess.STDOUT)
+
+    self._system_log = open(system_log_file, 'w', buffering=1)
+    self._symbolizer_proc = RunSymbolizer(self._listener_proc.stdout,
+                                          self._system_log,
+                                          BuildIdsPaths(package_paths))
+
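+# Usage sketch (mirroring test_runner.py): create the reader in a |with|
+# statement so its processes are always torn down, then attach it once the
+# target is reachable:
+#
+#   with SystemLogReader() as system_logger:
+#     target.Start()
+#     system_logger.Start(target, package_paths, '/tmp/system.log')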
+
+class MergedInputStream(object):
+  """Merges a number of input streams into a UNIX pipe on a dedicated thread.
+  Terminates when the file descriptor of the primary stream (the first in
+  the sequence) is closed."""
+
+  def __init__(self, streams):
+    assert len(streams) > 0
+    self._streams = streams
+    self._output_stream = None
+    self._thread = None
+
+  def Start(self):
+    """Returns a pipe to the merged output stream."""
+
+    read_pipe, write_pipe = os.pipe()
+
+    self._output_stream = os.fdopen(write_pipe, 'w', 1)
+    self._thread = threading.Thread(target=self._Run)
+    self._thread.start()
+
+    return os.fdopen(read_pipe, 'r')
+
+  def _Run(self):
+    streams_by_fd = {}
+    primary_fd = self._streams[0].fileno()
+    for s in self._streams:
+      streams_by_fd[s.fileno()] = s
+
+    # Set when the primary FD is closed. Input from other FDs will continue to
+    # be processed until select() runs dry.
+    flush = False
+
+    # The lifetime of the MergedInputStream is bound to the lifetime of
+    # |primary_fd|.
+    while primary_fd:
+      # When not flushing: block until data is read or an exception occurs.
+      rlist, _, xlist = select.select(streams_by_fd, [], streams_by_fd)
+
+      if len(rlist) == 0 and flush:
+        break
+
+      for fileno in xlist:
+        del streams_by_fd[fileno]
+        if fileno == primary_fd:
+          primary_fd = None
+
+      for fileno in rlist:
+        # TODO(chonggu): Encode streams with 'utf-8' instead of decoding each
+        # line read once we drop Python 2 support.
+        line = streams_by_fd[fileno].readline().decode('utf-8')
+        if line:
+          self._output_stream.write(line)
+        else:
+          del streams_by_fd[fileno]
+          if fileno == primary_fd:
+            primary_fd = None
+
+    # Flush the streams by executing nonblocking reads from the input file
+    # descriptors until no more data is available, or all the streams are
+    # closed.
+    while streams_by_fd:
+      rlist, _, _ = select.select(streams_by_fd, [], [], 0)
+
+      if not rlist:
+        break
+
+      for fileno in rlist:
+        # TODO(chonggu): Switch to encoding='utf-8' once we drop Python 2
+        # support.
+        line = streams_by_fd[fileno].readline().decode('utf-8')
+        if line:
+          self._output_stream.write(line)
+        else:
+          del streams_by_fd[fileno]
+
+
+def _GetComponentUri(package_name):
+  return 'fuchsia-pkg://fuchsia.com/%s#meta/%s.cmx' % (package_name,
+                                                       package_name)
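+
+# For example: _GetComponentUri('base_unittests') returns
+# 'fuchsia-pkg://fuchsia.com/base_unittests#meta/base_unittests.cmx'.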
+
+
+class RunTestPackageArgs:
+  """RunTestPackage() configuration arguments structure.
+
+  code_coverage: If set, the test package will be run via 'runtests', and the
+                 output will be saved to /tmp folder on the device.
+  system_logging: If set, connects a system log reader to the target.
+  test_realm_label: Specifies the realm name that run-test-component should use.
+      This must be specified if a filter file is to be set, or a results summary
+      file fetched after the test suite has run.
+  use_run_test_component: If True then the test package will be run hermetically
+                          via 'run-test-component', rather than using 'run'.
+  """
+
+  def __init__(self):
+    self.code_coverage = False
+    self.system_logging = False
+    self.test_realm_label = None
+    self.use_run_test_component = False
+
+  @staticmethod
+  def FromCommonArgs(args):
+    run_test_package_args = RunTestPackageArgs()
+    run_test_package_args.code_coverage = args.code_coverage
+    run_test_package_args.system_logging = args.include_system_logs
+    return run_test_package_args
+
+
+def _DrainStreamToStdout(stream, quit_event):
+  """Outputs the contents of |stream| until |quit_event| is set."""
+
+  while not quit_event.is_set():
+    rlist, _, _ = select.select([stream], [], [], 0.1)
+    if rlist:
+      line = rlist[0].readline()
+      if not line:
+        return
+      print(line.rstrip())
+
+
+def RunTestPackage(output_dir, target, package_paths, package_name,
+                   package_args, args):
+  """Installs the Fuchsia package at |package_path| on the target,
+  executes it with |package_args|, and symbolizes its output.
+
+  output_dir: The path containing the build output files.
+  target: The deployment Target object that will run the package.
+  package_paths: The paths to the .far packages to be installed.
+  package_name: The name of the primary package to run.
+  package_args: The arguments which will be passed to the Fuchsia process.
+  args: RunTestPackageArgs instance configuring how the package will be run.
+
+  Returns the exit code of the remote package process."""
+
+  system_logger = (_AttachKernelLogReader(target)
+                   if args.system_logging else None)
+  try:
+    if system_logger:
+      # Spin up a thread to asynchronously dump the system log to stdout
+      # for easier diagnoses of early, pre-execution failures.
+      log_output_quit_event = multiprocessing.Event()
+      log_output_thread = threading.Thread(target=lambda: _DrainStreamToStdout(
+          system_logger.stdout, log_output_quit_event))
+      log_output_thread.daemon = True
+      log_output_thread.start()
+
+    with target.GetAmberRepo():
+      target.InstallPackage(package_paths)
+
+      if system_logger:
+        log_output_quit_event.set()
+        log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
+
+      logging.info('Running application.')
+
+      # TODO(crbug.com/1156768): Deprecate runtests.
+      if args.code_coverage:
+        # runtests requires specifying an output directory and a double dash
+        # before the argument list.
+        command = ['runtests', '-o', '/tmp', _GetComponentUri(package_name)]
+        if args.test_realm_label:
+          command += ['--realm-label', args.test_realm_label]
+        command += ['--']
+      elif args.use_run_test_component:
+        command = ['run-test-component']
+        if args.test_realm_label:
+          command += ['--realm-label=%s' % args.test_realm_label]
+        command.append(_GetComponentUri(package_name))
+      else:
+        command = ['run', _GetComponentUri(package_name)]
+
+      command.extend(package_args)
+
+      process = target.RunCommandPiped(command,
+                                       stdin=open(os.devnull, 'r'),
+                                       stdout=subprocess.PIPE,
+                                       stderr=subprocess.STDOUT)
+
+      if system_logger:
+        output_stream = MergedInputStream(
+            [process.stdout, system_logger.stdout]).Start()
+      else:
+        output_stream = process.stdout
+
+      # Run the log data through the symbolizer process.
+      output_stream = SymbolizerFilter(output_stream,
+                                       BuildIdsPaths(package_paths))
+
+      for next_line in output_stream:
+        print(next_line.rstrip())
+
+      process.wait()
+      if process.returncode == 0:
+        logging.info('Process exited normally with status code 0.')
+      else:
+        # The test runner returns an error status code if *any* tests fail,
+        # so we should proceed anyway.
+        logging.warning('Process exited with status code %d.' %
+                        process.returncode)
+
+  finally:
+    if system_logger:
+      logging.info('Terminating kernel log reader.')
+      log_output_quit_event.set()
+      log_output_thread.join()
+      system_logger.kill()
+
+  return process.returncode
diff --git a/src/build/fuchsia/runner_exceptions.py b/src/build/fuchsia/runner_exceptions.py
new file mode 100644
index 0000000..03f872e
--- /dev/null
+++ b/src/build/fuchsia/runner_exceptions.py
@@ -0,0 +1,78 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts exceptions to return codes and prints error messages.
+
+This makes it easier to query build tables for particular error types as
+exit codes are visible to queries while exception stack traces are not."""
+
+import errno
+import fcntl
+import logging
+import os
+import subprocess
+import sys
+import traceback
+
+from target import FuchsiaTargetException
+
+def _PrintException(value, trace):
+  """Prints stack trace and error message for the current exception."""
+
+  traceback.print_tb(trace)
+  print(str(value))
+
+
+def IsStdoutBlocking():
+  """Returns True if sys.stdout is blocking or False if non-blocking.
+
+  sys.stdout should always be blocking.  Non-blocking is associated with
+  intermittent IOErrors (crbug.com/1080858).
+  """
+
+  nonblocking = fcntl.fcntl(sys.stdout, fcntl.F_GETFL) & os.O_NONBLOCK
+  return not nonblocking
+
+
+def HandleExceptionAndReturnExitCode():
+  """Maps the current exception to a return code and prints error messages.
+
+  Mapped exception types are assigned blocks of 8 return codes starting at 64.
+  The choice of 64 as the starting code is based on the Advanced Bash-Scripting
+  Guide (http://tldp.org/LDP/abs/html/exitcodes.html).
+
+  A generic exception is mapped to the start of the block.  More specific
+  exceptions are mapped to numbers inside the block.  For example, a
+  FuchsiaTargetException is mapped to return code 64, unless it involves SSH
+  in which case it is mapped to return code 65.
+
+  Exceptions not specifically mapped go to return code 1.
+
+  Returns the mapped return code."""
+
+  (type, value, trace) = sys.exc_info()
+  _PrintException(value, trace)
+
+  if type is FuchsiaTargetException:
+    if 'ssh' in str(value).lower():
+      print('Error: FuchsiaTargetException: SSH to Fuchsia target failed.')
+      return 65
+    return 64
+  elif type is IOError:
+    if value.errno == errno.EAGAIN:
+      logging.info('Python print to sys.stdout probably failed')
+      if not IsStdoutBlocking():
+        logging.warning('sys.stdout is non-blocking')
+      return 73
+    return 72
+  elif type is subprocess.CalledProcessError:
+    if os.path.basename(value.cmd[0]) == 'scp':
+      print('Error: scp operation failed - %s' % str(value))
+      return 81
+    if os.path.basename(value.cmd[0]) == 'qemu-img':
+      print('Error: qemu-img fuchsia image generation failed.')
+      return 82
+    return 80
+  else:
+    return 1
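+
+
+# Usage sketch (as in test_runner.py): call from a bare top-level handler so
+# the active exception is still available via sys.exc_info():
+#
+#   try:
+#     return RunTests(args)  # hypothetical entry point
+#   except:
+#     return HandleExceptionAndReturnExitCode()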
diff --git a/src/build/fuchsia/runner_logs.py b/src/build/fuchsia/runner_logs.py
new file mode 100644
index 0000000..20ab6b2
--- /dev/null
+++ b/src/build/fuchsia/runner_logs.py
@@ -0,0 +1,96 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates and manages test runner log file objects.
+
+Provides a context manager object for use in a with statement
+and a module level FileStreamFor function for use by clients.
+"""
+
+import collections
+import multiprocessing
+import os
+
+from symbolizer import RunSymbolizer
+
+SYMBOLIZED_SUFFIX = '.symbolized'
+
+_RunnerLogEntry = collections.namedtuple(
+    '_RunnerLogEntry', ['name', 'log_file', 'path', 'symbolize'])
+
+# Module singleton variable.
+_instance = None
+
+
+class RunnerLogManager(object):
+  """ Runner logs object for use in a with statement."""
+
+  def __init__(self, log_dir, build_ids_files):
+    global _instance
+    if _instance:
+      raise Exception('Only one RunnerLogManager can be instantiated')
+
+    self._log_dir = log_dir
+    self._build_ids_files = build_ids_files
+    self._runner_logs = []
+
+    if self._log_dir and not os.path.isdir(self._log_dir):
+      os.makedirs(self._log_dir)
+
+    _instance = self
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    pool = multiprocessing.Pool(4)
+    for log_entry in self._runner_logs:
+      pool.apply_async(_FinalizeLog, (log_entry, self._build_ids_files))
+    pool.close()
+    pool.join()
+    # Reset the module-level singleton; without the 'global' declaration the
+    # assignment below would only bind a local variable.
+    global _instance
+    _instance = None
+
+
+  def _FileStreamFor(self, name, symbolize):
+    if any(elem.name == name for elem in self._runner_logs):
+      raise Exception('RunnerLogManager can only open "%s" once' % name)
+
+    path = os.path.join(self._log_dir, name) if self._log_dir else os.devnull
+    log_file = open(path, 'w')
+
+    self._runner_logs.append(_RunnerLogEntry(name, log_file, path, symbolize))
+
+    return log_file
+
+
+def _FinalizeLog(log_entry, build_ids_files):
+  log_entry.log_file.close()
+
+  if log_entry.symbolize:
+    input_file = open(log_entry.path, 'r')
+    output_file = open(log_entry.path + SYMBOLIZED_SUFFIX, 'w')
+    proc = RunSymbolizer(input_file, output_file, build_ids_files)
+    proc.wait()
+    output_file.close()
+    input_file.close()
+
+
+def IsEnabled():
+  """Returns True if the RunnerLogManager has been created, or False if not."""
+
+  return _instance is not None and _instance._log_dir is not None
+
+
+def FileStreamFor(name, symbolize=False):
+  """Opens a test runner file stream in the test runner log directory.
+
+  If no test runner log directory is specified, output is discarded.
+
+  name: log file name
+  symbolize: if True, make a symbolized copy of the log after closing it.
+
+  Returns an opened log file object."""
+
+  return _instance._FileStreamFor(name, symbolize) if IsEnabled() else open(
+      os.devnull, 'w')
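+
+
+# Usage sketch ('ssh_diagnostic_log' matches the stream name used by
+# target.py; the log directory shown is hypothetical):
+#
+#   with RunnerLogManager('/tmp/runner_logs', build_ids_files):
+#     log = FileStreamFor('ssh_diagnostic_log', symbolize=True)
+#     ...  # pass |log| as the stdout of a diagnostic subprocess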
diff --git a/src/build/fuchsia/sdk-bucket.txt b/src/build/fuchsia/sdk-bucket.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/fuchsia/sdk-bucket.txt
diff --git a/src/build/fuchsia/sdk-hash-files.list b/src/build/fuchsia/sdk-hash-files.list
new file mode 100644
index 0000000..6f37bcd
--- /dev/null
+++ b/src/build/fuchsia/sdk-hash-files.list
@@ -0,0 +1 @@
+{platform}.sdk.sha1
diff --git a/src/build/fuchsia/symbolizer.py b/src/build/fuchsia/symbolizer.py
new file mode 100644
index 0000000..8469d11
--- /dev/null
+++ b/src/build/fuchsia/symbolizer.py
@@ -0,0 +1,70 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+
+from common import SDK_ROOT
+from common import GetHostArchFromPlatform
+from common import GetHostToolPathFromPlatform
+
+
+def BuildIdsPaths(package_paths):
+  """Generates build ids paths for symbolizer processes."""
+
+  # Use a list comprehension (not a lazy map object) so the result can be
+  # iterated more than once under Python 3.
+  build_ids_paths = [
+      os.path.join(os.path.dirname(package_path), 'ids.txt')
+      for package_path in package_paths
+  ]
+  return build_ids_paths
+
+
+def RunSymbolizer(input_file, output_file, build_ids_files):
+  """Starts a symbolizer process.
+
+  input_file: Input file to be symbolized.
+  output_file: Output file for symbolizer stdout and stderr.
+  build_ids_files: Paths to ids.txt files which map build IDs to
+                   unstripped binaries on the filesystem.
+  Returns a Popen object for the started process."""
+
+  symbolizer = GetHostToolPathFromPlatform('symbolizer')
+  symbolizer_cmd = [
+      symbolizer, '--build-id-dir',
+      os.path.join(SDK_ROOT, '.build-id')
+  ]
+  for build_ids_file in build_ids_files:
+    symbolizer_cmd.extend(['--ids-txt', build_ids_file])
+
+  logging.info('Running "%s".' % ' '.join(symbolizer_cmd))
+  return subprocess.Popen(symbolizer_cmd, stdin=input_file, stdout=output_file,
+                          stderr=subprocess.STDOUT, close_fds=True)
+
+
+def SymbolizerFilter(input_file, build_ids_files):
+  """Symbolizes an output stream from a process.
+
+  input_file: Input file to be symbolized.
+  build_ids_files: Paths to ids.txt files which map build IDs to
+                   unstripped binaries on the filesystem.
+  Returns a generator that yields symbolized process output."""
+
+  symbolizer_proc = RunSymbolizer(input_file, subprocess.PIPE, build_ids_files)
+
+  while True:
+    # TODO(chonggu): Switch to encoding='utf-8' once we drop Python 2
+    # support.
+    line = symbolizer_proc.stdout.readline().decode('utf-8')
+    if not line:
+      break
+
+    # Skip spam emitted by the symbolizer that obscures the symbolized output.
+    # TODO(https://crbug.com/1069446): Fix the symbolizer and remove this.
+    if '[[[ELF ' in line:
+      continue
+
+    yield line
+
+  symbolizer_proc.wait()
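+
+
+# Usage sketch (|proc| is assumed to be a Popen created with
+# stdout=subprocess.PIPE):
+#
+#   for line in SymbolizerFilter(proc.stdout, BuildIdsPaths(package_paths)):
+#     print(line.rstrip())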
diff --git a/src/build/fuchsia/target.py b/src/build/fuchsia/target.py
new file mode 100644
index 0000000..abe38f7
--- /dev/null
+++ b/src/build/fuchsia/target.py
@@ -0,0 +1,328 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import json
+import logging
+import os
+import remote_cmd
+import runner_logs
+import subprocess
+import time
+
+
+_SHUTDOWN_CMD = ['dm', 'poweroff']
+_ATTACH_RETRY_INTERVAL = 1
+_ATTACH_RETRY_SECONDS = 120
+
+# Amount of time to wait for Amber to complete package installation, as a
+# mitigation against hangs due to amber/network-related failures.
+_INSTALL_TIMEOUT_SECS = 10 * 60
+
+
+def _GetPackageUri(package_name):
+  """Returns the URI for the specified package name."""
+  return 'fuchsia-pkg://fuchsia.com/%s' % (package_name)
+
+
+def _GetPackageInfo(package_path):
+  """Returns a tuple with the name and version of a package."""
+
+  # Query the metadata file which resides next to the package file.
+  package_info = json.load(
+      open(os.path.join(os.path.dirname(package_path), 'package')))
+  return (package_info['name'], package_info['version'])
+
+
+class _MapIsolatedPathsForPackage:
+  """Callable object which remaps /data and /tmp paths to their component-
+     specific locations, based on the package name and test realm path."""
+
+  def __init__(self, package_name, package_version, realms):
+    realms_path_fragment = '/r/'.join(['r/sys'] + realms)
+    package_sub_path = '{2}/fuchsia.com:{0}:{1}#meta:{0}.cmx/'.format(
+        package_name, package_version, realms_path_fragment)
+    self.isolated_format = '{0}' + package_sub_path + '{1}'
+
+  def __call__(self, path):
+    for isolated_directory in ['/data/' , '/tmp/']:
+      if (path+'/').startswith(isolated_directory):
+        return self.isolated_format.format(isolated_directory,
+                                           path[len(isolated_directory):])
+    return path
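+
+# For example: with package 'base_unittests', version 0 and no extra realms,
+# '/tmp/test_filter.txt' maps to
+# '/tmp/r/sys/fuchsia.com:base_unittests:0#meta:base_unittests.cmx/test_filter.txt'.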
+
+
+class FuchsiaTargetException(Exception):
+  def __init__(self, message):
+    super(FuchsiaTargetException, self).__init__(message)
+
+
+class Target(object):
+  """Base class representing a Fuchsia deployment target."""
+
+  def __init__(self, out_dir, target_cpu):
+    self._out_dir = out_dir
+    self._started = False
+    self._dry_run = False
+    self._target_cpu = target_cpu
+    self._command_runner = None
+
+  @staticmethod
+  def CreateFromArgs(args):
+    raise NotImplementedError()
+
+  @staticmethod
+  def RegisterArgs(arg_parser):
+    pass
+
+  # Functions used by the Python context manager for teardown.
+  def __enter__(self):
+    return self
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    return
+
+  def Start(self):
+    """Handles the instantiation and connection process for the Fuchsia
+    target instance."""
+
+    pass
+
+  def IsStarted(self):
+    """Returns True if the Fuchsia target instance is ready to accept
+    commands."""
+
+    return self._started
+
+  def IsNewInstance(self):
+    """Returns True if the connected target instance is newly provisioned."""
+
+    return True
+
+  def GetCommandRunner(self):
+    """Returns CommandRunner that can be used to execute commands on the
+    target. Most clients should prefer RunCommandPiped() and RunCommand()."""
+
+    self._AssertIsStarted()
+
+    if self._command_runner is None:
+      host, port = self._GetEndpoint()
+      self._command_runner = \
+          remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
+
+    return self._command_runner
+
+  def RunCommandPiped(self, command, **kwargs):
+    """Starts a remote command and immediately returns a Popen object for the
+    command. The caller may interact with the streams, inspect the status code,
+    wait on command termination, etc.
+
+    command: A list of strings representing the command and arguments.
+    kwargs: A dictionary of parameters to be passed to subprocess.Popen().
+            The parameters can be used to override stdin and stdout, for
+            example.
+
+    Returns: a Popen object.
+
+    Note: method does not block."""
+
+    logging.debug('running (non-blocking) \'%s\'.' % ' '.join(command))
+    return self.GetCommandRunner().RunCommandPiped(command, **kwargs)
+
+  def RunCommand(self, command, silent=False, timeout_secs=None):
+    """Executes a remote command and waits for it to finish executing.
+
+    Returns the exit code of the command."""
+
+    logging.debug('running \'%s\'.' % ' '.join(command))
+    return self.GetCommandRunner().RunCommand(command, silent,
+                                              timeout_secs=timeout_secs)
+
+  def EnsureIsolatedPathsExist(self, for_package, for_realms):
+    """Ensures that the package's isolated /data and /tmp exist."""
+    for isolated_directory in ['/data', '/tmp']:
+      self.RunCommand([
+          'mkdir', '-p',
+          _MapIsolatedPathsForPackage(for_package, 0,
+                                      for_realms)(isolated_directory)
+      ])
+
+  def PutFile(self,
+              source,
+              dest,
+              recursive=False,
+              for_package=None,
+              for_realms=[]):
+    """Copies a file from the local filesystem to the target filesystem.
+
+    source: The path of the file being copied.
+    dest: The path on the remote filesystem which will be copied to.
+    recursive: If true, performs a recursive copy.
+    for_package: If specified, isolated paths in |dest| are mapped to their
+                 absolute paths for the package, on the target. This currently
+                 affects the /data and /tmp directories.
+    for_realms: If specified, identifies the sub-realm of 'sys' under which
+                isolated paths (see |for_package|) are stored.
+    """
+
+    assert type(source) is str
+    self.PutFiles([source], dest, recursive, for_package, for_realms)
+
+  def PutFiles(self,
+               sources,
+               dest,
+               recursive=False,
+               for_package=None,
+               for_realms=[]):
+    """Copies files from the local filesystem to the target filesystem.
+
+    sources: List of local file paths to copy from, or a single path.
+    dest: The path on the remote filesystem which will be copied to.
+    recursive: If true, performs a recursive copy.
+    for_package: If specified, /data in the |dest| is mapped to the package's
+                 isolated /data location.
+    for_realms: If specified, identifies the sub-realm of 'sys' under which
+                isolated paths (see |for_package|) are stored.
+    """
+
+    assert type(sources) is tuple or type(sources) is list
+    if for_package:
+      self.EnsureIsolatedPathsExist(for_package, for_realms)
+      dest = _MapIsolatedPathsForPackage(for_package, 0, for_realms)(dest)
+    logging.debug('copy local:%s => remote:%s' % (sources, dest))
+    self.GetCommandRunner().RunScp(sources, dest, remote_cmd.COPY_TO_TARGET,
+                                   recursive)
+
+  def GetFile(self,
+              source,
+              dest,
+              for_package=None,
+              for_realms=[],
+              recursive=False):
+    """Copies a file from the target filesystem to the local filesystem.
+
+    source: The path of the file being copied.
+    dest: The path on the local filesystem which will be copied to.
+    for_package: If specified, /data in paths in |sources| is mapped to the
+                 package's isolated /data location.
+    for_realms: If specified, identifies the sub-realm of 'sys' under which
+                isolated paths (see |for_package|) are stored.
+    recursive: If true, performs a recursive copy.
+    """
+    assert type(source) is str
+    self.GetFiles([source], dest, for_package, for_realms, recursive)
+
+  def GetFiles(self,
+               sources,
+               dest,
+               for_package=None,
+               for_realms=[],
+               recursive=False):
+    """Copies files from the target filesystem to the local filesystem.
+
+    sources: List of remote file paths to copy.
+    dest: The path on the local filesystem which will be copied to.
+    for_package: If specified, /data in paths in |sources| is mapped to the
+                 package's isolated /data location.
+    for_realms: If specified, identifies the sub-realm of 'sys' under which
+                isolated paths (see |for_package|) are stored.
+    recursive: If true, performs a recursive copy.
+    """
+    assert type(sources) is tuple or type(sources) is list
+    self._AssertIsStarted()
+    if for_package:
+      sources = map(_MapIsolatedPathsForPackage(for_package, 0, for_realms),
+                    sources)
+    logging.debug('copy remote:%s => local:%s' % (sources, dest))
+    return self.GetCommandRunner().RunScp(sources, dest,
+                                          remote_cmd.COPY_FROM_TARGET,
+                                          recursive)
+
+  def _GetEndpoint(self):
+    """Returns a (host, port) tuple for the SSH connection to the target."""
+    raise NotImplementedError
+
+  def _GetTargetSdkArch(self):
+    """Returns the Fuchsia SDK architecture name for the target CPU."""
+    if self._target_cpu == 'arm64' or self._target_cpu == 'x64':
+      return self._target_cpu
+    raise FuchsiaTargetException('Unknown target_cpu:' + self._target_cpu)
+
+  def _AssertIsStarted(self):
+    assert self.IsStarted()
+
+  def _WaitUntilReady(self):
+    logging.info('Connecting to Fuchsia using SSH.')
+
+    host, port = self._GetEndpoint()
+    end_time = time.time() + _ATTACH_RETRY_SECONDS
+    ssh_diagnostic_log = runner_logs.FileStreamFor('ssh_diagnostic_log')
+    while time.time() < end_time:
+      runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
+      ssh_proc = runner.RunCommandPiped(['true'],
+                                        ssh_args=['-v'],
+                                        stdout=ssh_diagnostic_log,
+                                        stderr=subprocess.STDOUT)
+      if ssh_proc.wait() == 0:
+        logging.info('Connected!')
+        self._started = True
+        return True
+      time.sleep(_ATTACH_RETRY_INTERVAL)
+
+    logging.error('Timeout limit reached.')
+
+    raise FuchsiaTargetException('Couldn\'t connect using SSH.')
+
+  def _GetSshConfigPath(self):
+    raise NotImplementedError
+
+  def GetAmberRepo(self):
+    """Returns an AmberRepo instance which serves packages for this Target.
+    Callers should typically call GetAmberRepo() in a |with| statement, and
+    install and execute commands inside the |with| block, so that the returned
+    AmberRepo can teardown correctly, if necessary.
+    """
+    pass
+
+  def InstallPackage(self, package_paths):
+    """Installs a package and it's dependencies on the device. If the package is
+    already installed then it will be updated to the new version.
+
+    package_paths: Paths to the .far files to install."""
+
+    with self.GetAmberRepo() as amber_repo:
+      # Publish all packages to the serving TUF repository under |tuf_root|.
+      for package_path in package_paths:
+        amber_repo.PublishPackage(package_path)
+
+      # Resolve all packages, to have them pulled into the device/VM cache.
+      for package_path in package_paths:
+        package_name, package_version = _GetPackageInfo(package_path)
+        logging.info('Resolving %s into cache.' % (package_name))
+        return_code = self.RunCommand(
+            ['pkgctl', 'resolve',
+             _GetPackageUri(package_name), '>/dev/null'],
+            timeout_secs=_INSTALL_TIMEOUT_SECS)
+        if return_code != 0:
+          raise Exception('Error while resolving %s.' % package_name)
+
+      # Verify that the newly resolved versions of packages are reported.
+      for package_path in package_paths:
+        # Use pkgctl get-hash to determine which version will be resolved.
+        package_name, package_version = _GetPackageInfo(package_path)
+        pkgctl = self.RunCommandPiped(
+            ['pkgctl', 'get-hash',
+             _GetPackageUri(package_name)],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE)
+        pkgctl_out, pkgctl_err = pkgctl.communicate()
+
+        # Read the expected version from the meta.far Merkle hash file alongside
+        # the package's FAR.
+        meta_far_path = os.path.join(os.path.dirname(package_path), 'meta.far')
+        meta_far_merkel = subprocess.check_output(
+            [common.GetHostToolPathFromPlatform('merkleroot'),
+             meta_far_path]).split()[0]
+        if pkgctl_out != meta_far_merkel:
+          raise Exception('Hash mismatch for %s after resolve (%s vs %s).' %
+                          (package_name, pkgctl_out, meta_far_merkel))
diff --git a/src/build/fuchsia/test_runner.py b/src/build/fuchsia/test_runner.py
new file mode 100755
index 0000000..afecddd
--- /dev/null
+++ b/src/build/fuchsia/test_runner.py
@@ -0,0 +1,257 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Deploys and runs a test package on a Fuchsia target."""
+
+import argparse
+import os
+import runner_logs
+import sys
+
+from common_args import AddCommonArgs, AddTargetSpecificArgs, \
+                        ConfigureLogging, GetDeploymentTargetForArgs
+from net_test_server import SetupTestServer
+from run_test_package import RunTestPackage, RunTestPackageArgs, SystemLogReader
+from runner_exceptions import HandleExceptionAndReturnExitCode
+from runner_logs import RunnerLogManager
+from symbolizer import BuildIdsPaths
+
+DEFAULT_TEST_SERVER_CONCURRENCY = 4
+
+TEST_DATA_DIR = '/tmp'
+TEST_FILTER_PATH = TEST_DATA_DIR + '/test_filter.txt'
+TEST_LLVM_PROFILE_PATH = TEST_DATA_DIR + '/llvm-profile'
+TEST_PERF_RESULT_PATH = TEST_DATA_DIR + '/test_perf_summary.json'
+TEST_RESULT_PATH = TEST_DATA_DIR + '/test_summary.json'
+
+TEST_REALM_NAME = 'chromium_tests'
+
+
+def AddTestExecutionArgs(arg_parser):
+  test_args = arg_parser.add_argument_group('testing',
+                                            'Test execution arguments')
+  test_args.add_argument('--gtest_filter',
+                         help='GTest filter to use in place of any default.')
+  test_args.add_argument(
+      '--gtest_repeat',
+      help='GTest repeat value to use. This also disables the '
+      'test launcher timeout.')
+  test_args.add_argument(
+      '--test-launcher-retry-limit',
+      help='Number of times that test suite will retry failing '
+      'tests. This is multiplicative with --gtest_repeat.')
+  test_args.add_argument('--test-launcher-shard-index',
+                         type=int,
+                         default=os.environ.get('GTEST_SHARD_INDEX'),
+                         help='Index of this instance amongst swarming shards.')
+  test_args.add_argument('--test-launcher-total-shards',
+                         type=int,
+                         default=os.environ.get('GTEST_TOTAL_SHARDS'),
+                         help='Total number of swarming shards of this suite.')
+  test_args.add_argument('--gtest_break_on_failure',
+                         action='store_true',
+                         default=False,
+                         help='Should GTest break on failure; useful with '
+                         '--gtest_repeat.')
+  test_args.add_argument('--single-process-tests',
+                         action='store_true',
+                         default=False,
+                         help='Runs the tests and the launcher in the same '
+                         'process. Useful for debugging.')
+  test_args.add_argument('--test-launcher-batch-limit',
+                         type=int,
+                         help='Sets the limit of test batch to run in a single '
+                         'process.')
+  # --test-launcher-filter-file is specified relative to --out-dir,
+  # so specifying type=os.path.* will break it.
+  test_args.add_argument(
+      '--test-launcher-filter-file',
+      default=None,
+      help='Override default filter file passed to target test '
+      'process. Set an empty path to disable filtering.')
+  test_args.add_argument('--test-launcher-jobs',
+                         type=int,
+                         help='Sets the number of parallel test jobs.')
+  test_args.add_argument('--test-launcher-summary-output',
+                         help='Where the test launcher will output its json.')
+  test_args.add_argument('--enable-test-server',
+                         action='store_true',
+                         default=False,
+                         help='Enable Chrome test server spawner.')
+  test_args.add_argument(
+      '--test-launcher-bot-mode',
+      action='store_true',
+      default=False,
+      help='Informs the TestLauncher that it should enable '
+      'special allowances for running on a test bot.')
+  test_args.add_argument('--isolated-script-test-output',
+                         help='If present, store test results on this path.')
+  test_args.add_argument(
+      '--isolated-script-test-perf-output',
+      help='If present, store chartjson results on this path.')
+  test_args.add_argument('--use-run-test-component',
+                         default=False,
+                         action='store_true',
+                         help='Run the test package hermetically using '
+                         'run-test-component, rather than run.')
+  test_args.add_argument(
+      '--code-coverage',
+      default=False,
+      action='store_true',
+      help='Gather code coverage information and place it in '
+      'the output directory.')
+  test_args.add_argument('--code-coverage-dir',
+                         default=os.getcwd(),
+                         help='Directory to place code coverage information. '
+                         'Only relevant when --code-coverage is set. '
+                         'Defaults to current directory.')
+  test_args.add_argument('--child-arg',
+                         action='append',
+                         help='Arguments for the test process.')
+  test_args.add_argument('child_args',
+                         nargs='*',
+                         help='Arguments for the test process.')
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  AddTestExecutionArgs(parser)
+  AddCommonArgs(parser)
+  AddTargetSpecificArgs(parser)
+  args = parser.parse_args()
+
+  # Flag out_dir is required for tests launched with this script.
+  if not args.out_dir:
+    raise ValueError("out-dir must be specified.")
+
+  # Code coverage uses runtests, which calls run_test_component.
+  if args.code_coverage:
+    args.use_run_test_component = True
+
+  ConfigureLogging(args)
+
+  child_args = []
+  if args.test_launcher_shard_index is not None:
+    child_args.append(
+        '--test-launcher-shard-index=%d' % args.test_launcher_shard_index)
+  if args.test_launcher_total_shards is not None:
+    child_args.append(
+        '--test-launcher-total-shards=%d' % args.test_launcher_total_shards)
+  if args.single_process_tests:
+    child_args.append('--single-process-tests')
+  if args.test_launcher_bot_mode:
+    child_args.append('--test-launcher-bot-mode')
+  if args.test_launcher_batch_limit:
+    child_args.append('--test-launcher-batch-limit=%d' %
+                       args.test_launcher_batch_limit)
+
+  # Only set --test-launcher-jobs if the caller specifies it, in general.
+  # If the caller enables the test-server then we need to launch the right
+  # number of instances to match the maximum number of parallel test jobs, so
+  # in that case we set --test-launcher-jobs based on the number of CPU cores
+  # specified for the emulator to use.
+  test_concurrency = None
+  if args.test_launcher_jobs:
+    test_concurrency = args.test_launcher_jobs
+  elif args.enable_test_server:
+    if args.device == 'device':
+      test_concurrency = DEFAULT_TEST_SERVER_CONCURRENCY
+    else:
+      test_concurrency = args.cpu_cores
+  if test_concurrency:
+    child_args.append('--test-launcher-jobs=%d' % test_concurrency)
+
+  if args.gtest_filter:
+    child_args.append('--gtest_filter=' + args.gtest_filter)
+  if args.gtest_repeat:
+    child_args.append('--gtest_repeat=' + args.gtest_repeat)
+    child_args.append('--test-launcher-timeout=-1')
+  if args.test_launcher_retry_limit:
+    child_args.append(
+        '--test-launcher-retry-limit=' + args.test_launcher_retry_limit)
+  if args.gtest_break_on_failure:
+    child_args.append('--gtest_break_on_failure')
+  if args.test_launcher_summary_output:
+    child_args.append('--test-launcher-summary-output=' + TEST_RESULT_PATH)
+  if args.isolated_script_test_output:
+    child_args.append('--isolated-script-test-output=' + TEST_RESULT_PATH)
+  if args.isolated_script_test_perf_output:
+    child_args.append('--isolated-script-test-perf-output=' +
+                      TEST_PERF_RESULT_PATH)
+
+  if args.child_arg:
+    child_args.extend(args.child_arg)
+  if args.child_args:
+    child_args.extend(args.child_args)
+
+  test_realms = []
+  if args.use_run_test_component:
+    test_realms = [TEST_REALM_NAME]
+
+  try:
+    with GetDeploymentTargetForArgs(args) as target, \
+         SystemLogReader() as system_logger, \
+         RunnerLogManager(args.runner_logs_dir, BuildIdsPaths(args.package)):
+      target.Start()
+
+      if args.system_log_file and args.system_log_file != '-':
+        system_logger.Start(target, args.package, args.system_log_file)
+
+      if args.test_launcher_filter_file:
+        target.PutFile(args.test_launcher_filter_file,
+                       TEST_FILTER_PATH,
+                       for_package=args.package_name,
+                       for_realms=test_realms)
+        child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH)
+
+      test_server = None
+      if args.enable_test_server:
+        assert test_concurrency
+        test_server = SetupTestServer(target, test_concurrency,
+                                      args.package_name, test_realms)
+
+      run_package_args = RunTestPackageArgs.FromCommonArgs(args)
+      if args.use_run_test_component:
+        run_package_args.test_realm_label = TEST_REALM_NAME
+        run_package_args.use_run_test_component = True
+      returncode = RunTestPackage(args.out_dir, target, args.package,
+                                  args.package_name, child_args,
+                                  run_package_args)
+
+      if test_server:
+        test_server.Stop()
+
+      if args.code_coverage:
+        # Copy all the files in the profile directory. /* is used instead
+        # of recursively copying due to permission issues for the latter.
+        target.GetFile(TEST_LLVM_PROFILE_PATH + '/*', args.code_coverage_dir)
+
+      if args.test_launcher_summary_output:
+        target.GetFile(TEST_RESULT_PATH,
+                       args.test_launcher_summary_output,
+                       for_package=args.package_name,
+                       for_realms=test_realms)
+
+      if args.isolated_script_test_output:
+        target.GetFile(TEST_RESULT_PATH,
+                       args.isolated_script_test_output,
+                       for_package=args.package_name,
+                       for_realms=test_realms)
+
+      if args.isolated_script_test_perf_output:
+        target.GetFile(TEST_PERF_RESULT_PATH,
+                       args.isolated_script_test_perf_output,
+                       for_package=args.package_name,
+                       for_realms=test_realms)
+
+      return returncode
+
+  except:
+    return HandleExceptionAndReturnExitCode()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
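+
+# Example invocation (a sketch; the --package/--package-name/--out-dir flags
+# come from common_args, and all values shown are hypothetical):
+#
+#   build/fuchsia/test_runner.py --out-dir out/fuchsia \
+#       --package out/fuchsia/gen/base_unittests/base_unittests.far \
+#       --package-name base_unittests --gtest_filter 'FooTest.*'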
diff --git a/src/build/fuchsia/update_images.py b/src/build/fuchsia/update_images.py
new file mode 100755
index 0000000..79b8e49
--- /dev/null
+++ b/src/build/fuchsia/update_images.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
+entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
+
+import argparse
+import itertools
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+
+from common import GetHostOsFromPlatform, GetHostArchFromPlatform, \
+                   DIR_SOURCE_ROOT, IMAGES_ROOT
+from update_sdk import DownloadAndUnpackFromCloudStorage, \
+                       GetOverrideCloudStorageBucket, GetSdkHash, \
+                       MakeCleanDirectory, SDK_SIGNATURE_FILE
+
+
+def GetSdkSignature(sdk_hash, boot_images):
+  return 'gn:{sdk_hash}:{boot_images}:'.format(sdk_hash=sdk_hash,
+                                               boot_images=boot_images)
+
+
+def GetAllImages(boot_image_names):
+  if not boot_image_names:
+    return
+
+  all_device_types = ['generic', 'qemu']
+  all_archs = ['x64', 'arm64']
+
+  images_to_download = set()
+
+  for boot_image in boot_image_names.split(','):
+    components = boot_image.split('.')
+    if len(components) != 2:
+      continue
+
+    device_type, arch = components
+    device_images = all_device_types if device_type == '*' else [device_type]
+    arch_images = all_archs if arch == '*' else [arch]
+    images_to_download.update(itertools.product(device_images, arch_images))
+  return images_to_download
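+
+# For example: GetAllImages('qemu.*') expands the wildcard arch and returns
+# {('qemu', 'x64'), ('qemu', 'arm64')}.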
+
+
+def DownloadSdkBootImages(bucket, sdk_hash, boot_image_names, image_root_dir):
+  images_to_download = GetAllImages(boot_image_names)
+  for image_to_download in images_to_download:
+    device_type = image_to_download[0]
+    arch = image_to_download[1]
+    image_output_dir = os.path.join(image_root_dir, arch, device_type)
+    if os.path.exists(image_output_dir):
+      continue
+
+    logging.info('Downloading Fuchsia boot images for %s.%s...' %
+                 (device_type, arch))
+    if bucket == 'fuchsia-sdk':
+      images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
+          '{device_type}.{arch}.tgz'.format(
+              bucket=bucket, sdk_hash=sdk_hash,
+              device_type=device_type, arch=arch)
+    else:
+      images_tarball_url = 'gs://{bucket}/development/{sdk_hash}/images/'\
+          '{device_type}-{arch}.tgz'.format(
+              bucket=bucket, sdk_hash=sdk_hash,
+              device_type=device_type, arch=arch)
+    DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir)
+
+
+def GetNewSignature(sdk_hash, boot_images):
+  return GetSdkSignature(sdk_hash, boot_images)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable debug-level logging.')
+  parser.add_argument(
+      '--boot-images',
+      type=str,
+      required=True,
+      help='List of boot images to download, represented as a comma-separated '
+      'list. Wildcards are allowed.')
+  parser.add_argument(
+      '--default-bucket',
+      type=str,
+      default='fuchsia',
+      help='The Google Cloud Storage bucket in which the Fuchsia images are '
+      'stored. Entry in sdk-bucket.txt will override this flag.')
+  parser.add_argument(
+      '--image-root-dir',
+      default=IMAGES_ROOT,
+      help='Specify the root directory of the downloaded images. Optional.')
+  args = parser.parse_args()
+
+  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
+
+  # If no boot images need to be downloaded, exit.
+  if not args.boot_images:
+    return 0
+
+  # Check whether there's SDK support for this platform.
+  GetHostOsFromPlatform()
+
+  # Use the bucket in sdk-bucket.txt if an entry exists.
+  # Otherwise use the default bucket.
+  bucket = GetOverrideCloudStorageBucket() or args.default_bucket
+
+  sdk_hash = GetSdkHash(bucket)
+  if not sdk_hash:
+    return 1
+
+  signature_filename = os.path.join(args.image_root_dir, SDK_SIGNATURE_FILE)
+  current_signature = (open(signature_filename, 'r').read().strip()
+                       if os.path.exists(signature_filename) else '')
+  new_signature = GetNewSignature(sdk_hash, args.boot_images)
+  if current_signature != new_signature:
+    logging.info('Downloading Fuchsia images %s...' % sdk_hash)
+    MakeCleanDirectory(args.image_root_dir)
+
+    try:
+      DownloadSdkBootImages(bucket, sdk_hash, args.boot_images,
+                            args.image_root_dir)
+      with open(signature_filename, 'w') as f:
+        f.write(new_signature)
+
+    except subprocess.CalledProcessError as e:
+      logging.error("command '%s' failed with status %d.%s", " ".join(e.cmd),
+                    e.returncode, " Details: " + e.output if e.output else "")
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/fuchsia/update_sdk.py b/src/build/fuchsia/update_sdk.py
new file mode 100755
index 0000000..086fac8
--- /dev/null
+++ b/src/build/fuchsia/update_sdk.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
+entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
+
+import argparse
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+
+from common import GetHostOsFromPlatform, GetHostArchFromPlatform, \
+                   DIR_SOURCE_ROOT, SDK_ROOT
+
+sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build'))
+import find_depot_tools
+
+SDK_SIGNATURE_FILE = '.hash'
+SDK_TARBALL_PATH_TEMPLATE = (
+    'gs://{bucket}/development/{sdk_hash}/sdk/{platform}-amd64/gn.tar.gz')
+
+
+def ReadFile(filename):
+  with open(os.path.join(os.path.dirname(__file__), filename), 'r') as f:
+    return f.read()
+
+
+# TODO(crbug.com/1138433): Investigate whether we can deprecate
+# use of sdk_bucket.txt.
+def GetOverrideCloudStorageBucket():
+  """Read bucket entry from sdk_bucket.txt"""
+  return ReadFile('sdk-bucket.txt').strip()
+
+
+def GetSdkHash(bucket):
+  hashes = GetSdkHashList()
+  if not hashes:
+    return None
+  return max(hashes, key=lambda sdk: GetSdkGeneration(bucket, sdk))
+
+
+def GetSdkHashList():
+  """Read filename entries from sdk-hash-files.list (one per line), substitute
+  {platform} in each entry if present, and read from each filename."""
+  platform = GetHostOsFromPlatform()
+  filenames = [
+      line.strip() for line in ReadFile('sdk-hash-files.list').replace(
+          '{platform}', platform).splitlines()
+  ]
+  sdk_hashes = [ReadFile(filename).strip() for filename in filenames]
+  return sdk_hashes
+
+
+def GetSdkGeneration(bucket, hash):
+  if not hash:
+    return None
+
+  sdk_path = GetSdkTarballPath(bucket, hash)
+  cmd = [
+      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'ls', '-L',
+      sdk_path
+  ]
+  logging.debug("Running '%s'", " ".join(cmd))
+  sdk_details = subprocess.check_output(cmd)
+  m = re.search(r'Generation:\s*(\d*)', sdk_details)
+  if not m:
+    raise RuntimeError('Could not find SDK generation for {sdk_path}'.format(
+        sdk_path=sdk_path))
+  return int(m.group(1))
+
+
+def GetSdkTarballPath(bucket, sdk_hash):
+  return SDK_TARBALL_PATH_TEMPLATE.format(
+      bucket=bucket, sdk_hash=sdk_hash, platform=GetHostOsFromPlatform())
+
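+# For example (assuming GetHostOsFromPlatform() returns 'linux'):
+#   GetSdkTarballPath('fuchsia', 'abc123')
+#   -> 'gs://fuchsia/development/abc123/sdk/linux-amd64/gn.tar.gz'
+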
+
+# Updates the modification timestamps of |SDK_ROOT| and its contents to the
+# current time.
+def UpdateTimestampsRecursive():
+  for root, dirs, files in os.walk(SDK_ROOT):
+    for f in files:
+      os.utime(os.path.join(root, f), None)
+    for d in dirs:
+      os.utime(os.path.join(root, d), None)
+
+
+# Fetches a tarball from GCS and uncompresses it to |output_dir|.
+def DownloadAndUnpackFromCloudStorage(url, output_dir):
+  # Pass the compressed stream directly to 'tarfile'; don't bother writing it
+  # to disk first.
+  cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'),
+         'cp', url, '-']
+  logging.debug('Running "%s"', ' '.join(cmd))
+  task = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+  try:
+    tarfile.open(mode='r|gz', fileobj=task.stdout).extractall(path=output_dir)
+  except tarfile.ReadError:
+    task.wait()
+    stderr = task.stderr.read()
+    raise subprocess.CalledProcessError(
+        task.returncode, cmd,
+        "Failed to read a tarfile from gsutil.py.{}".format(
+            stderr if stderr else ""))
+  task.wait()
+  if task.returncode:
+    raise subprocess.CalledProcessError(task.returncode, cmd,
+                                        task.stderr.read())
+
+
+def MakeCleanDirectory(directory_name):
+  if os.path.exists(directory_name):
+    shutil.rmtree(directory_name)
+  os.mkdir(directory_name)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--verbose',
+      '-v',
+      action='store_true',
+      help='Enable debug-level logging.')
+  parser.add_argument(
+      '--default-bucket',
+      type=str,
+      default='fuchsia',
+      help='The Google Cloud Storage bucket in which the Fuchsia SDK is '
+      'stored. Entry in sdk-bucket.txt will override this flag.')
+  args = parser.parse_args()
+
+  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
+
+  # Quietly exit if there's no SDK support for this platform.
+  try:
+    GetHostOsFromPlatform()
+  except Exception:
+    return 0
+
+  # Use the bucket in sdk-bucket.txt if an entry exists.
+  # Otherwise use the default bucket.
+  bucket = GetOverrideCloudStorageBucket() or args.default_bucket
+
+  sdk_hash = GetSdkHash(bucket)
+  if not sdk_hash:
+    return 1
+
+  signature_filename = os.path.join(SDK_ROOT, SDK_SIGNATURE_FILE)
+  current_signature = ''
+  if os.path.exists(signature_filename):
+    with open(signature_filename, 'r') as f:
+      current_signature = f.read().strip()
+  if current_signature != sdk_hash:
+    logging.info('Downloading GN SDK %s...', sdk_hash)
+
+    MakeCleanDirectory(SDK_ROOT)
+    DownloadAndUnpackFromCloudStorage(GetSdkTarballPath(bucket, sdk_hash),
+                                      SDK_ROOT)
+
+  with open(signature_filename, 'w') as f:
+    f.write(sdk_hash)
+
+  UpdateTimestampsRecursive()
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
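
The selection logic in update_sdk.py is the other half worth calling out: GetSdkHash() asks GCS for each candidate tarball's generation number, which is effectively a server-side upload timestamp, and keeps the hash of the newest upload. With the remote lookup stubbed out, the selection is just a keyed max() (the hash names and generation values below are made up):

    # Hypothetical generation numbers; the real ones come from 'gsutil ls -L'.
    generations = {'hash-a': 1610000000000000, 'hash-b': 1620000000000000}

    def get_generation(sdk_hash):
      return generations[sdk_hash]  # Stand-in for GetSdkGeneration().

    hashes = ['hash-a', 'hash-b']
    print(max(hashes, key=get_generation))  # hash-b -- the newest upload wins.
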
diff --git a/src/build/gdb-add-index b/src/build/gdb-add-index
new file mode 100755
index 0000000..73367c8
--- /dev/null
+++ b/src/build/gdb-add-index
@@ -0,0 +1,184 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+function usage_exit {
+  echo "Usage: $0 [-f] [-r] [-n] <paths-to-binaries>..."
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  echo "  -n don't extract the dependencies of each binary with lld."
+  echo "       e.g., $0 -n out/Debug/lib.unstripped/lib*"
+  echo
+  echo "  Set TOOLCHAIN_PREFIX to use a non-default set of binutils."
+  exit 1
+}
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -d "$directory" ]; then
+    echo -n "Removing temp directory $directory..."
+    rm -rf "$directory"
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index_this_file="${should_index}"
+
+  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if $remove_index; then
+      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index_this_file=false
+    fi
+  fi
+
+  if $should_index_this_file; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
+
+    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \
+      -ex "quit"
+    local index_file="$directory/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start) / 1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that, when combined, concurrently index all files in the
+# files_to_index array. The global files_to_index is declared in the main
+# body of the script.
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+cur_file_num=0
+function index_next {
+  if ((cur_file_num >= ${#files_to_index[@]})); then
+    return
+  fi
+
+  async_index "${files_to_index[cur_file_num]}"
+  ((cur_file_num += 1)) || true
+}
+
+########
+### Main body of the script.
+
+remove_index=false
+should_index=true
+should_index_deps=true
+files_to_index=()
+while (($# > 0)); do
+  case "$1" in
+    -h)
+      usage_exit
+      ;;
+    -f)
+      remove_index=true
+      ;;
+    -r)
+      remove_index=true
+      should_index=false
+      ;;
+    -n)
+      should_index_deps=false
+      ;;
+    -*)
+      echo "Invalid option: $1" >&2
+      usage_exit
+      ;;
+    *)
+      if [[ ! -f "$1" ]]; then
+        echo "Path $1 does not exist."
+        exit 1
+      fi
+      files_to_index+=("$1")
+      ;;
+  esac
+  shift
+done
+
+if ((${#files_to_index[@]} == 0)); then
+  usage_exit
+fi
+
+dependencies=()
+if $should_index_deps; then
+  for file in "${files_to_index[@]}"; do
+      # Append the shared library dependencies of this file that
+      # have the same dirname. The dirname is a signal that these
+      # shared libraries were part of the same build as the binary.
+      dependencies+=( \
+        $(ldd "$file" 2>/dev/null \
+          | grep $(dirname "$file") \
+          | sed "s/.*[ \t]\(.*\) (.*/\1/") \
+      )
+  done
+fi
+files_to_index+=("${dependencies[@]}")
+
+# Ensure we clean up on exit.
+trap on_exit EXIT INT
+
+# We're good to go! Create temp directory for index files.
+directory=$(mktemp -d)
+echo "Made temp directory $directory."
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+index_tasks=${INDEX_TASKS:-4}
+for ((i = 0; i < index_tasks; i++)); do
+  index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to treat a > 128 exit code as an
+# indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e, since technically the "wait" is failing.
+wait
+while (($? > 128)); do
+  wait
+done
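
The USR1 trap above is hand-rolled bounded concurrency: at most INDEX_TASKS index jobs run at once, and each job that finishes signals the parent to start the next. For comparison, the same scheduling policy expressed with a stock worker pool (a sketch only; it does not reproduce the gdb/objcopy work itself):

    import concurrent.futures
    import os

    def index_all(files, index_one_file):
      """Runs index_one_file over |files|, at most INDEX_TASKS at a time."""
      workers = int(os.environ.get('INDEX_TASKS', '4'))  # IO bound, so small.
      with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as pool:
        # list() drains the iterator so worker exceptions propagate here.
        list(pool.map(index_one_file, files))

The bash version exists because the script predates having a convenient pool primitive available on all the machines it runs on; the semaphore-via-signal trick is what the pool abstracts away.
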
diff --git a/src/build/get_landmines.py b/src/build/get_landmines.py
new file mode 100755
index 0000000..a32ab99
--- /dev/null
+++ b/src/build/get_landmines.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+from __future__ import print_function
+
+import sys
+
+import landmine_utils
+
+
+host_os = landmine_utils.host_os
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-resort
+  # bandaid fix if a CL that got landed has a build dependency bug and all bots
+  # need to be cleaned up. If you're writing a new CL that causes build
+  # dependency problems, fix the dependency problems instead of adding a
+  # landmine.
+  #
+  # Before adding or changing a landmine consider the consequences of doing so.
+  # Doing so will wipe out every output directory on every Chrome developer's
+  # machine. This can be particularly problematic on Windows where the directory
+  # deletion may well fail (locked files, command prompt in the directory,
+  # etc.), and generated .sln and .vcxproj files will be deleted.
+  #
+  # This output directory deletion will be repeated when going back and forth
+  # across the change that added the landmine, adding to the cost. There are
+  # usually less troublesome alternatives.
+
+  if host_os() == 'win':
+    print('Compile on cc_unittests fails due to symbols removed in r185063.')
+  if host_os() == 'linux':
+    print('Builders switching from make to ninja will clobber on this.')
+  if host_os() == 'mac':
+    print('Switching from bundle to unbundled dylib (issue 14743002).')
+  if host_os() in ('win', 'mac'):
+    print('Improper dependency for create_nmf.py broke in r240802, '
+          'fixed in r240860.')
+  if host_os() == 'win':
+    print('Switch to VS2015 Update 3, 14393 SDK')
+  print('Need to clobber everything due to an IDL change in r154579 (blink)')
+  print('Need to clobber everything due to gen file moves in r175513 (Blink)')
+  print('Clobber to get rid of obselete test plugin after r248358')
+  print('Clobber to rebuild GN files for V8')
+  print('Clobber to get rid of stale generated mojom.h files')
+  print('Need to clobber everything due to build_nexe change in nacl r13424')
+  print(
+      '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...')
+  print('blink_resources.grd changed: crbug.com/400860')
+  print('ninja dependency cycle: crbug.com/408192')
+  print('Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).')
+  print('Another clobber for missing NaCl gyp deps (crbug.com/427427).')
+  print(
+      'Clobber to fix GN not picking up increased ID range (crbug.com/444902)')
+  print('Remove NaCl toolchains from the output dir (crbug.com/456902)')
+  if host_os() == 'win':
+    print('Clobber to delete stale generated files (crbug.com/510086)')
+  if host_os() == 'mac':
+    print('Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)')
+  if host_os() == 'mac':
+    print('Clobber to remove libsystem.dylib. See crbug.com/620075')
+  if host_os() == 'mac':
+    print('Clobber to get past mojo gen build error (crbug.com/679607)')
+  if host_os() == 'win':
+    print('Clobber Windows to fix strange PCH-not-rebuilt errors.')
+  print('CLobber all to fix GN breakage (crbug.com/736215)')
+  print('The Great Blink mv for source files (crbug.com/768828)')
+  if host_os() == 'linux':
+    print('Clobber to workaround buggy .ninja_deps cycle (crbug.com/934404)')
+
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
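
This script only prints reasons; the clobber decision is made by a consumer that compares the printed list against a copy cached in the output directory and wipes the directory when they differ. A sketch of that comparison, assuming a cache file named .landmines (the name Chromium's landmines.py uses, taken on faith here):

    import os
    import subprocess
    import sys

    def needs_clobber(out_dir, script='get_landmines.py'):
      """True when the emitted landmine list differs from the cached one."""
      new = subprocess.check_output([sys.executable, script])
      cache = os.path.join(out_dir, '.landmines')  # Assumed cache-file name.
      if not os.path.exists(cache):
        return False  # First build: nothing cached yet, record and move on.
      with open(cache, 'rb') as f:
        return f.read() != new

Because the comparison is textual, each printed line acts as a cache key: even fixing the 'obselete' misspelling above would trigger a clobber on every checkout, which is why the existing strings stay byte-for-byte stable.
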
diff --git a/src/build/get_symlink_targets.py b/src/build/get_symlink_targets.py
new file mode 100755
index 0000000..3285ff1
--- /dev/null
+++ b/src/build/get_symlink_targets.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# Copyright (c) 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints the target paths of the given symlinks.
+
+Prints out each target in the order that the links were passed in.
+"""
+
+import os
+import sys
+
+
+def main():
+  for link_name in sys.argv[1:]:
+    if not os.path.islink(link_name):
+      sys.stderr.write("%s is not a link" % link_name)
+      return 1
+    target = os.readlink(link_name)
+    if not os.path.isabs(target):
+      target = os.path.join(os.path.dirname(link_name), target)
+    print(os.path.realpath(target))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
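
The relative-target branch is the subtle part: os.readlink() returns the raw link text, so a relative target has to be joined against the link's own directory before realpath() can canonicalize it. A quick POSIX illustration (temporary paths, illustrative only):

    import os
    import tempfile

    d = tempfile.mkdtemp()
    open(os.path.join(d, 'real.txt'), 'w').close()
    os.symlink('real.txt', os.path.join(d, 'link.txt'))  # Relative target.

    raw = os.readlink(os.path.join(d, 'link.txt'))
    print(raw)  # 'real.txt' -- meaningless without the link's directory.
    print(os.path.realpath(os.path.join(d, raw)))  # .../real.txt
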
diff --git a/src/build/gn_helpers.py b/src/build/gn_helpers.py
new file mode 100644
index 0000000..c6e4129
--- /dev/null
+++ b/src/build/gn_helpers.py
@@ -0,0 +1,542 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString() and FromGNString(), to convert between
+serialized GN variables and Python variables.
+
+To use in an arbitrary Python file in the build:
+
+  import os
+  import sys
+
+  sys.path.append(os.path.join(os.path.dirname(__file__),
+                               os.pardir, os.pardir, 'build'))
+  import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory.
+"""
+
+import json
+import os
+import re
+import sys
+
+
+_CHROMIUM_ROOT = os.path.join(os.path.dirname(__file__), os.pardir)
+
+BUILD_VARS_FILENAME = 'build_vars.json'
+IMPORT_RE = re.compile(r'^import\("//(\S+)"\)')
+
+
+class GNError(Exception):
+  pass
+
+
+# Computes ASCII code of an element of encoded Python 2 str / Python 3 bytes.
+_Ord = ord if sys.version_info.major < 3 else lambda c: c
+
+
+def _TranslateToGnChars(s):
+  for decoded_ch in s.encode('utf-8'):  # str in Python 2, bytes in Python 3.
+    code = _Ord(decoded_ch)  # int
+    if code in (34, 36, 92):  # For '"', '$', or '\\'.
+      yield '\\' + chr(code)
+    elif 32 <= code < 127:
+      yield chr(code)
+    else:
+      yield '$0x%02X' % code
+
+
+def ToGNString(value, pretty=False):
+  """Returns a stringified GN equivalent of a Python value.
+
+  Args:
+    value: The Python value to convert.
+    pretty: Whether to pretty print. If true, then non-empty lists are rendered
+        recursively with one item per line, with indents. Otherwise lists are
+        rendered on a single line.
+  Returns:
+    The stringified GN equivalent to |value|.
+
+  Raises:
+    GNError: |value| cannot be printed to GN.
+  """
+
+  if sys.version_info.major < 3:
+    basestring_compat = basestring
+  else:
+    basestring_compat = str
+
+  # Emits all output tokens without intervening whitespace.
+  def GenerateTokens(v, level):
+    if isinstance(v, basestring_compat):
+      yield '"' + ''.join(_TranslateToGnChars(v)) + '"'
+
+    elif isinstance(v, bool):
+      yield 'true' if v else 'false'
+
+    elif isinstance(v, int):
+      yield str(v)
+
+    elif isinstance(v, list):
+      yield '['
+      for i, item in enumerate(v):
+        if i > 0:
+          yield ','
+        for tok in GenerateTokens(item, level + 1):
+          yield tok
+      yield ']'
+
+    elif isinstance(v, dict):
+      if level > 0:
+        yield '{'
+      for key in sorted(v):
+        if not isinstance(key, basestring_compat):
+          raise GNError('Dictionary key is not a string.')
+        if not key or key[0].isdigit() or not key.replace('_', '').isalnum():
+          raise GNError('Dictionary key is not a valid GN identifier.')
+        yield key  # No quotations.
+        yield '='
+        for tok in GenerateTokens(v[key], level + 1):
+          yield tok
+      if level > 0:
+        yield '}'
+
+    else:  # Not supporting float: Add only when needed.
+      raise GNError('Unsupported type when printing to GN.')
+
+  can_start = lambda tok: tok and tok not in ',}]='
+  can_end = lambda tok: tok and tok not in ',{[='
+
+  # Adds whitespace, trying to keep everything (except dicts) on one line.
+  def PlainGlue(gen):
+    prev_tok = None
+    for i, tok in enumerate(gen):
+      if i > 0:
+        if can_end(prev_tok) and can_start(tok):
+          yield '\n'  # New dict item.
+        elif prev_tok == '[' and tok == ']':
+          yield '  '  # Special case for [].
+        elif tok != ',':
+          yield ' '
+      yield tok
+      prev_tok = tok
+
+  # Adds whitespace so non-empty lists can span multiple lines, with indent.
+  def PrettyGlue(gen):
+    prev_tok = None
+    level = 0
+    for i, tok in enumerate(gen):
+      if i > 0:
+        if can_end(prev_tok) and can_start(tok):
+          yield '\n' + '  ' * level  # New dict item.
+        elif tok == '=' or prev_tok == '=':
+          yield ' '  # Separator before and after '=', on same line.
+      if tok in ']}':
+        level -= 1
+      # Exclude '[]' and '{}' cases.
+      if int(prev_tok == '[') + int(tok == ']') == 1 or \
+         int(prev_tok == '{') + int(tok == '}') == 1:
+        yield '\n' + '  ' * level
+      yield tok
+      if tok in '[{':
+        level += 1
+      if tok == ',':
+        yield '\n' + '  ' * level
+      prev_tok = tok
+
+  token_gen = GenerateTokens(value, 0)
+  ret = ''.join((PrettyGlue if pretty else PlainGlue)(token_gen))
+  # Add terminating '\n' for dict |value| or multi-line output.
+  if isinstance(value, dict) or '\n' in ret:
+    return ret + '\n'
+  return ret
+
+
+def FromGNString(input_string):
+  """Converts the input string from a GN serialized value to Python values.
+
+  For details on supported types see GNValueParser.Parse() below.
+
+  If your GN script did:
+    something = [ "file1", "file2" ]
+    args = [ "--values=$something" ]
+  The command line would look something like:
+    --values="[ \"file1\", \"file2\" ]"
+  Which when interpreted as a command line gives the value:
+    [ "file1", "file2" ]
+
+  You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+  Although the Python 'ast' module will parse many forms of such input, it
+  will not handle GN escaping properly, nor GN booleans. You should use this
+  function instead.
+
+
+  A NOTE ON STRING HANDLING:
+
+  If you just pass a string on the command line to your Python script, or use
+  string interpolation on a string variable, the strings will not be quoted:
+    str = "asdf"
+    args = [ str, "--value=$str" ]
+  Will yield the command line:
+    asdf --value=asdf
+  The unquoted asdf string will not be valid input to this function, which
+  accepts only quoted strings like GN scripts. In such cases, you can just use
+  the Python string literal directly.
+
+  The main use case for this is other types, in particular lists. When
+  using string interpolation on a list (as in the top example), the embedded
+  strings will be quoted and escaped according to GN rules so the list can be
+  re-parsed to get the same result.
+  """
+  parser = GNValueParser(input_string)
+  return parser.Parse()
+
+
+def FromGNArgs(input_string):
+  """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+  Given a whitespace-separated list of
+
+    <ident> = (integer | string | boolean | <list of the former>)
+
+  gn assignments, this returns a Python dict, i.e.:
+
+    FromGNArgs('foo=true\nbar=1\n') -> { 'foo': True, 'bar': 1 }.
+
+  Only simple types and lists are supported; variables, structs, calls,
+  and other more complicated things are not.
+
+  This routine is meant to handle only the simple sorts of values that
+  arise in parsing --args.
+  """
+  parser = GNValueParser(input_string)
+  return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+  """Given a string with GN escaping, returns the unescaped string.
+
+  Be careful not to feed this input from a Python parsing function like
+  'ast', because it will do Python unescaping, which will be incorrect when
+  fed into the GN unescaper.
+
+  Args:
+    value: Input string to unescape.
+  """
+  result = ''
+  i = 0
+  while i < len(value):
+    if value[i] == '\\':
+      if i < len(value) - 1:
+        next_char = value[i + 1]
+        if next_char in ('$', '"', '\\'):
+          # These are the escaped characters GN supports.
+          result += next_char
+          i += 1
+        else:
+          # Any other backslash is a literal.
+          result += '\\'
+    else:
+      result += value[i]
+    i += 1
+  return result
+
+
+def _IsDigitOrMinus(char):
+  return char in '-0123456789'
+
+
+class GNValueParser(object):
+  """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper function FromGNString() above.
+
+  If you expect input as a specific type, you can also call one of the Parse*
+  functions directly. All functions throw GNError on invalid input.
+  """
+
+  def __init__(self, string, checkout_root=_CHROMIUM_ROOT):
+    self.input = string
+    self.cur = 0
+    self.checkout_root = checkout_root
+
+  def IsDone(self):
+    return self.cur == len(self.input)
+
+  def ReplaceImports(self):
+    """Replaces import(...) lines with the contents of the imports.
+
+    Recurses on itself until there are no imports remaining, in the case of
+    nested imports.
+    """
+    lines = self.input.splitlines()
+    if not any(line.startswith('import(') for line in lines):
+      return
+    for line in lines:
+      if not line.startswith('import('):
+        continue
+      regex_match = IMPORT_RE.match(line)
+      if not regex_match:
+        raise GNError('Not a valid import string: %s' % line)
+      import_path = os.path.join(self.checkout_root, regex_match.group(1))
+      with open(import_path) as f:
+        imported_args = f.read()
+      self.input = self.input.replace(line, imported_args)
+    # Call ourselves again if we've just replaced an import() with additional
+    # imports.
+    self.ReplaceImports()
+
+  def _ConsumeWhitespace(self):
+    while not self.IsDone() and self.input[self.cur] in ' \t\n':
+      self.cur += 1
+
+  def ConsumeCommentAndWhitespace(self):
+    self._ConsumeWhitespace()
+
+    # Consume each comment, line by line.
+    while not self.IsDone() and self.input[self.cur] == '#':
+      # Consume the rest of the comment, up until the end of the line.
+      while not self.IsDone() and self.input[self.cur] != '\n':
+        self.cur += 1
+      # Move the cursor to the next line (if there is one).
+      if not self.IsDone():
+        self.cur += 1
+
+      self._ConsumeWhitespace()
+
+  def Parse(self):
+    """Converts a string representing a printed GN value to the Python type.
+
+    See additional usage notes on FromGNString() above.
+
+    * GN booleans ('true', 'false') will be converted to Python booleans.
+
+    * GN numbers ('123') will be converted to Python numbers.
+
+    * GN strings (double-quoted as in '"asdf"') will be converted to Python
+      strings with GN escaping rules. GN string interpolation (embedded
+      variables preceded by $) are not supported and will be returned as
+      literals.
+
+    * GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+    * GN scopes ('{ ... }') are not supported.
+
+    Raises:
+      GNError: Parse fails.
+    """
+    result = self._ParseAllowTrailing()
+    self.ConsumeCommentAndWhitespace()
+    if not self.IsDone():
+      raise GNError("Trailing input after parsing:\n  " + self.input[self.cur:])
+    return result
+
+  def ParseArgs(self):
+    """Converts a whitespace-separated list of ident=literals to a dict.
+
+    See additional usage notes on FromGNArgs(), above.
+
+    Raises:
+      GNError: Parse fails.
+    """
+    d = {}
+
+    self.ReplaceImports()
+    self.ConsumeCommentAndWhitespace()
+
+    while not self.IsDone():
+      ident = self._ParseIdent()
+      self.ConsumeCommentAndWhitespace()
+      if self.input[self.cur] != '=':
+        raise GNError("Unexpected token: " + self.input[self.cur:])
+      self.cur += 1
+      self.ConsumeCommentAndWhitespace()
+      val = self._ParseAllowTrailing()
+      self.ConsumeCommentAndWhitespace()
+      d[ident] = val
+
+    return d
+
+  def _ParseAllowTrailing(self):
+    """Internal version of Parse() that doesn't check for trailing stuff."""
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError("Expected input to parse.")
+
+    next_char = self.input[self.cur]
+    if next_char == '[':
+      return self.ParseList()
+    elif next_char == '{':
+      return self.ParseScope()
+    elif _IsDigitOrMinus(next_char):
+      return self.ParseNumber()
+    elif next_char == '"':
+      return self.ParseString()
+    elif self._ConstantFollows('true'):
+      return True
+    elif self._ConstantFollows('false'):
+      return False
+    else:
+      raise GNError("Unexpected token: " + self.input[self.cur:])
+
+  def _ParseIdent(self):
+    ident = ''
+
+    next_char = self.input[self.cur]
+    if not next_char.isalpha() and not next_char == '_':
+      raise GNError("Expected an identifier: " + self.input[self.cur:])
+
+    ident += next_char
+    self.cur += 1
+
+    next_char = self.input[self.cur]
+    while next_char.isalpha() or next_char.isdigit() or next_char == '_':
+      ident += next_char
+      self.cur += 1
+      next_char = self.input[self.cur]
+
+    return ident
+
+  def ParseNumber(self):
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Expected number but got nothing.')
+
+    begin = self.cur
+
+    # The first character can include a negative sign.
+    if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]):
+      self.cur += 1
+    while not self.IsDone() and self.input[self.cur].isdigit():
+      self.cur += 1
+
+    number_string = self.input[begin:self.cur]
+    if not number_string or number_string == '-':
+      raise GNError('Not a valid number.')
+    return int(number_string)
+
+  def ParseString(self):
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Expected string but got nothing.')
+
+    if self.input[self.cur] != '"':
+      raise GNError('Expected string beginning in a " but got:\n  ' +
+                    self.input[self.cur:])
+    self.cur += 1  # Skip over quote.
+
+    begin = self.cur
+    while not self.IsDone() and self.input[self.cur] != '"':
+      if self.input[self.cur] == '\\':
+        self.cur += 1  # Skip over the backslash.
+        if self.IsDone():
+          raise GNError('String ends in a backslash in:\n  ' + self.input)
+      self.cur += 1
+
+    if self.IsDone():
+      raise GNError('Unterminated string:\n  ' + self.input[begin:])
+
+    end = self.cur
+    self.cur += 1  # Consume trailing ".
+
+    return UnescapeGNString(self.input[begin:end])
+
+  def ParseList(self):
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Expected list but got nothing.')
+
+    # Skip over opening '['.
+    if self.input[self.cur] != '[':
+      raise GNError('Expected [ for list but got:\n  ' + self.input[self.cur:])
+    self.cur += 1
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Unterminated list:\n  ' + self.input)
+
+    list_result = []
+    previous_had_trailing_comma = True
+    while not self.IsDone():
+      if self.input[self.cur] == ']':
+        self.cur += 1  # Skip over ']'.
+        return list_result
+
+      if not previous_had_trailing_comma:
+        raise GNError('List items not separated by comma.')
+
+      list_result += [ self._ParseAllowTrailing() ]
+      self.ConsumeCommentAndWhitespace()
+      if self.IsDone():
+        break
+
+      # Consume comma if there is one.
+      previous_had_trailing_comma = self.input[self.cur] == ','
+      if previous_had_trailing_comma:
+        # Consume comma.
+        self.cur += 1
+        self.ConsumeCommentAndWhitespace()
+
+    raise GNError('Unterminated list:\n  ' + self.input)
+
+  def ParseScope(self):
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Expected scope but got nothing.')
+
+    # Skip over opening '{'.
+    if self.input[self.cur] != '{':
+      raise GNError('Expected { for scope but got:\n ' + self.input[self.cur:])
+    self.cur += 1
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Unterminated scope:\n ' + self.input)
+
+    scope_result = {}
+    while not self.IsDone():
+      if self.input[self.cur] == '}':
+        self.cur += 1
+        return scope_result
+
+      ident = self._ParseIdent()
+      self.ConsumeCommentAndWhitespace()
+      if self.input[self.cur] != '=':
+        raise GNError("Unexpected token: " + self.input[self.cur:])
+      self.cur += 1
+      self.ConsumeCommentAndWhitespace()
+      val = self._ParseAllowTrailing()
+      self.ConsumeCommentAndWhitespace()
+      scope_result[ident] = val
+
+    raise GNError('Unterminated scope:\n ' + self.input)
+
+  def _ConstantFollows(self, constant):
+    """Checks and maybe consumes a string constant at current input location.
+
+    Param:
+      constant: The string constant to check.
+
+    Returns:
+      True if |constant| follows immediately at the current location in the
+      input. In this case, the string is consumed as a side effect. Otherwise,
+      returns False and the current position is unchanged.
+    """
+    end = self.cur + len(constant)
+    if end > len(self.input):
+      return False  # Not enough room.
+    if self.input[self.cur:end] == constant:
+      self.cur = end
+      return True
+    return False
+
+
+def ReadBuildVars(output_directory):
+  """Parses $output_directory/build_vars.json into a dict."""
+  with open(os.path.join(output_directory, BUILD_VARS_FILENAME)) as f:
+    return json.load(f)
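
A quick round trip through the two directions, with outputs matching the unit tests in the next file:

    import gn_helpers

    # Python -> GN: dict keys are sorted and emitted as top-level assignments.
    print(gn_helpers.ToGNString({'kEy': 137, '_42A_Zaz_': [False, True]}))
    # _42A_Zaz_ = [ false, true ]
    # kEy = 137

    # GN args -> Python: booleans and numbers come back as native types.
    print(gn_helpers.FromGNArgs('foo=true\nbar=1\n'))
    # {'foo': True, 'bar': 1}
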
diff --git a/src/build/gn_helpers_unittest.py b/src/build/gn_helpers_unittest.py
new file mode 100644
index 0000000..f4b756c
--- /dev/null
+++ b/src/build/gn_helpers_unittest.py
@@ -0,0 +1,315 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import mock
+import sys
+import textwrap
+import unittest
+
+import gn_helpers
+
+
+class UnitTest(unittest.TestCase):
+  def test_ToGNString(self):
+    test_cases = [
+        (42, '42', '42'), ('foo', '"foo"', '"foo"'), (True, 'true', 'true'),
+        (False, 'false', 'false'), ('', '""', '""'),
+        ('\\$"$\\', '"\\\\\\$\\"\\$\\\\"', '"\\\\\\$\\"\\$\\\\"'),
+        (' \t\r\n', '" $0x09$0x0D$0x0A"', '" $0x09$0x0D$0x0A"'),
+        (u'\u2713', '"$0xE2$0x9C$0x93"', '"$0xE2$0x9C$0x93"'),
+        ([], '[  ]', '[]'), ([1], '[ 1 ]', '[\n  1\n]\n'),
+        ([3, 1, 4, 1], '[ 3, 1, 4, 1 ]', '[\n  3,\n  1,\n  4,\n  1\n]\n'),
+        (['a', True, 2], '[ "a", true, 2 ]', '[\n  "a",\n  true,\n  2\n]\n'),
+        ({
+            'single': 'item'
+        }, 'single = "item"\n', 'single = "item"\n'),
+        ({
+            'kEy': 137,
+            '_42A_Zaz_': [False, True]
+        }, '_42A_Zaz_ = [ false, true ]\nkEy = 137\n',
+         '_42A_Zaz_ = [\n  false,\n  true\n]\nkEy = 137\n'),
+        ([1, 'two',
+          ['"thr,.$\\', True, False, [],
+           u'(\u2713)']], '[ 1, "two", [ "\\"thr,.\\$\\\\", true, false, ' +
+         '[  ], "($0xE2$0x9C$0x93)" ] ]', '''[
+  1,
+  "two",
+  [
+    "\\"thr,.\\$\\\\",
+    true,
+    false,
+    [],
+    "($0xE2$0x9C$0x93)"
+  ]
+]
+'''),
+        ({
+            's': 'foo',
+            'n': 42,
+            'b': True,
+            'a': [3, 'x']
+        }, 'a = [ 3, "x" ]\nb = true\nn = 42\ns = "foo"\n',
+         'a = [\n  3,\n  "x"\n]\nb = true\nn = 42\ns = "foo"\n'),
+        (
+            [[[], [[]]], []],
+            '[ [ [  ], [ [  ] ] ], [  ] ]',
+            '[\n  [\n    [],\n    [\n      []\n    ]\n  ],\n  []\n]\n',
+        ),
+        (
+            [{
+                'a': 1,
+                'c': {
+                    'z': 8
+                },
+                'b': []
+            }],
+            '[ { a = 1\nb = [  ]\nc = { z = 8 } } ]\n',
+            '[\n  {\n    a = 1\n    b = []\n    c = {\n' +
+            '      z = 8\n    }\n  }\n]\n',
+        )
+    ]
+    for obj, exp_ugly, exp_pretty in test_cases:
+      out_ugly = gn_helpers.ToGNString(obj)
+      self.assertEqual(exp_ugly, out_ugly)
+      out_pretty = gn_helpers.ToGNString(obj, pretty=True)
+      self.assertEqual(exp_pretty, out_pretty)
+
+  def test_UnescapeGNString(self):
+    # Backslash followed by a \, $, or " means the following character without
+    # the special meaning. Backslash followed by anything else is a literal.
+    self.assertEqual(
+        gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
+        '\\as$\\asd"')
+
+  def test_FromGNString(self):
+    self.assertEqual(
+        gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
+        [ 1, -20, True, False, [ 'as"', [] ] ])
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('123 456')
+      parser.Parse()
+
+  def test_ParseBool(self):
+    parser = gn_helpers.GNValueParser('true')
+    self.assertEqual(parser.Parse(), True)
+
+    parser = gn_helpers.GNValueParser('false')
+    self.assertEqual(parser.Parse(), False)
+
+  def test_ParseNumber(self):
+    parser = gn_helpers.GNValueParser('123')
+    self.assertEqual(parser.ParseNumber(), 123)
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')
+      parser.ParseNumber()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('a123')
+      parser.ParseNumber()
+
+  def test_ParseString(self):
+    parser = gn_helpers.GNValueParser('"asdf"')
+    self.assertEqual(parser.ParseString(), 'asdf')
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # Unquoted.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('"trailing')  # Unterminated.
+      parser.ParseString()
+
+  def test_ParseList(self):
+    parser = gn_helpers.GNValueParser('[1,]')  # Optional end comma OK.
+    self.assertEqual(parser.ParseList(), [ 1 ])
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # No [].
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('[1, 2')  # Unterminated
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('[1 2]')  # No separating comma.
+      parser.ParseList()
+
+  def test_ParseScope(self):
+    parser = gn_helpers.GNValueParser('{a = 1}')
+    self.assertEqual(parser.ParseScope(), {'a': 1})
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # No {}.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{a = 1')  # Unterminated.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{"a" = 1}')  # Not identifier.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{a = }')  # No value.
+      parser.ParseScope()
+
+  def test_FromGNArgs(self):
+    # Booleans and numbers should work; whitespace is allowed.
+    self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
+                     {'foo': True, 'bar': 1})
+
+    # Whitespace is not required; strings should also work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
+                     {'foo': 'bar baz'})
+
+    # Comments should work (and be ignored).
+    gn_args_lines = [
+        '# Top-level comment.',
+        'foo = true',
+        'bar = 1  # In-line comment followed by whitespace.',
+        ' ',
+        'baz = false',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), {
+        'foo': True,
+        'bar': 1,
+        'baz': False
+    })
+
+    # Lists should work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
+                     {'foo': [1, 2, 3]})
+
+    # Empty strings should return an empty dict.
+    self.assertEqual(gn_helpers.FromGNArgs(''), {})
+    self.assertEqual(gn_helpers.FromGNArgs(' \n '), {})
+
+    # Comments should work everywhere (and be ignored).
+    gn_args_lines = [
+        '# Top-level comment.',
+        '',
+        '# Variable comment.',
+        'foo = true',
+        'bar = [',
+        '    # Value comment in list.',
+        '    1,',
+        '    2,',
+        ']',
+        '',
+        'baz # Comment anywhere, really',
+        '  = # also here',
+        '    4',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), {
+        'foo': True,
+        'bar': [1, 2],
+        'baz': 4
+    })
+
+    # Scope should be parsed, even empty ones.
+    gn_args_lines = [
+        'foo = {',
+        '  a = 1',
+        '  b = [',
+        '    { },',
+        '    {',
+        '      c = 1',
+        '    },',
+        '  ]',
+        '}',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)),
+                     {'foo': {
+                         'a': 1,
+                         'b': [
+                             {},
+                             {
+                                 'c': 1,
+                             },
+                         ]
+                     }})
+
+    # Non-identifiers should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('123 = true')
+
+    # References to other variables should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('foo = bar')
+
+    # References to functions should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
+
+    # Underscores in identifiers should work.
+    self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
+                     {'_foo': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
+                     {'foo_bar': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
+                     {'foo_': True})
+
+  def test_ReplaceImports(self):
+    # Should be a no-op on args inputs without any imports.
+    parser = gn_helpers.GNValueParser(
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_arg2 = "val2"
+    """))
+    parser.ReplaceImports()
+    self.assertEqual(
+        parser.input,
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_arg2 = "val2"
+    """))
+
+    # A single "import(...)" line should be replaced with the contents of the
+    # file being imported.
+    parser = gn_helpers.GNValueParser(
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        import("//some/args/file.gni")
+        some_arg2 = "val2"
+    """))
+    fake_import = 'some_imported_arg = "imported_val"'
+    builtin_var = '__builtin__' if sys.version_info.major < 3 else 'builtins'
+    open_fun = '{}.open'.format(builtin_var)
+    with mock.patch(open_fun, mock.mock_open(read_data=fake_import)):
+      parser.ReplaceImports()
+    self.assertEqual(
+        parser.input,
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_imported_arg = "imported_val"
+        some_arg2 = "val2"
+    """))
+
+    # No trailing parenthesis should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import("//some/args/file.gni"'))
+      parser.ReplaceImports()
+
+    # No double quotes should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import(//some/args/file.gni)'))
+      parser.ReplaceImports()
+
+    # A path that's not source absolute should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import("some/relative/args/file.gni")'))
+      parser.ReplaceImports()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/gn_logs.gni b/src/build/gn_logs.gni
new file mode 100644
index 0000000..79a92b9
--- /dev/null
+++ b/src/build/gn_logs.gni
@@ -0,0 +1,8 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+# Log lines for gn_logs.txt that originate from within //build.
+build_gn_logs = [ "#### get_concurrent_links.py ####" ] + concurrent_links_logs
diff --git a/src/build/gn_run_binary.py b/src/build/gn_run_binary.py
new file mode 100644
index 0000000..d7e2926
--- /dev/null
+++ b/src/build/gn_run_binary.py
@@ -0,0 +1,36 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+# This script is designed to run binaries produced by the current build. We
+# may prefix it with "./" to avoid picking up system versions that might
+# also be on the path.
+path = sys.argv[1]
+if not os.path.isabs(path):
+  path = './' + path
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+ret = subprocess.call(args)
+if ret != 0:
+  if ret <= -100:
+    # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to
+    # recognize and differentiate in hex. In order to print them as unsigned
+    # hex we need to add 4 Gig to them.
+    print('%s failed with exit code 0x%08X' % (sys.argv[1], ret + (1 << 32)))
+  else:
+    print('%s failed with exit code %d' % (sys.argv[1], ret))
+sys.exit(ret)
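
The `ret + (1 << 32)` in the error path is two's-complement unwrapping: the `ret <= -100` branch assumes the Windows NTSTATUS comes back from subprocess as a negative 32-bit value, and adding 2**32 recovers the conventional unsigned hex spelling. Worked through for an access violation:

    ret = -1073741819  # A crashed child reported as a negative 32-bit value.
    print('0x%08X' % (ret + (1 << 32)))  # 0xC0000005, STATUS_ACCESS_VIOLATION
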
diff --git a/src/build/gyp_helper.py b/src/build/gyp_helper.py
deleted file mode 100644
index 63debcd..0000000
--- a/src/build/gyp_helper.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file helps gyp_chromium and landmines correctly set up the gyp
-# environment from chromium.gyp_env on disk
-
-import os
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.dirname(SCRIPT_DIR)
-
-
-def apply_gyp_environment_from_file(file_path):
-  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
-  if not os.path.exists(file_path):
-    return
-  with open(file_path, 'rU') as f:
-    file_contents = f.read()
-  try:
-    file_data = eval(file_contents, {'__builtins__': None}, None)
-  except SyntaxError, e:
-    e.filename = os.path.abspath(file_path)
-    raise
-  supported_vars = (
-      'CC',
-      'CHROMIUM_GYP_FILE',
-      'CHROMIUM_GYP_SYNTAX_CHECK',
-      'CXX',
-      'GYP_DEFINES',
-      'GYP_GENERATOR_FLAGS',
-      'GYP_GENERATOR_OUTPUT',
-      'GYP_GENERATORS',
-  )
-  for var in supported_vars:
-    file_val = file_data.get(var)
-    if file_val:
-      if var in os.environ:
-        print 'INFO: Environment value for "%s" overrides value in %s.' % (
-            var, os.path.abspath(file_path)
-        )
-      else:
-        os.environ[var] = file_val
-
-
-def apply_chromium_gyp_env():
-  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
-    # Update the environment based on chromium.gyp_env
-    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
-    apply_gyp_environment_from_file(path)
diff --git a/src/build/install-build-deps-android.sh b/src/build/install-build-deps-android.sh
new file mode 100755
index 0000000..882e7be
--- /dev/null
+++ b/src/build/install-build-deps-android.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See https://www.chromium.org/developers/how-tos/android-build-instructions
+
+args="$@"
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+# Exit if any commands fail.
+set -e
+
+lsb_release=$(lsb_release --codename --short)
+
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args[@]}"
+
+# Fix deps
+sudo apt-get -f install
+
+# common
+sudo apt-get -y install lib32z1 lighttpd xvfb x11-utils
+
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+
+# Required for apk-patch-size-estimator
+sudo apt-get -y install bsdiff
+
+echo "install-build-deps-android.sh complete."
diff --git a/src/build/install-build-deps.sh b/src/build/install-build-deps.sh
new file mode 100755
index 0000000..407915c
--- /dev/null
+++ b/src/build/install-build-deps.sh
@@ -0,0 +1,805 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux/build_instructions.md
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--[no-]backwards-compatible: enable or disable installation of packages
+        that are no longer currently needed and have been removed from this
+        script.  Useful for bisection."
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Build list of apt packages in dpkg --get-selections format.
+build_apt_package_list() {
+  echo "Building apt package list." >&2
+  apt-cache dumpavail | \
+    python3 -c '\
+      import re,sys; \
+      o = sys.stdin.read(); \
+      p = {"i386": ":i386"}; \
+      f = re.M | re.S; \
+      r = re.compile(r"^Package: (.+?)$.+?^Architecture: (.+?)$", f); \
+      m = ["%s%s" % (x, p.get(y, "")) for x, y in re.findall(r, o)]; \
+      print("\n".join(m))'
+}
+
+# Checks whether a particular package is available in the repos.
+# Uses pre-formatted ${apt_package_list}.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  if [ -z "${apt_package_list}" ]; then
+    echo "Call build_apt_package_list() prior to calling package_exists()" >&2
+    apt_package_list=$(build_apt_package_list)
+  fi
+  # `grep` takes a regex string, so the +'s in package names, e.g. "libstdc++",
+  # need to be escaped.
+  local escaped="$(echo $1 | sed 's/[\~\+\.\:-]/\\&/g')"
+  [ ! -z "$(grep "^${escaped}$" <<< "${apt_package_list}")" ]
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode.  Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while [ "$1" != "" ]
+do
+  case "$1" in
+  --syms)                    do_inst_syms=1;;
+  --no-syms)                 do_inst_syms=0;;
+  --lib32)                   do_inst_lib32=1;;
+  --arm)                     do_inst_arm=1;;
+  --no-arm)                  do_inst_arm=0;;
+  --chromeos-fonts)          do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts)       do_inst_chromeos_fonts=0;;
+  --nacl)                    do_inst_nacl=1;;
+  --no-nacl)                 do_inst_nacl=0;;
+  --backwards-compatible)    do_inst_backwards_compatible=1;;
+  --no-backwards-compatible) do_inst_backwards_compatible=0;;
+  --add-cross-tool-repo)     add_cross_tool_repo=1;;
+  --no-prompt)               do_default=1
+                             do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check)             do_quick_check=1;;
+  --unsupported)             do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if [ "$do_inst_arm" = "1" ]; then
+  do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  echo "try: sudo apt-get install lsb-release" >&2
+  exit 1;
+fi
+
+distro_codename=$(lsb_release --codename --short)
+distro_id=$(lsb_release --id --short)
+supported_codenames="(trusty|xenial|bionic|disco|eoan|focal|groovy)"
+supported_ids="(Debian)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $distro_codename =~ $supported_codenames &&
+        ! $distro_id =~ $supported_ids ]]; then
+    echo -e "ERROR: The only supported distros are\n" \
+      "\tUbuntu 14.04 LTS (trusty with EoL April 2022)\n" \
+      "\tUbuntu 16.04 LTS (xenial with EoL April 2024)\n" \
+      "\tUbuntu 18.04 LTS (bionic with EoL April 2028)\n" \
+      "\tUbuntu 20.04 LTS (focal with Eol April 2030)\n" \
+      "\tUbuntu 20.10 (groovy)\n" \
+      "\tDebian 8 (jessie) or later" >&2
+    exit 1
+  fi
+
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+if [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [ "$do_inst_lib32" = "1" ] || [ "$do_inst_nacl" = "1" ]; then
+    sudo dpkg --add-architecture i386
+  fi
+  sudo apt-get update
+fi
+
+# Populate ${apt_package_list} for package_exists() parsing.
+apt_package_list=$(build_apt_package_list)
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev mesa-common-dev"
+
+if package_exists realpath; then
+  chromeos_dev_list="${chromeos_dev_list} realpath"
+fi
+
+# Packages needed for development
+dev_list="\
+  binutils
+  bison
+  bzip2
+  cdbs
+  curl
+  dbus-x11
+  dpkg-dev
+  elfutils
+  devscripts
+  fakeroot
+  flex
+  git-core
+  gperf
+  libappindicator3-dev
+  libasound2-dev
+  libatspi2.0-dev
+  libbrlapi-dev
+  libbz2-dev
+  libcairo2-dev
+  libcap-dev
+  libc6-dev
+  libcups2-dev
+  libcurl4-gnutls-dev
+  libdrm-dev
+  libelf-dev
+  libevdev-dev
+  libffi-dev
+  libgbm-dev
+  libglib2.0-dev
+  libglu1-mesa-dev
+  libgtk-3-dev
+  libkrb5-dev
+  libnspr4-dev
+  libnss3-dev
+  libpam0g-dev
+  libpci-dev
+  libpulse-dev
+  libsctp-dev
+  libspeechd-dev
+  libsqlite3-dev
+  libssl-dev
+  libudev-dev
+  libva-dev
+  libwww-perl
+  libxshmfence-dev
+  libxslt1-dev
+  libxss-dev
+  libxt-dev
+  libxtst-dev
+  locales
+  openbox
+  p7zip
+  patch
+  perl
+  pkg-config
+  python-setuptools
+  rpm
+  ruby
+  subversion
+  uuid-dev
+  wdiff
+  x11-utils
+  xcompmgr
+  xz-utils
+  zip
+  $chromeos_dev_list
+"
+
+if package_exists python-is-python2; then
+  dev_list="${dev_list} python-is-python2 python2-dev"
+else
+  dev_list="${dev_list} python python-dev"
+fi
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+  dev_list="${dev_list} libc6-i386 lib32stdc++6"
+
+  # lib32gcc-s1 used to be called lib32gcc1 in older distros.
+  if package_exists lib32gcc-s1; then
+    dev_list="${dev_list} lib32gcc-s1"
+  elif package_exists lib32gcc1; then
+    dev_list="${dev_list} lib32gcc1"
+  fi
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# List of required run-time libraries
+common_lib_list="\
+  libappindicator3-1
+  libasound2
+  libatk1.0-0
+  libatspi2.0-0
+  libc6
+  libcairo2
+  libcap2
+  libcups2
+  libdrm2
+  libevdev2
+  libexpat1
+  libfontconfig1
+  libfreetype6
+  libgbm1
+  libglib2.0-0
+  libgtk-3-0
+  libpam0g
+  libpango-1.0-0
+  libpci3
+  libpcre3
+  libpixman-1-0
+  libspeechd2
+  libstdc++6
+  libsqlite3-0
+  libuuid1
+  libwayland-egl1-mesa
+  libx11-6
+  libx11-xcb1
+  libxau6
+  libxcb1
+  libxcomposite1
+  libxcursor1
+  libxdamage1
+  libxdmcp6
+  libxext6
+  libxfixes3
+  libxi6
+  libxinerama1
+  libxrandr2
+  libxrender1
+  libxtst6
+  zlib1g
+"
+
+if package_exists libffi7; then
+  common_lib_list="${common_lib_list} libffi7"
+elif package_exists libffi6; then
+  common_lib_list="${common_lib_list} libffi6"
+fi
+
+# Full list of required run-time libraries
+lib_list="\
+  $common_lib_list
+  $chromeos_lib_list
+"
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386 libpci3:i386"
+
+# 32-bit libraries needed for a 32-bit build
+lib32_list="$lib32_list libx11-xcb1:i386"
+
+# Packages that have been removed from this script.  Regardless of configuration
+# or options passed to this script, whenever a package is removed, it should be
+# added here.
+backwards_compatible_list="\
+  7za
+  fonts-indic
+  fonts-ipafont
+  fonts-stix
+  fonts-thai-tlwg
+  fonts-tlwg-garuda
+  g++
+  git-svn
+  language-pack-da
+  language-pack-fr
+  language-pack-he
+  language-pack-zh-hant
+  libappindicator-dev
+  libappindicator1
+  libdconf-dev
+  libdconf1
+  libdconf1:i386
+  libexif-dev
+  libexif12
+  libexif12:i386
+  libgbm-dev
+  libgconf-2-4:i386
+  libgconf2-dev
+  libgl1-mesa-dev
+  libgl1-mesa-glx:i386
+  libgles2-mesa-dev
+  libgtk-3-0:i386
+  libgtk2.0-0
+  libgtk2.0-0:i386
+  libgtk2.0-dev
+  mesa-common-dev
+  msttcorefonts
+  ttf-dejavu-core
+  ttf-indic-fonts
+  ttf-kochi-gothic
+  ttf-kochi-mincho
+  ttf-mscorefonts-installer
+  xfonts-mathml
+"
+if package_exists python-is-python2; then
+  backwards_compatible_list="${backwards_compatible_list} python-dev"
+fi
+
+case $distro_codename in
+  trusty)
+    backwards_compatible_list+=" \
+      libgbm-dev-lts-trusty
+      libgl1-mesa-dev-lts-trusty
+      libgl1-mesa-glx-lts-trusty:i386
+      libgles2-mesa-dev-lts-trusty
+      mesa-common-dev-lts-trusty"
+    ;;
+  xenial)
+    backwards_compatible_list+=" \
+      libgbm-dev-lts-xenial
+      libgl1-mesa-dev-lts-xenial
+      libgl1-mesa-glx-lts-xenial:i386
+      libgles2-mesa-dev-lts-xenial
+      mesa-common-dev-lts-xenial"
+    ;;
+esac
+
+# arm cross toolchain packages needed to build chrome on armhf
+EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
+EM_SOURCE=$(cat <<EOF
+# Repo added by Chromium $0
+${EM_REPO}
+# deb-src http://emdebian.org/tools/debian/ jessie main
+EOF
+)
+EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
+GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
+case $distro_codename in
+  jessie)
+    eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
+    CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
+    arm_list="libc6-dev:armhf
+              linux-libc-dev:armhf"
+    if [ "$do_inst_arm" = "1" ]; then
+      if $(dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null); then
+        arm_list+=" ${GPP_ARM_PACKAGE}"
+      else
+        if [ "${add_cross_tool_repo}" = "1" ]; then
+          gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
+          gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
+          if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
+            echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
+          fi
+          arm_list+=" ${GPP_ARM_PACKAGE}"
+        else
+          echo "The Debian Cross-toolchains repository is necessary to"
+          echo "cross-compile Chromium for arm."
+          echo "Rerun with --add-deb-cross-tool-repo to have it added for you."
+        fi
+      fi
+    fi
+    ;;
+  # All necessary ARM packages are available on the default repos on
+  # Debian 9 and later.
+  *)
+    arm_list="libc6-dev-armhf-cross
+              linux-libc-dev-armhf-cross
+              ${GPP_ARM_PACKAGE}"
+    ;;
+esac
+
+# Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+case $distro_codename in
+  trusty)
+    arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+                gcc-4.8-multilib-arm-linux-gnueabihf"
+    ;;
+  xenial|bionic)
+    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+                gcc-5-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+  disco|eoan)
+    arm_list+=" g++-9-multilib-arm-linux-gnueabihf
+                gcc-9-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+  focal)
+    arm_list+=" g++-10-multilib-arm-linux-gnueabihf
+                gcc-10-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+  groovy)
+    arm_list+=" g++-10-multilib-arm-linux-gnueabihf
+                gcc-10-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf
+                g++-10-arm-linux-gnueabihf
+                gcc-10-arm-linux-gnueabihf"
+    ;;
+esac
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="\
+  g++-mingw-w64-i686
+  lib32z1-dev
+  libasound2:i386
+  libcap2:i386
+  libelf-dev:i386
+  libfontconfig1:i386
+  libglib2.0-0:i386
+  libgpm2:i386
+  libncurses5:i386
+  lib32ncurses5-dev
+  libnss3:i386
+  libpango-1.0-0:i386
+  libssl-dev:i386
+  libtinfo-dev
+  libtinfo-dev:i386
+  libtool
+  libuuid1:i386
+  libxcomposite1:i386
+  libxcursor1:i386
+  libxdamage1:i386
+  libxi6:i386
+  libxrandr2:i386
+  libxss1:i386
+  libxtst6:i386
+  texinfo
+  xvfb
+  ${naclports_list}
+"
+
+# Some package names have changed over time
+if package_exists libssl1.1; then
+  nacl_list="${nacl_list} libssl1.1:i386"
+elif package_exists libssl1.0.2; then
+  nacl_list="${nacl_list} libssl1.0.2:i386"
+else
+  nacl_list="${nacl_list} libssl1.0.0:i386"
+fi
+if package_exists libtinfo5; then
+  nacl_list="${nacl_list} libtinfo5"
+fi
+if package_exists libpng16-16; then
+  lib_list="${lib_list} libpng16-16"
+else
+  lib_list="${lib_list} libpng12-0"
+fi
+if package_exists libnspr4; then
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.8; then
+  dev_list="${dev_list} libbrlapi0.8"
+elif package_exists libbrlapi0.7; then
+  dev_list="${dev_list} libbrlapi0.7"
+elif package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+if package_exists apache2.2-bin; then
+  dev_list="${dev_list} apache2.2-bin"
+else
+  dev_list="${dev_list} apache2-bin"
+fi
+if package_exists libav-tools; then
+  dev_list="${dev_list} libav-tools"
+fi
+if package_exists php7.4-cgi; then
+  dev_list="${dev_list} php7.4-cgi libapache2-mod-php7.4"
+elif package_exists php7.3-cgi; then
+  dev_list="${dev_list} php7.3-cgi libapache2-mod-php7.3"
+elif package_exists php7.2-cgi; then
+  dev_list="${dev_list} php7.2-cgi libapache2-mod-php7.2"
+elif package_exists php7.1-cgi; then
+  dev_list="${dev_list} php7.1-cgi libapache2-mod-php7.1"
+elif package_exists php7.0-cgi; then
+  dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
+else
+  dev_list="${dev_list} php5-cgi libapache2-mod-php5"
+fi
+
+# Most python 2 packages are removed in Ubuntu 20.10, but the build doesn't seem
+# to need them, so only install them if they're available.
+if package_exists python-crypto; then
+  dev_list="${dev_list} python-crypto"
+fi
+if package_exists python-numpy; then
+  dev_list="${dev_list} python-numpy"
+fi
+if package_exists python-openssl; then
+  dev_list="${dev_list} python-openssl"
+fi
+if package_exists python-psutil; then
+  dev_list="${dev_list} python-psutil"
+fi
+if package_exists python-yaml; then
+  dev_list="${dev_list} python-yaml"
+fi
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+if package_exists libgnome-keyring0; then
+  lib_list="${lib_list} libgnome-keyring0"
+fi
+if package_exists libgnome-keyring-dev; then
+  lib_list="${lib_list} libgnome-keyring-dev"
+fi
+if package_exists libvulkan-dev; then
+  dev_list="${dev_list} libvulkan-dev"
+fi
+if package_exists libvulkan1; then
+  lib_list="${lib_list} libvulkan1"
+fi
+if package_exists libinput10; then
+  lib_list="${lib_list} libinput10"
+fi
+if package_exists libinput-dev; then
+  dev_list="${dev_list} libinput-dev"
+fi
+if package_exists snapcraft; then
+  dev_list="${dev_list} snapcraft"
+fi
+
+# Cross-toolchain strip is needed for building the sysroots.
+if package_exists binutils-arm-linux-gnueabihf; then
+  dev_list="${dev_list} binutils-arm-linux-gnueabihf"
+fi
+if package_exists binutils-aarch64-linux-gnu; then
+  dev_list="${dev_list} binutils-aarch64-linux-gnu"
+fi
+if package_exists binutils-mipsel-linux-gnu; then
+  dev_list="${dev_list} binutils-mipsel-linux-gnu"
+fi
+if package_exists binutils-mips64el-linux-gnuabi64; then
+  dev_list="${dev_list} binutils-mips64el-linux-gnuabi64"
+fi
+
+# When cross-building for arm/Android on 64-bit systems, the host binaries
+# that are part of v8 need to be compiled with -m32, which means that basic
+# multilib support is needed.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+  # appropriate value of X and Y by seeing what version the current
+  # distribution's g++-multilib package depends on.
+  multilib_package=$(apt-cache depends g++-multilib --important | \
+      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+  lib32_list="$lib32_list $multilib_package"
+fi
+
+if [ "$do_inst_syms" = "1" ]; then
+  echo "Including debugging symbols."
+
+  # Debian is in the process of transitioning to automatic debug packages, which
+  # have the -dbgsym suffix (https://wiki.debian.org/AutomaticDebugPackages).
+  # Untransitioned packages have the -dbg suffix.  And on some systems, neither
+  # will be available, so exclude the ones that are missing.
+  dbg_package_name() {
+    if package_exists "$1-dbgsym"; then
+      echo "$1-dbgsym"
+    elif package_exists "$1-dbg"; then
+      echo "$1-dbg"
+    fi
+  }
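+  # For example, "$(dbg_package_name libc6)" expands to "libc6-dbgsym" on
+  # systems that ship automatic debug packages, to "libc6-dbg" on
+  # untransitioned systems, and to the empty string when neither exists.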
+
+  for package in ${common_lib_list}; do
+    dbg_list="$dbg_list $(dbg_package_name ${package})"
+  done
+
+  # Debugging symbols packages not following common naming scheme
+  if [ "$(dbg_package_name libstdc++6)" == "" ]; then
+    if package_exists libstdc++6-8-dbg; then
+      dbg_list="${dbg_list} libstdc++6-8-dbg"
+    elif package_exists libstdc++6-7-dbg; then
+      dbg_list="${dbg_list} libstdc++6-7-dbg"
+    elif package_exists libstdc++6-6-dbg; then
+      dbg_list="${dbg_list} libstdc++6-6-dbg"
+    elif package_exists libstdc++6-5-dbg; then
+      dbg_list="${dbg_list} libstdc++6-5-dbg"
+    elif package_exists libstdc++6-4.9-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+    elif package_exists libstdc++6-4.8-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+    elif package_exists libstdc++6-4.7-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.7-dbg"
+    elif package_exists libstdc++6-4.6-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.6-dbg"
+    fi
+  fi
+  if [ "$(dbg_package_name libatk1.0-0)" == "" ]; then
+    dbg_list="$dbg_list $(dbg_package_name libatk1.0)"
+  fi
+  if [ "$(dbg_package_name libpango-1.0-0)" == "" ]; then
+    dbg_list="$dbg_list $(dbg_package_name libpango1.0-dev)"
+  fi
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+if [ "$do_inst_lib32" = "1" ]; then
+  echo "Including 32-bit libraries."
+else
+  echo "Skipping 32-bit libraries."
+  lib32_list=
+fi
+
+if [ "$do_inst_arm" = "1" ]; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if [ "$do_inst_nacl" = "1" ]; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+filtered_backwards_compatible_list=
+if [ "$do_inst_backwards_compatible" = "1" ]; then
+  echo "Including backwards compatible packages."
+  for package in ${backwards_compatible_list}; do
+    if package_exists ${package}; then
+      filtered_backwards_compatible_list+=" ${package}"
+    fi
+  done
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
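+# For example, "libpci3:i386 libssl-dev linux-libc-dev:i386 zlib1g" comes out
+# as "libpci3:i386 linux-libc-dev:i386 libssl-dev zlib1g": entries with an
+# ":i386" suffix sort ahead, and the rest keep their stable sorted order.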
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}" \
+       "${nacl_list}" ${filtered_backwards_compatible_list} | tr " " "\n" | \
+       sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
+    # Distinguish between packages that actually aren't available to the
+    # system (i.e. not in any repo) and packages that just aren't known to
+    # dpkg (i.e. managed by apt).
+    missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')"
+    not_installed=""
+    unknown=""
+    for p in ${missing_packages}; do
+      if apt-cache show ${p} > /dev/null 2>&1; then
+        not_installed="${p}\n${not_installed}"
+      else
+        unknown="${p}\n${unknown}"
+      fi
+    done
+    if [ -n "${not_installed}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${not_installed}" | sed -e "s/^/  /"
+    fi
+    if [ -n "${unknown}" ]; then
+      echo "WARNING: The following packages are unknown to your system"
+      echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+      echo -e "${unknown}" | sed -e "s/^/  /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+query_cmd="apt-get --just-print install $(echo $packages)"
+if cmd_output="$(LANGUAGE=en LANG=C $query_cmd)"; then
+  new_list=$(echo "$cmd_output" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d' |
+    sed 's/ *$//')
+  upgrade_list=$(echo "$cmd_output" |
+    sed -e '1,/The following packages will be upgraded:/d;s/^  //;t;d' |
+    sed 's/ *$//')
+  if [ -z "$new_list" ] && [ -z "$upgrade_list" ]; then
+    echo "No missing packages, and the packages are up to date."
+  else
+    echo "Installing and upgrading packages: $new_list $upgrade_list."
+    sudo apt-get install ${do_quietly-} ${new_list} ${upgrade_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around query_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${query_cmd}
+  echo
+  echo "It produced the following output:"
+  echo "$cmd_output"
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if [ "$do_inst_chromeos_fonts" != "0" ]; then
+  echo
+  echo "Installing Chrome OS fonts."
+  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ `stat -f -c %T $dir` == "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default founts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
+  fi
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
+
+echo "Installing locales."
+CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8"
+LOCALE_GEN=/etc/locale.gen
+if [ -e ${LOCALE_GEN} ]; then
+  OLD_LOCALE_GEN="$(cat /etc/locale.gen)"
+  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+    sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN}
+  done
+  # Regenerating locales can take a while, so only do it if we need to.
+  if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then
+    echo "Locales already up-to-date."
+  else
+    sudo locale-gen
+  fi
+else
+  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+    sudo locale-gen ${CHROMIUM_LOCALE}
+  done
+fi
diff --git a/src/build/install-chroot.sh b/src/build/install-chroot.sh
new file mode 100755
index 0000000..d76d535
--- /dev/null
+++ b/src/build/install-chroot.sh
@@ -0,0 +1,888 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
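+# For example, "chr 65" prints "A" and "ord A" prints "65"; the mount-point
+# menu below uses this pair to map list indices to selection letters and back.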
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+  echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  while :; do
+    echo "This chroot already exists on your machine."
+    if schroot -l --all-sessions 2>&1 |
+       sed 's/^session://' |
+       grep -qs "^${target%bit}-"; then
+      echo "And it appears to be in active use. Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
+                       "/usr/local/bin/${target%bit}"   \
+                       "/etc/schroot/mount-${target}"   \
+                       "/etc/schroot/script-${target}"  \
+                       "/etc/schroot/${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                       "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment.
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
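+# Roughly: with "-n" nothing is printed automatically; from the "[<target>]"
+# header to end-of-file, the loop at label 1 consumes lines until the next
+# "[" header, label 2 then prints from that header onwards, and the final
+# "p" prints every line before the matched entry.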
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+    printf "${bind_mounts}" |
+      sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+           echo 'priority=3' || :)
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+  # Set up a list of mount points that is specific to this
+  # chroot environment.
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+           /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+    /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' "${fstab}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${fstab}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "             "
+  echo "  list:      list all known chroot environments"                                | wrap "             "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    if [ -n "$1" ]; then
+      t="${s#session:}"
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. 2>/dev/null |
+     awk '{ print $1 }')
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set subtraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions makes it hard to
+    # tell one session apart from another. This can result in us attempting
+    # to kill processes in other sessions. We make a best-effort attempt to
+    # avoid doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 $j
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+   >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+              "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  lsof strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+  /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that does this for you (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon or of the logging daemon, when
+        # installing the Chrome build dependencies. This prevents the chroot
+        # session from being closed.  So, we always try to shut down any running
+        # instance of dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
+              rc=$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit $rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enters the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/src/build/internal/README.chromium b/src/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/src/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting, which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level, but without whole program optimization, to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimization (WPO), which doubles the build time. Results in a much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/src/build/ios/intent_definition.gni b/src/build/ios/intent_definition.gni
new file mode 100644
index 0000000..259f287
--- /dev/null
+++ b/src/build/ios/intent_definition.gni
@@ -0,0 +1,121 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+
+# Template to compile an .intentdefinition file.
+#
+# Arguments
+#
+#   intent_file
+#     path to .intentdefinition file that must be compiled
+#
+#   intent_names
+#     list of strings corresponding to the intents defined in intent_file;
+#     must include all of the intents, as only the intents listed will be
+#     compiled
+#
+#   developer_dir
+#     (optional) path to developer_dir to use
+#
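+# A minimal usage sketch (the target and file names are illustrative, not
+# taken from this repository):
+#
+#   intent_definition("my_intents") {
+#     intent_file = "resources/MyIntents.intentdefinition"
+#     intent_names = [ "MyIntent" ]
+#   }
+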
+template("intent_definition") {
+  assert(defined(invoker.intent_file) && invoker.intent_file != "",
+         "intent_file must be defined for $target_name")
+
+  assert(defined(invoker.intent_names) && invoker.intent_names != [],
+         "intent_names must be defined for $target_name")
+
+  assert(xcode_version_int >= 1140,
+         "intent_definition requires Xcode 11.4 or higher")
+
+  _compile_intent_target = "${target_name}_compile_intent"
+  _compile_intent_output =
+      "$target_gen_dir/" + get_path_info(invoker.intent_file, "file")
+
+  action(_compile_intent_target) {
+    script = "//build/apple/xcrun.py"
+    inputs = [ invoker.intent_file ]
+    outputs = [ _compile_intent_output ]
+    args = [
+      "intentbuilderc",
+      "compile",
+      "-input",
+      rebase_path(invoker.intent_file, root_build_dir),
+      "-output",
+      rebase_path(target_gen_dir, root_build_dir),
+    ]
+
+    if (defined(invoker.developer_dir)) {
+      args += [
+        "--developer-dir",
+        rebase_path(invoker.developer_dir, root_build_dir),
+      ]
+    }
+  }
+
+  _compile_intent_bundle = "${target_name}_compile_intent_bundle"
+  bundle_data(_compile_intent_bundle) {
+    sources = [ _compile_intent_output ]
+    outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+    public_deps = [ ":$_compile_intent_target" ]
+  }
+
+  _generate_source_target = "${target_name}_generate_source"
+  _generate_source_output = []
+  foreach(_intent, invoker.intent_names) {
+    _generate_source_output += [
+      "$target_gen_dir/$_intent.h",
+      "$target_gen_dir/$_intent.m",
+    ]
+  }
+
+  action(_generate_source_target) {
+    script = "//build/apple/xcrun.py"
+    inputs = [ invoker.intent_file ]
+    outputs = _generate_source_output
+    args = [
+      "intentbuilderc",
+      "generate",
+      "-input",
+      rebase_path(invoker.intent_file, root_build_dir),
+      "-output",
+      rebase_path(target_gen_dir, root_build_dir),
+      "-language",
+      "Objective-C",
+    ]
+
+    if (defined(invoker.developer_dir)) {
+      args += [
+        "--developer-dir",
+        rebase_path(invoker.developer_dir, root_build_dir),
+      ]
+    }
+  }
+
+  source_set(target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "developer_dir",
+                             "intent_file",
+                             "intent_names",
+                             "sources",
+                           ])
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(frameworks)) {
+      frameworks = []
+    }
+
+    sources = _generate_source_output
+    configs += [ "//build/config/compiler:enable_arc" ]
+    deps += [
+      ":$_compile_intent_bundle",
+      ":$_generate_source_target",
+    ]
+    frameworks += [ "Intents.framework" ]
+  }
+}
diff --git a/src/build/lacros/BUILD.gn b/src/build/lacros/BUILD.gn
new file mode 100644
index 0000000..216010a
--- /dev/null
+++ b/src/build/lacros/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+import("//build/util/process_version.gni")
+
+python_library("lacros_resource_sizes_py") {
+  pydeps_file = "lacros_resource_sizes.pydeps"
+  data = [ "//buildtools/third_party/eu-strip/bin/eu-strip" ]
+  data_deps = [ "//third_party/catapult/tracing:convert_chart_json" ]
+}
+
+process_version("lacros_version_metadata") {
+  sources = [ "//chrome/VERSION" ]
+
+  template_file = "metadata.json.in"
+  output = "$root_out_dir/metadata.json"
+  process_only = true
+}
diff --git a/src/build/lacros/lacros_resource_sizes.gni b/src/build/lacros/lacros_resource_sizes.gni
new file mode 100644
index 0000000..0c50e0f
--- /dev/null
+++ b/src/build/lacros/lacros_resource_sizes.gni
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/generate_wrapper.gni")
+
+# Generates a script in the bin directory that runs
+# //build/lacros/lacros_resource_sizes.py for the provided configuration.
+template("lacros_resource_sizes_test") {
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker, [ "data_deps" ])
+    executable = "//build/lacros/lacros_resource_sizes.py"
+    wrapper_script = "$root_out_dir/bin/run_${target_name}"
+
+    deps = [ "//build/lacros:lacros_resource_sizes_py" ]
+    executable_args = [
+      "--chromium-output-directory",
+      "@WrappedPath(.)",
+    ]
+  }
+}
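+
+# A minimal usage sketch (target and dependency names are illustrative):
+#
+#   lacros_resource_sizes_test("lacros_resource_sizes_example") {
+#     data_deps = [ "//example:lacros_artifacts" ]
+#   }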
diff --git a/src/build/lacros/lacros_resource_sizes.py b/src/build/lacros/lacros_resource_sizes.py
new file mode 100755
index 0000000..bf791c6
--- /dev/null
+++ b/src/build/lacros/lacros_resource_sizes.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Reports binary size metrics for LaCrOS build artifacts.
+
+More information at //docs/speed/binary_size/metrics.md.
+"""
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import os
+import subprocess
+import sys
+import tempfile
+
+
+@contextlib.contextmanager
+def _SysPath(path):
+  """Library import context that temporarily appends |path| to |sys.path|."""
+  if path and path not in sys.path:
+    sys.path.insert(0, path)
+  else:
+    path = None  # Indicates that |sys.path| is not modified.
+  try:
+    yield
+  finally:
+    if path:
+      sys.path.pop(0)
+
+
+DIR_SOURCE_ROOT = os.environ.get(
+    'CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(
+        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                                 'common')
+
+TRACING_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'catapult',
+                            'tracing')
+
+EU_STRIP_PATH = os.path.join(DIR_SOURCE_ROOT, 'buildtools', 'third_party',
+                             'eu-strip', 'bin', 'eu-strip')
+
+with _SysPath(BUILD_COMMON_PATH):
+  import perf_tests_results_helper  # pylint: disable=import-error
+
+with _SysPath(TRACING_PATH):
+  from tracing.value import convert_chart_json  # pylint: disable=import-error
+
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes',
+    'benchmark_description': 'LaCrOS resource size information.',
+    'trace_rerun_options': [],
+    'charts': {}
+}
+
+_KEY_RAW = 'raw'
+_KEY_GZIPPED = 'gzipped'
+_KEY_STRIPPED = 'stripped'
+_KEY_STRIPPED_GZIPPED = 'stripped_then_gzipped'
+
+
+class _Group:
+  """A group of build artifacts whose file sizes are summed and tracked.
+
+  Build artifacts for size tracking fall under these categories:
+  * File: A single file.
+  * Group: A collection of files.
+  * Dir: All files under a directory.
+
+  Attributes:
+    paths: A list of files or directories to be tracked together.
+    title: The display name of the group.
+    track_stripped: Whether to also track summed stripped ELF sizes.
+    track_compressed: Whether to also track summed compressed sizes.
+  """
+
+  def __init__(self, paths, title, track_stripped=False,
+               track_compressed=False):
+    self.paths = paths
+    self.title = title
+    self.track_stripped = track_stripped
+    self.track_compressed = track_compressed
+
+
+# List of disjoint build artifact groups for size tracking. This list should be
+# kept in sync with the lacros-amd64-generic-binary-size-rel builder contents
+# (specified in //infra/config/subprojects/chromium/ci.star) and the
+# chromeos-amd64-generic-lacros-internal builder (specified in src-internal).
+_TRACKED_GROUPS = [
+    _Group(paths=['chrome'],
+           title='File: chrome',
+           track_stripped=True,
+           track_compressed=True),
+    _Group(paths=['crashpad_handler'], title='File: crashpad_handler'),
+    _Group(paths=['icudtl.dat'], title='File: icudtl.dat'),
+    _Group(paths=['nacl_helper'], title='File: nacl_helper'),
+    _Group(paths=['nacl_irt_x86_64.nexe'], title='File: nacl_irt_x86_64.nexe'),
+    _Group(paths=['resources.pak'], title='File: resources.pak'),
+    _Group(paths=[
+        'chrome_100_percent.pak', 'chrome_200_percent.pak', 'headless_lib.pak'
+    ],
+           title='Group: Other PAKs'),
+    _Group(paths=['snapshot_blob.bin'], title='Group: Misc'),
+    _Group(paths=['locales/'], title='Dir: locales'),
+    _Group(paths=['swiftshader/'], title='Dir: swiftshader'),
+    _Group(paths=['WidevineCdm/'], title='Dir: WidevineCdm'),
+]
+
+
+def _visit_paths(base_dir, paths):
+  """Itemizes files specified by a list of paths.
+
+  Args:
+    base_dir: Base directory for all elements in |paths|.
+    paths: A list of filenames or directory names specifying the files whose
+      sizes are to be counted. Directories are recursed into. There is no
+      de-duping effort. Non-existent files or directories are ignored (with a
+      log message).
+  """
+  for path in paths:
+    full_path = os.path.join(base_dir, path)
+    if os.path.exists(full_path):
+      if os.path.isdir(full_path):
+        for dirpath, _, filenames in os.walk(full_path):
+          for filename in filenames:
+            yield os.path.join(dirpath, filename)
+      else:  # Assume is file.
+        yield full_path
+    else:
+      logging.critical('Not found: %s', path)
+
+
+def _is_probably_elf(filename):
+  """Heuristically decides whether |filename| is ELF via magic signature."""
+  with open(filename, 'rb') as fh:
+    # The file is opened in binary mode, so compare against bytes.
+    return fh.read(4) == b'\x7fELF'
+
+
+def _is_unstrippable_elf(filename):
+  """Identifies known-unstrippable ELF files to denoise the system."""
+  return filename.endswith('.nexe') or filename.endswith('libwidevinecdm.so')
+
+
+def _get_filesize(filename):
+  """Returns the size of a file, or 0 if file is not found."""
+  try:
+    return os.path.getsize(filename)
+  except OSError:
+    logging.critical('Failed to get size: %s', filename)
+  return 0
+
+
+def _get_gzipped_filesize(filename):
+  """Returns the gzipped size of a file, or 0 if file is not found."""
+  BUFFER_SIZE = 65536
+  if not os.path.isfile(filename):
+    return 0
+  try:
+    # Call gzip externally instead of using gzip package since it's > 2x faster.
+    cmd = ['gzip', '-c', filename]
+    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+    # Manually counting bytes instead of using len(p.communicate()[0]) to avoid
+    # buffering the entire compressed data (can be ~100 MB).
+    ret = 0
+    while True:
+      chunk = len(p.stdout.read(BUFFER_SIZE))
+      if chunk == 0:
+        break
+      ret += chunk
+    p.wait()  # Reap the gzip process so it doesn't linger as a zombie.
+    return ret
+  except OSError:
+    logging.critical('Failed to get gzipped size: %s', filename)
+  return 0
+
+
+def _get_categorized_filesizes(filename):
+  """Measures |filename| sizes under various transforms.
+
+  Returns: A Counter (keyed by _KEY_* constants) that stores measured sizes.
+  """
+  sizes = collections.Counter()
+  sizes[_KEY_RAW] = _get_filesize(filename)
+  sizes[_KEY_GZIPPED] = _get_gzipped_filesize(filename)
+
+  # Pre-assign values for non-ELF, or in case of failure for ELF.
+  sizes[_KEY_STRIPPED] = sizes[_KEY_RAW]
+  sizes[_KEY_STRIPPED_GZIPPED] = sizes[_KEY_GZIPPED]
+
+  if _is_probably_elf(filename) and not _is_unstrippable_elf(filename):
+    # Create the temp file outside the try block so that |temp_file| is always
+    # bound when the finally clause runs.
+    fd, temp_file = tempfile.mkstemp()
+    os.close(fd)
+    try:
+      cmd = [EU_STRIP_PATH, filename, '-o', temp_file]
+      subprocess.check_output(cmd)
+      sizes[_KEY_STRIPPED] = _get_filesize(temp_file)
+      sizes[_KEY_STRIPPED_GZIPPED] = _get_gzipped_filesize(temp_file)
+      if sizes[_KEY_STRIPPED] > sizes[_KEY_RAW]:
+        # This weird case has been observed for libwidevinecdm.so.
+        logging.critical('Stripping made things worse for %s', filename)
+    except subprocess.CalledProcessError:
+      logging.critical('Failed to strip file: %s', filename)
+    finally:
+      os.unlink(temp_file)
+  return sizes
+
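+# For illustration only (made-up numbers): for a strippable ELF, the Counter
+# returned by _get_categorized_filesizes() might look like
+#   Counter({'raw': 1000000, 'stripped': 800000,
+#            'gzipped': 400000, 'stripped_then_gzipped': 350000})
+# For non-ELF files, 'stripped' equals 'raw' and 'stripped_then_gzipped'
+# equals 'gzipped'.
+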
+
+def _dump_chart_json(output_dir, chartjson):
+  """Writes chart histogram to JSON files.
+
+  Output files:
+    results-chart.json contains the chart JSON.
+    perf_results.json contains histogram JSON for Catapult.
+
+  Args:
+    output_dir: Directory to place the JSON files.
+    chartjson: Source JSON data for output files.
+  """
+  results_path = os.path.join(output_dir, 'results-chart.json')
+  logging.critical('Dumping chartjson to %s', results_path)
+  with open(results_path, 'w') as json_file:
+    json.dump(chartjson, json_file, indent=2)
+
+  # We would ideally generate a histogram set directly instead of generating
+  # chartjson then converting. However, perf_tests_results_helper is in
+  # //build, which doesn't seem to have any precedent for depending on
+  # anything in Catapult. This can probably be fixed, but since this doesn't
+  # need to be super fast or anything, converting is a good enough solution
+  # for the time being.
+  histogram_result = convert_chart_json.ConvertChartJson(results_path)
+  if histogram_result.returncode != 0:
+    raise Exception('chartjson conversion failed with error: ' +
+                    histogram_result.stdout)
+
+  histogram_path = os.path.join(output_dir, 'perf_results.json')
+  logging.critical('Dumping histograms to %s', histogram_path)
+  with open(histogram_path, 'w') as json_file:
+    json_file.write(histogram_result.stdout)
+
+
+def _run_resource_sizes(args):
+  """Main flow to extract and output size data."""
+  chartjson = _BASE_CHART.copy()
+  report_func = perf_tests_results_helper.ReportPerfResult
+  total_sizes = collections.Counter()
+
+  def report_sizes(sizes, title, track_stripped, track_compressed):
+    report_func(chart_data=chartjson,
+                graph_title=title,
+                trace_title='size',
+                value=sizes[_KEY_RAW],
+                units='bytes')
+
+    if track_stripped:
+      report_func(chart_data=chartjson,
+                  graph_title=title + ' (Stripped)',
+                  trace_title='size',
+                  value=sizes[_KEY_STRIPPED],
+                  units='bytes')
+
+    if track_compressed:
+      report_func(chart_data=chartjson,
+                  graph_title=title + ' (Gzipped)',
+                  trace_title='size',
+                  value=sizes[_KEY_GZIPPED],
+                  units='bytes')
+
+    if track_stripped and track_compressed:
+      report_func(chart_data=chartjson,
+                  graph_title=title + ' (Stripped, Gzipped)',
+                  trace_title='size',
+                  value=sizes[_KEY_STRIPPED_GZIPPED],
+                  units='bytes')
+
+  for g in _TRACKED_GROUPS:
+    sizes = sum(
+        map(_get_categorized_filesizes, _visit_paths(args.out_dir, g.paths)),
+        collections.Counter())
+    report_sizes(sizes, g.title, g.track_stripped, g.track_compressed)
+
+    # The total compressed size is the sum of the individual compressed sizes,
+    # rather than the result of concatenating everything first and then
+    # compressing. This is done for simplicity; it also gives a conservative
+    # size estimate (assuming file metadata and overheads are negligible).
+    total_sizes += sizes
+
+  report_sizes(total_sizes, 'Total', True, True)
+
+  _dump_chart_json(args.output_dir, chartjson)
+
+
+def main():
+  """Parses arguments and runs high level flows."""
+  argparser = argparse.ArgumentParser(description='Writes LaCrOS size metrics.')
+
+  argparser.add_argument('--chromium-output-directory',
+                         dest='out_dir',
+                         required=True,
+                         type=os.path.realpath,
+                         help='Location of the build artifacts.')
+
+  output_group = argparser.add_mutually_exclusive_group()
+
+  output_group.add_argument('--output-dir',
+                            default='.',
+                            help='Directory to save chartjson to.')
+
+  # Accepted to conform to the isolated script interface, but ignored.
+  argparser.add_argument('--isolated-script-test-filter',
+                         help=argparse.SUPPRESS)
+  argparser.add_argument('--isolated-script-test-perf-output',
+                         type=os.path.realpath,
+                         help=argparse.SUPPRESS)
+
+  output_group.add_argument(
+      '--isolated-script-test-output',
+      type=os.path.realpath,
+      help='File to which results will be written in the simplified JSON '
+      'output format.')
+
+  args = argparser.parse_args()
+
+  isolated_script_output = {'valid': False, 'failures': []}
+  if args.isolated_script_test_output:
+    test_name = 'lacros_resource_sizes'
+    args.output_dir = os.path.join(
+        os.path.dirname(args.isolated_script_test_output), test_name)
+    if not os.path.exists(args.output_dir):
+      os.makedirs(args.output_dir)
+
+  try:
+    _run_resource_sizes(args)
+    isolated_script_output = {'valid': True, 'failures': []}
+  finally:
+    if args.isolated_script_test_output:
+      results_path = os.path.join(args.output_dir, 'test_results.json')
+      with open(results_path, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+      with open(args.isolated_script_test_output, 'w') as output_file:
+        json.dump(isolated_script_output, output_file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/lacros/lacros_resource_sizes.pydeps b/src/build/lacros/lacros_resource_sizes.pydeps
new file mode 100644
index 0000000..3707458
--- /dev/null
+++ b/src/build/lacros/lacros_resource_sizes.pydeps
@@ -0,0 +1,11 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/lacros --output build/lacros/lacros_resource_sizes.pydeps build/lacros/lacros_resource_sizes.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
+../util/lib/common/perf_result_data_type.py
+../util/lib/common/perf_tests_results_helper.py
+lacros_resource_sizes.py
diff --git a/src/build/lacros/metadata.json.in b/src/build/lacros/metadata.json.in
new file mode 100644
index 0000000..3fceff2
--- /dev/null
+++ b/src/build/lacros/metadata.json.in
@@ -0,0 +1,6 @@
+{
+  "content": {
+    "version": "@MAJOR@.@MINOR@.@BUILD@.@PATCH@"
+  },
+  "metadata_version": 1
+}
diff --git a/src/build/lacros/mojo_connection_lacros_launcher.py b/src/build/lacros/mojo_connection_lacros_launcher.py
new file mode 100755
index 0000000..786176e
--- /dev/null
+++ b/src/build/lacros/mojo_connection_lacros_launcher.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env vpython3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helps launch lacros-chrome with mojo connection established on Linux
+  or Chrome OS. Use on Chrome OS is for dev purposes.
+
+  The main use case is to be able to launch lacros-chrome in a debugger.
+
+  Please first launch an ash-chrome in the background as usual except without
+  the '--lacros-chrome-path' argument and with an additional
+  '--lacros-mojo-socket-for-testing' argument pointing to a socket path:
+
+  XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime ./out/ash/chrome \\
+      --user-data-dir=/tmp/ash-chrome --enable-wayland-server \\
+      --no-startup-window --enable-features=LacrosSupport \\
+      --lacros-mojo-socket-for-testing=/tmp/lacros.sock
+
+  Then, run this script with '-s' pointing to the same socket path used to
+  launch ash-chrome, followed by a command one would use to launch lacros-chrome
+  inside a debugger:
+
+  EGL_PLATFORM=surfaceless XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime \\
+  ./build/lacros/mojo_connection_lacros_launcher.py -s /tmp/lacros.sock
+  gdb --args ./out/lacros-release/chrome --user-data-dir=/tmp/lacros-chrome
+"""
+
+import argparse
+import array
+import contextlib
+import os
+import pathlib
+import socket
+import sys
+import subprocess
+
+
+_NUM_FDS_MAX = 3
+
+
+# contextlib.nullcontext is introduced in 3.7, while Python version on
+# CrOS is still 3.6. This is for backward compatibility.
+class NullContext:
+  def __init__(self, enter_ret=None):
+    self.enter_ret = enter_ret
+
+  def __enter__(self):
+    return self.enter_ret
+
+  def __exit__(self, exc_type, exc_value, trace):
+    pass
+
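+# For illustration: on Python >= 3.7 the class above is roughly equivalent to
+# contextlib.nullcontext, e.g.:
+#
+#   with contextlib.nullcontext(fileobj) as f:
+#     ...  # f is fileobj; nothing is closed on exit.
+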
+
+def _ReceiveFDs(sock):
+  """Receives FDs from ash-chrome that will be used to launch lacros-chrome.
+
+  Args:
+    sock: A connected unix domain socket.
+
+  Returns:
+    File objects for the mojo connection and maybe startup data file.
+  """
+  # This function is borrowed, with modifications, from:
+  # https://docs.python.org/3/library/socket.html#socket.socket.recvmsg
+  fds = array.array("i")  # Array of ints
+  # Along with the file descriptor, ash-chrome also sends the version in the
+  # regular data.
+  version, ancdata, _, _ = sock.recvmsg(
+      1, socket.CMSG_LEN(fds.itemsize * _NUM_FDS_MAX))
+  for cmsg_level, cmsg_type, cmsg_data in ancdata:
+    if cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS:
+      # This script currently supports three versions.
+      # The oldest one: ash-chrome returns one FD, the mojo connection of the
+      # old bootstrap procedure (i.e., it will be BrowserService).
+      # The middle one: ash-chrome returns two FDs, the mojo connection of the
+      # old bootstrap procedure, and the second for the startup data FD.
+      # The newest one: ash-chrome returns three FDs, the mojo connection of
+      # the old bootstrap procedure, the second for the startup data FD, and
+      # the third for another mojo connection of the new bootstrap procedure.
+      # TODO(crbug.com/1156033): Clean up the code to drop support for the
+      # oldest one after M91.
+      # TODO(crbug.com/1180712): Clean up the mojo procedure support of the
+      # middle one after M92.
+      cmsg_len_candidates = [(i + 1) * fds.itemsize
+                             for i in range(_NUM_FDS_MAX)]
+      assert len(cmsg_data) in cmsg_len_candidates, (
+          'CMSG_LEN is unexpected: %d' % (len(cmsg_data), ))
+      fds.frombytes(cmsg_data[:])
+
+  assert version == b'\x00', 'Expecting version code to be 0'
+  assert len(fds) in (1, 2, 3), 'Expecting exactly 1, 2, or 3 FDs'
+  legacy_mojo_fd = os.fdopen(fds[0])
+  startup_fd = None if len(fds) < 2 else os.fdopen(fds[1])
+  mojo_fd = None if len(fds) < 3 else os.fdopen(fds[2])
+  return legacy_mojo_fd, startup_fd, mojo_fd
+
+
+def _MaybeClosing(fileobj):
+  """Returns closing context manager, if given fileobj is not None.
+
+  If the given fileobj is none, return nullcontext.
+  """
+  return (contextlib.closing if fileobj else NullContext)(fileobj)
+
+
+def Main():
+  arg_parser = argparse.ArgumentParser()
+  arg_parser.usage = __doc__
+  arg_parser.add_argument(
+      '-s',
+      '--socket-path',
+      type=pathlib.Path,
+      required=True,
+      help='Absolute path to the socket that was used to start ash-chrome, '
+      'for example: "/tmp/lacros.socket"')
+  flags, args = arg_parser.parse_known_args()
+
+  assert 'XDG_RUNTIME_DIR' in os.environ
+  assert os.environ.get('EGL_PLATFORM') == 'surfaceless'
+
+  with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
+    sock.connect(flags.socket_path.as_posix())
+    legacy_mojo_connection, startup_connection, mojo_connection = (
+        _ReceiveFDs(sock))
+
+  with _MaybeClosing(legacy_mojo_connection), \
+       _MaybeClosing(startup_connection), \
+       _MaybeClosing(mojo_connection):
+    cmd = args[:]
+    pass_fds = []
+    if legacy_mojo_connection:
+      cmd.append('--mojo-platform-channel-handle=%d' %
+                 legacy_mojo_connection.fileno())
+      pass_fds.append(legacy_mojo_connection.fileno())
+    if startup_connection:
+      cmd.append('--cros-startup-data-fd=%d' % startup_connection.fileno())
+      pass_fds.append(startup_connection.fileno())
+    if mojo_connection:
+      cmd.append('--crosapi-mojo-platform-channel-handle=%d' %
+                 mojo_connection.fileno())
+      pass_fds.append(mojo_connection.fileno())
+    proc = subprocess.Popen(cmd, pass_fds=pass_fds)
+
+  return proc.wait()
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/src/build/lacros/test_runner.py b/src/build/lacros/test_runner.py
new file mode 100755
index 0000000..44ea131
--- /dev/null
+++ b/src/build/lacros/test_runner.py
@@ -0,0 +1,466 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""This script facilitates running tests for lacros on Linux.
+
+  In order to run lacros tests on Linux, please first follow bit.ly/3juQVNJ
+  to set up a build directory with the lacros-chrome-on-linux build
+  configuration, and make sure the corresponding test targets build
+  successfully.
+
+  * Example usages:
+
+  ./build/lacros/test_runner.py test out/lacros/url_unittests
+  ./build/lacros/test_runner.py test out/lacros/browser_tests
+
+  The commands above run url_unittests and browser_tests respectively. More
+  specifically, url_unittests is executed directly, while browser_tests is
+  executed with the latest version of prebuilt ash-chrome; the behavior is
+  controlled by |_TARGETS_REQUIRE_ASH_CHROME|. Note that the list is
+  maintained manually, so if you see something wrong, please upload a CL to
+  fix it.
+
+  ./build/lacros/test_runner.py test out/lacros/browser_tests \\
+      --gtest_filter=BrowserTest.Title
+
+  The above command only runs 'BrowserTest.Title', and any argument accepted by
+  the underlying test binary can be specified in the command.
+
+  ./build/lacros/test_runner.py test out/lacros/browser_tests \\
+    --ash-chrome-version=793554
+
+  The above command runs tests with a given version of ash-chrome, which is
+  useful for reproducing test failures. The version corresponds to the commit
+  position of commits on the master branch, and a list of prebuilt versions
+  can be found at: gs://ash-chromium-on-linux-prebuilts/x86_64.
+
+  ./testing/xvfb.py ./build/lacros/test_runner.py test out/lacros/browser_tests
+
+  The above command starts ash-chrome with xvfb instead of an X11 window, which
+  is useful when running tests without a display attached, such as over SSH.
+"""
+
+import argparse
+import os
+import logging
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import zipfile
+
+_SRC_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+sys.path.append(os.path.join(_SRC_ROOT, 'third_party', 'depot_tools'))
+
+# Base GS URL to store prebuilt ash-chrome.
+_GS_URL_BASE = 'gs://ash-chromium-on-linux-prebuilts/x86_64'
+
+# Latest file version.
+_GS_URL_LATEST_FILE = _GS_URL_BASE + '/latest/ash-chromium.txt'
+
+# GS path to the zipped ash-chrome build with any given version.
+_GS_ASH_CHROME_PATH = 'ash-chromium.zip'
+
+# Directory to cache downloaded ash-chrome versions to avoid re-downloading.
+_PREBUILT_ASH_CHROME_DIR = os.path.join(os.path.dirname(__file__),
+                                        'prebuilt_ash_chrome')
+
+# Number of seconds to wait for ash-chrome to start.
+ASH_CHROME_TIMEOUT_SECONDS = 10
+
+# List of targets that require ash-chrome as a Wayland server in order to run.
+_TARGETS_REQUIRE_ASH_CHROME = [
+    'app_shell_unittests',
+    'aura_unittests',
+    'browser_tests',
+    'components_unittests',
+    'compositor_unittests',
+    'content_unittests',
+    'dbus_unittests',
+    'extensions_unittests',
+    'media_unittests',
+    'message_center_unittests',
+    'snapshot_unittests',
+    'sync_integration_tests',
+    'unit_tests',
+    'views_unittests',
+    'wm_unittests',
+
+    # regex patterns.
+    '.*_browsertests',
+    '.*interactive_ui_tests'
+]
+
+# List of targets that require ash-chrome to support crosapi mojo APIs.
+_TARGETS_REQUIRE_MOJO_CROSAPI = [
+    # TODO(jamescook): Add 'browser_tests' after multiple crosapi connections
+    # are allowed. For now we only enable crosapi in targets that run tests
+    # serially.
+    'interactive_ui_tests',
+    'lacros_chrome_browsertests'
+]
+
+
+def _GetAshChromeDirPath(version):
+  """Returns a path to the dir storing the downloaded version of ash-chrome."""
+  return os.path.join(_PREBUILT_ASH_CHROME_DIR, version)
+
+
+def _remove_unused_ash_chrome_versions(version_to_skip):
+  """Removes unused ash-chrome versions to save disk space.
+
+  Currently, when an ash-chrome zip is downloaded and unpacked, the atime/mtime
+  of the dir and the files are NOW instead of the time when they were built,
+  but there is no guarantee this will always be the behavior in the future, so
+  avoid removing the current version just in case.
+
+  Args:
+    version_to_skip (str): the version to skip removing regardless of its age.
+  """
+  days = 7
+  expiration_duration = 60 * 60 * 24 * days
+
+  for f in os.listdir(_PREBUILT_ASH_CHROME_DIR):
+    if f == version_to_skip:
+      continue
+
+    p = os.path.join(_PREBUILT_ASH_CHROME_DIR, f)
+    if os.path.isfile(p):
+      # The prebuilt ash-chrome dir is NOT supposed to contain any files;
+      # remove them to keep the directory clean.
+      os.remove(p)
+      continue
+    chrome_path = os.path.join(p, 'chrome')
+    if not os.path.exists(chrome_path):
+      chrome_path = p
+    age = time.time() - os.path.getatime(chrome_path)
+    if age > expiration_duration:
+      logging.info(
+          'Removing ash-chrome: "%s" as it hasn\'t been used in the '
+          'past %d days', p, days)
+      shutil.rmtree(p)
+
+
+def _GsutilCopyWithRetry(gs_path, local_name, retry_times=3):
+  """Gsutil copy with retry.
+
+  Args:
+    gs_path: The gs path for remote location.
+    local_name: The local file name.
+    retry_times: The total try times if the gsutil call fails.
+
+  Raises:
+    RuntimeError: If failed to download the specified version, for example,
+        if the version is not present on gcs.
+  """
+  import download_from_google_storage
+  gsutil = download_from_google_storage.Gsutil(
+      download_from_google_storage.GSUTIL_DEFAULT_PATH)
+  exit_code = 1
+  retry = 0
+  while exit_code and retry < retry_times:
+    retry += 1
+    exit_code = gsutil.call('cp', gs_path, local_name)
+  if exit_code:
+    raise RuntimeError('Failed to download: "%s"' % gs_path)
+
+
+def _DownloadAshChromeIfNecessary(version):
+  """Download a given version of ash-chrome if not already exists.
+
+  Args:
+    version: A string representing the version, such as "793554".
+
+  Raises:
+      RuntimeError: If failed to download the specified version, for example,
+          if the version is not present on gcs.
+  """
+
+  def IsAshChromeDirValid(ash_chrome_dir):
+    # This function assumes that once 'chrome' is present, other dependencies
+    # will be present as well. That's not always true, for example, if the test
+    # runner process gets killed in the middle of unzipping (~2 seconds), but
+    # the assumption is unlikely to break in practice.
+    return os.path.isdir(ash_chrome_dir) and os.path.isfile(
+        os.path.join(ash_chrome_dir, 'chrome'))
+
+  ash_chrome_dir = _GetAshChromeDirPath(version)
+  if IsAshChromeDirValid(ash_chrome_dir):
+    return
+
+  shutil.rmtree(ash_chrome_dir, ignore_errors=True)
+  os.makedirs(ash_chrome_dir)
+  with tempfile.NamedTemporaryFile() as tmp:
+    logging.info('Ash-chrome version: %s', version)
+    gs_path = _GS_URL_BASE + '/' + version + '/' + _GS_ASH_CHROME_PATH
+    _GsutilCopyWithRetry(gs_path, tmp.name)
+
+    # ZipFile doesn't preserve file permissions
+    # (https://bugs.python.org/issue15795). To work around the issue, this
+    # function is created and used instead of ZipFile.extractall().
+    # The solution is copied from:
+    # https://stackoverflow.com/questions/42326428/zipfile-in-python-file-permission
+    def ExtractFile(zf, info, extract_dir):
+      zf.extract(info.filename, path=extract_dir)
+      perm = info.external_attr >> 16
+      os.chmod(os.path.join(extract_dir, info.filename), perm)
+
+    with zipfile.ZipFile(tmp.name, 'r') as zf:
+      # Extract all files instead of just the 'chrome' binary because 'chrome'
+      # needs other resources and libraries to run.
+      for info in zf.infolist():
+        ExtractFile(zf, info, ash_chrome_dir)
+
+  _remove_unused_ash_chrome_versions(version)
+
+
+def _GetLatestVersionOfAshChrome():
+  """Returns the latest version of uploaded ash-chrome."""
+  with tempfile.NamedTemporaryFile() as tmp:
+    _GsutilCopyWithRetry(_GS_URL_LATEST_FILE, tmp.name)
+    with open(tmp.name, 'r') as f:
+      return f.read().strip()
+
+
+def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file,
+                             enable_mojo_crosapi):
+  """Waits for Ash-Chrome to be up and running and returns a boolean indicator.
+
+  Determines whether ash-chrome is up and running by checking whether two
+  files (lock file + socket) have been created in |XDG_RUNTIME_DIR| and, if
+  the mojo "crosapi" interface is enabled, whether the lacros mojo socket
+  file has been created.
+  TODO(crbug.com/1107966): Figure out a more reliable hook to determine the
+  status of ash-chrome, likely through mojo connection.
+
+  Args:
+    tmp_xdg_dir (str): Path to the XDG_RUNTIME_DIR.
+    lacros_mojo_socket_file (str): Path to the lacros mojo socket file.
+    enable_mojo_crosapi (bool): Whether to bootstrap the crosapi mojo interface
+        between ash and the lacros test binary.
+
+  Returns:
+    A boolean indicating whether Ash-chrome is up and running.
+  """
+
+  def IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+                       enable_mojo_crosapi):
+    return (len(os.listdir(tmp_xdg_dir)) >= 2
+            and (not enable_mojo_crosapi
+                 or os.path.exists(lacros_mojo_socket_file)))
+
+  time_counter = 0
+  while not IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+                             enable_mojo_crosapi):
+    time.sleep(0.5)
+    time_counter += 0.5
+    if time_counter > ASH_CHROME_TIMEOUT_SECONDS:
+      break
+
+  return IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+                          enable_mojo_crosapi)
+
+
+def _RunTestWithAshChrome(args, forward_args):
+  """Runs tests with ash-chrome.
+
+  Args:
+    args (argparse.Namespace): Args for this script.
+    forward_args (list): Args to be forwarded to the test command.
+  """
+  if args.ash_chrome_path:
+    ash_chrome_file = args.ash_chrome_path
+  else:
+    ash_chrome_version = (args.ash_chrome_version
+                          or _GetLatestVersionOfAshChrome())
+    _DownloadAshChromeIfNecessary(ash_chrome_version)
+    logging.info('Ash-chrome version: %s', ash_chrome_version)
+
+    ash_chrome_file = os.path.join(_GetAshChromeDirPath(ash_chrome_version),
+                                   'chrome')
+  try:
+    # Starts Ash-Chrome.
+    tmp_xdg_dir_name = tempfile.mkdtemp()
+    tmp_ash_data_dir_name = tempfile.mkdtemp()
+
+    # See the file below for how the mojo connection is set up in testing.
+    # //chrome/browser/ash/crosapi/test_mojo_connection_manager.h
+    lacros_mojo_socket_file = '%s/lacros.sock' % tmp_ash_data_dir_name
+    lacros_mojo_socket_arg = ('--lacros-mojo-socket-for-testing=%s' %
+                              lacros_mojo_socket_file)
+    enable_mojo_crosapi = any(t == os.path.basename(args.command)
+                              for t in _TARGETS_REQUIRE_MOJO_CROSAPI)
+
+    ash_process = None
+    ash_env = os.environ.copy()
+    ash_env['XDG_RUNTIME_DIR'] = tmp_xdg_dir_name
+    ash_cmd = [
+        ash_chrome_file,
+        '--user-data-dir=%s' % tmp_ash_data_dir_name,
+        '--enable-wayland-server',
+        '--no-startup-window',
+        '--use-fake-ml-service-for-test',
+    ]
+    if enable_mojo_crosapi:
+      ash_cmd.append(lacros_mojo_socket_arg)
+
+    ash_process_has_started = False
+    total_tries = 3
+    num_tries = 0
+    while not ash_process_has_started and num_tries < total_tries:
+      num_tries += 1
+      ash_process = subprocess.Popen(ash_cmd, env=ash_env)
+      ash_process_has_started = _WaitForAshChromeToStart(
+          tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi)
+      if ash_process_has_started:
+        break
+
+      logging.warning('Starting ash-chrome timed out after %ds',
+                      ASH_CHROME_TIMEOUT_SECONDS)
+      logging.warning('Printing the output of "ps aux" for debugging:')
+      subprocess.call(['ps', 'aux'])
+      if ash_process and ash_process.poll() is None:
+        ash_process.kill()
+
+    if not ash_process_has_started:
+      raise RuntimeError('Timed out waiting for ash-chrome to start')
+
+    # Starts tests.
+    if enable_mojo_crosapi:
+      forward_args.append(lacros_mojo_socket_arg)
+
+      reason_of_jobs_1 = (
+          'multiple crosapi clients are not supported yet (crbug.com/1124490), '
+          'so lacros_chrome_browsertests has to run tests serially')
+
+      if any('--test-launcher-jobs' in arg for arg in forward_args):
+        raise RuntimeError(
+            'Specifying "--test-launcher-jobs" is not allowed because %s. '
+            'Please remove it and this script will automatically append '
+            '"--test-launcher-jobs=1"' % reason_of_jobs_1)
+
+      # TODO(crbug.com/1124490): Run lacros_chrome_browsertests in parallel once
+      # the bug is fixed.
+      logging.warning('Appending "--test-launcher-jobs=1" because %s',
+                      reason_of_jobs_1)
+      forward_args.append('--test-launcher-jobs=1')
+
+    test_env = os.environ.copy()
+    test_env['EGL_PLATFORM'] = 'surfaceless'
+    test_env['XDG_RUNTIME_DIR'] = tmp_xdg_dir_name
+    test_process = subprocess.Popen([args.command] + forward_args, env=test_env)
+    return test_process.wait()
+
+  finally:
+    if ash_process and ash_process.poll() is None:
+      ash_process.terminate()
+      # Allow process to do cleanup and exit gracefully before killing.
+      time.sleep(0.5)
+      ash_process.kill()
+
+    shutil.rmtree(tmp_xdg_dir_name, ignore_errors=True)
+    shutil.rmtree(tmp_ash_data_dir_name, ignore_errors=True)
+
+
+def _RunTestDirectly(args, forward_args):
+  """Runs tests by invoking the test command directly.
+
+  args (dict): Args for this script.
+  forward_args (dict): Args to be forwarded to the test command.
+  """
+  try:
+    p = None
+    p = subprocess.Popen([args.command] + forward_args)
+    return p.wait()
+  finally:
+    if p and p.poll() is None:
+      p.terminate()
+      time.sleep(0.5)
+      p.kill()
+
+
+def _HandleSignal(sig, _):
+  """Handles received signals to make sure spawned test process are killed.
+
+  sig (int): An integer representing the received signal, for example SIGTERM.
+  """
+  logging.warning('Received signal: %d, killing spawned processes', sig)
+
+  # Don't do any cleanup here, instead, leave it to the finally blocks.
+  # Assumption is based on https://docs.python.org/3/library/sys.html#sys.exit:
+  # cleanup actions specified by finally clauses of try statements are honored.
+
+  # https://tldp.org/LDP/abs/html/exitcodes.html:
+  # Exit code 128+n -> Fatal error signal "n".
+  sys.exit(128 + sig)
+
+
+def _RunTest(args, forward_args):
+  """Runs tests with given args.
+
+  args (dict): Args for this script.
+  forward_args (dict): Args to be forwarded to the test command.
+
+  Raises:
+      RuntimeError: If the given test binary doesn't exist or the test runner
+          doesn't know how to run it.
+  """
+
+  if not os.path.isfile(args.command):
+    raise RuntimeError('Specified test command: "%s" doesn\'t exist' %
+                       args.command)
+
+  # |_TARGETS_REQUIRE_ASH_CHROME| may not always be accurate as it is updated
+  # with a best effort only, therefore, allow the invoker to override the
+  # behavior with a specified ash-chrome version, which makes sure that
+  # automated CI/CQ builders would always work correctly.
+  requires_ash_chrome = any(
+      re.match(t, os.path.basename(args.command))
+      for t in _TARGETS_REQUIRE_ASH_CHROME)
+  if not requires_ash_chrome and not args.ash_chrome_version:
+    return _RunTestDirectly(args, forward_args)
+
+  return _RunTestWithAshChrome(args, forward_args)
+
+
+def Main():
+  for sig in (signal.SIGTERM, signal.SIGINT):
+    signal.signal(sig, _HandleSignal)
+
+  logging.basicConfig(level=logging.INFO)
+  arg_parser = argparse.ArgumentParser()
+  arg_parser.usage = __doc__
+
+  subparsers = arg_parser.add_subparsers()
+
+  test_parser = subparsers.add_parser('test', help='Run tests')
+  test_parser.set_defaults(func=_RunTest)
+
+  test_parser.add_argument(
+      'command',
+      help='A single command to invoke the tests, for example: '
+      '"./url_unittests". Any argument unknown to this test runner script will '
+      'be forwarded to the command, for example: "--gtest_filter=Suite.Test"')
+
+  version_group = test_parser.add_mutually_exclusive_group()
+  version_group.add_argument(
+      '--ash-chrome-version',
+      type=str,
+      help='Version of a prebuilt ash-chrome to use for testing, for example: '
+      '"793554", and the version corresponds to the commit position of commits '
+      'on the main branch. If not specified, will use the latest version '
+      'available')
+  version_group.add_argument(
+      '--ash-chrome-path',
+      type=str,
+      help='Path to a locally built ash-chrome to use for testing.')
+
+  args = arg_parser.parse_known_args()
+  return args[0].func(args[0], args[1])
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/src/build/lacros/test_runner_test.py b/src/build/lacros/test_runner_test.py
new file mode 100755
index 0000000..4e06e0a
--- /dev/null
+++ b/src/build/lacros/test_runner_test.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+
+import mock
+from parameterized import parameterized
+
+import test_runner
+
+
+class TestRunnerTest(unittest.TestCase):
+  def setUp(self):
+    logging.disable(logging.CRITICAL)
+    time.sleep = mock.Mock()
+
+  def tearDown(self):
+    logging.disable(logging.NOTSET)
+
+  @parameterized.expand([
+      'url_unittests',
+      './url_unittests',
+      'out/release/url_unittests',
+      './out/release/url_unittests',
+  ])
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that the test runner doesn't attempt to download ash-chrome if not
+  # required.
+  def test_do_not_require_ash_chrome(self, command, mock_popen, mock_download,
+                                     _):
+    args = ['script_name', 'test', command]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      self.assertEqual(1, mock_popen.call_count)
+      mock_popen.assert_called_with([command])
+      self.assertFalse(mock_download.called)
+
+  @parameterized.expand([
+      'browser_tests',
+      'components_browsertests',
+      'content_browsertests',
+      'lacros_chrome_browsertests',
+  ])
+  @mock.patch.object(os,
+                     'listdir',
+                     return_value=['wayland-0', 'wayland-0.lock'])
+  @mock.patch.object(tempfile,
+                     'mkdtemp',
+                     side_effect=['/tmp/xdg', '/tmp/ash-data'])
+  @mock.patch.object(os.environ, 'copy', side_effect=[{}, {}])
+  @mock.patch.object(os.path, 'exists', return_value=True)
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner,
+                     '_GetLatestVersionOfAshChrome',
+                     return_value='793554')
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that the test runner downloads and spawns ash-chrome if ash-chrome is
+  # required.
+  def test_require_ash_chrome(self, command, mock_popen, mock_download, *_):
+    args = ['script_name', 'test', command]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      mock_download.assert_called_with('793554')
+      self.assertEqual(2, mock_popen.call_count)
+
+      ash_chrome_args = mock_popen.call_args_list[0][0][0]
+      self.assertTrue(ash_chrome_args[0].endswith(
+          'build/lacros/prebuilt_ash_chrome/793554/chrome'))
+      expected_ash_chrome_args = [
+          '--user-data-dir=/tmp/ash-data',
+          '--enable-wayland-server',
+          '--no-startup-window',
+          '--use-fake-ml-service-for-test',
+      ]
+      if command == 'lacros_chrome_browsertests':
+        expected_ash_chrome_args.append(
+            '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock')
+      self.assertListEqual(expected_ash_chrome_args, ash_chrome_args[1:])
+      ash_chrome_env = mock_popen.call_args_list[0][1].get('env', {})
+      self.assertDictEqual({'XDG_RUNTIME_DIR': '/tmp/xdg'}, ash_chrome_env)
+
+      test_args = mock_popen.call_args_list[1][0][0]
+      if command == 'lacros_chrome_browsertests':
+        self.assertListEqual([
+            command,
+            '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock',
+            '--test-launcher-jobs=1'
+        ], test_args)
+      else:
+        self.assertListEqual([command], test_args)
+
+      test_env = mock_popen.call_args_list[1][1].get('env', {})
+      self.assertDictEqual(
+          {
+              'XDG_RUNTIME_DIR': '/tmp/xdg',
+              'EGL_PLATFORM': 'surfaceless'
+          }, test_env)
+
+  @mock.patch.object(os,
+                     'listdir',
+                     return_value=['wayland-0', 'wayland-0.lock'])
+  @mock.patch.object(os.path, 'exists', return_value=True)
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner,
+                     '_GetLatestVersionOfAshChrome',
+                     return_value='793554')
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that when an ash-chrome version is specified, that version is used
+  # instead of the latest one.
+  def test_specify_ash_chrome_version(self, mock_popen, mock_download, *_):
+    args = [
+        'script_name', 'test', 'browser_tests', '--ash-chrome-version', '781122'
+    ]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      mock_download.assert_called_with('781122')
+
+  @mock.patch.object(os,
+                     'listdir',
+                     return_value=['wayland-0', 'wayland-0.lock'])
+  @mock.patch.object(os.path, 'exists', return_value=True)
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that if an ash-chrome version is specified, ash-chrome is used to run
+  # tests anyway, even if |_TARGETS_REQUIRE_ASH_CHROME| indicates that
+  # ash-chrome is not required.
+  def test_overrides_do_not_require_ash_chrome(self, mock_popen, mock_download,
+                                               *_):
+    args = [
+        'script_name', 'test', './url_unittests', '--ash-chrome-version',
+        '793554'
+    ]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      mock_download.assert_called_with('793554')
+      self.assertEqual(2, mock_popen.call_count)
+
+  @mock.patch.object(os,
+                     'listdir',
+                     return_value=['wayland-0', 'wayland-0.lock'])
+  @mock.patch.object(os.path, 'exists', return_value=True)
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner, '_GetLatestVersionOfAshChrome')
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that when an ash-chrome path is specified, the test runner doesn't try
+  # to download prebuilt ash-chrome.
+  def test_specify_ash_chrome_path(self, mock_popen, mock_download,
+                                   mock_get_latest_version, *_):
+    args = [
+        'script_name',
+        'test',
+        'browser_tests',
+        '--ash-chrome-path',
+        '/ash/chrome',
+    ]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      self.assertFalse(mock_get_latest_version.called)
+      self.assertFalse(mock_download.called)
+
+  @mock.patch.object(os.path, 'isfile', return_value=True)
+  @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
+  @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
+  # Tests that arguments not known to the test runner are forwarded to the
+  # command that invokes tests.
+  def test_command_arguments(self, mock_popen, mock_download, _):
+    args = [
+        'script_name', 'test', './url_unittests', '--gtest_filter=Suite.Test'
+    ]
+    with mock.patch.object(sys, 'argv', args):
+      test_runner.Main()
+      mock_popen.assert_called_with(
+          ['./url_unittests', '--gtest_filter=Suite.Test'])
+      self.assertFalse(mock_download.called)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/landmine_utils.py b/src/build/landmine_utils.py
new file mode 100644
index 0000000..a3f21ff
--- /dev/null
+++ b/src/build/landmine_utils.py
@@ -0,0 +1,33 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+
+
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
+
+
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+def host_os():
+  """
+  Returns a string representing the host_os of the current system.
+  Possible values: 'win', 'mac', 'linux', 'unknown'.
+  """
+  if IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  elif IsMac():
+    return 'mac'
+  else:
+    return 'unknown'
diff --git a/src/build/landmines.py b/src/build/landmines.py
new file mode 100755
index 0000000..f5adf80
--- /dev/null
+++ b/src/build/landmines.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs as the first hook during every gclient runhooks (see DEPS).
+If it detects that the build should be clobbered, it will delete the contents
+of the build directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+
+Before adding or changing a landmine consider the consequences of doing so.
+Doing so will wipe out every output directory on every Chrome developer's
+machine. This can be particularly problematic on Windows where the directory
+deletion may well fail (locked files, command prompt in the directory, etc.),
+and generated .sln and .vcxproj files will be deleted.
+
+This output directory deletion will be repeated when going back and forth across
+the change that added the landmine, adding to the cost. There are usually less
+troublesome alternatives.
+"""
+
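+# For illustration, a landmine script consumed by this file just prints one
+# line per landmine to stdout; a hypothetical minimal get_landmines.py could
+# be:
+#
+#   print('Clobber to pick up the new toolchain configuration.')
+#
+# Changing or adding a printed line makes the diff against the saved
+# .landmines file non-empty, which triggers a clobber of the build directory.
+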
+import difflib
+import errno
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+def get_build_dir(src_dir):
+  r"""
+  Returns the absolute path to the directory containing the build directories.
+  Examples:
+    'C:\src\out'
+    '/b/s/w/ir/cache/builder/src/out'
+  """
+  if 'CHROMIUM_OUT_DIR' in os.environ:
+    output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+    if not output_dir:
+      raise RuntimeError(
+          'CHROMIUM_OUT_DIR environment variable is set but blank!')
+  else:
+    output_dir = 'out'
+  return os.path.abspath(os.path.join(src_dir, output_dir))
+
+
+def clobber_if_necessary(new_landmines, src_dir, landmines_path):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(src_dir)
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    # Only ignore "already exists" errors; re-raise anything else.
+    if e.errno != errno.EEXIST:
+      raise
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+      sys.stdout.flush()
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns an options object containing the configuration for this script."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-d', '--src-dir',
+      help='Path of the source root dir. Overrides the default location of the '
+           'source root dir when calculating the build directory.')
+  parser.add_option(
+      '-l',
+      '--landmines-path',
+      help='Path to the landmines file to use (defaults to .landmines)')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  if options.src_dir:
+    if not os.path.isdir(options.src_dir):
+      parser.error('Cannot find source root dir at %s' % options.src_dir)
+    logging.debug('Overriding source root dir. Using: %s', options.src_dir)
+  else:
+    options.src_dir = \
+        os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+  if not options.landmine_scripts:
+    options.landmine_scripts = [os.path.join(options.src_dir, 'build',
+                                             'get_landmines.py')]
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    options.landmine_scripts += [extra_script]
+
+  return options
+
+
+def main():
+  options = process_options()
+
+  landmines = []
+  for s in options.landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE,
+                            universal_newlines=True)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  if options.landmines_path:
+    landmines_path = options.landmines_path
+  else:
+    landmines_path = os.path.join(options.src_dir, '.landmines')
+  clobber_if_necessary(landmines, options.src_dir,
+                       os.path.normpath(landmines_path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/linux/BUILD.gn b/src/build/linux/BUILD.gn
new file mode 100644
index 0000000..54314c7
--- /dev/null
+++ b/src/build/linux/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_gio) {
+  pkg_config("gio_config") {
+    packages = [ "gio-2.0" ]
+
+    defines = [ "USE_GIO" ]
+  }
+}
+
+# Looking for libspeechd? Use //third_party/speech-dispatcher
+
+if (use_system_freetype) {
+  assert(!is_chromecast)
+
+  # Only provided for distributions which prefer to keep linking to FreeType on
+  # the system. Use with caution; for details see
+  # build/config/freetype/BUILD.gn.
+  pkg_config("freetype_from_pkgconfig") {
+    visibility = [
+      "//third_party:freetype_harfbuzz",
+      "//third_party/harfbuzz-ng:harfbuzz_source",
+    ]
+    packages = [ "freetype2" ]
+  }
+}
diff --git a/src/build/linux/chrome.map b/src/build/linux/chrome.map
new file mode 100644
index 0000000..718796f
--- /dev/null
+++ b/src/build/linux/chrome.map
@@ -0,0 +1,89 @@
+{
+global:
+  __bss_start;
+  __data_start;
+  data_start;
+  _edata;
+  _end;
+  _IO_stdin_used;
+
+  # Initialization and finalization functions for static global
+  # variables.
+  _fini;
+  _init;
+  __libc_csu_fini;
+  __libc_csu_init;
+
+  # Chrome's main function.  Exported for historical purposes.
+  ChromeMain;
+
+  # Program entry point.
+  _start;
+
+  # Memory allocation symbols.  We want chrome and any libraries to
+  # share the same heap, so it is correct to export these symbols.
+  calloc;
+  cfree;
+  free;
+  __free_hook;
+  __libc_calloc;
+  __libc_cfree;
+  __libc_free;
+  __libc_malloc;
+  __libc_memalign;
+  __libc_pvalloc;
+  __libc_realloc;
+  __libc_valloc;
+  mallinfo;
+  malloc;
+  __malloc_hook;
+  malloc_size;
+  malloc_stats;
+  malloc_usable_size;
+  mallopt;
+  memalign;
+  __memalign_hook;
+  __posix_memalign;
+  posix_memalign;
+  pvalloc;
+  realloc;
+  __realloc_hook;
+  valloc;
+
+  # Various flavors of operator new and operator delete.
+  _ZdaPv;
+  _ZdaPvm;
+  _ZdaPvmSt11align_val_t;
+  _ZdaPvRKSt9nothrow_t;
+  _ZdaPvSt11align_val_t;
+  _ZdaPvSt11align_val_tRKSt9nothrow_t;
+  _ZdlPv;
+  _ZdlPvm;
+  _ZdlPvmSt11align_val_t;
+  _ZdlPvRKSt9nothrow_t;
+  _ZdlPvSt11align_val_t;
+  _ZdlPvSt11align_val_tRKSt9nothrow_t;
+  _Znam;
+  _ZnamRKSt9nothrow_t;
+  _ZnamSt11align_val_t;
+  _ZnamSt11align_val_tRKSt9nothrow_t;
+  _Znwm;
+  _ZnwmRKSt9nothrow_t;
+  _ZnwmSt11align_val_t;
+  _ZnwmSt11align_val_tRKSt9nothrow_t;
+
+  # Various flavors of localtime().  These are exported by the chrome
+  # sandbox to intercept calls to localtime(), which would otherwise
+  # fail in untrusted processes that don't have permission to read
+  # /etc/localtime.  These overrides forward the request to the browser
+  # process, which uses dlsym(localtime) to make the real calls.
+  localtime;
+  localtime64;
+  localtime64_r;
+  localtime_r;
+
+  v8dbg_*;
+
+local:
+  *;
+};
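+
+# Illustrative note: a version script like this one is applied at link time
+# via the linker's --version-script flag, e.g. (hypothetical command):
+#
+#   clang++ ... -Wl,--version-script=build/linux/chrome.map -o chrome
+#
+# Symbols matching the "global:" patterns above remain exported; everything
+# matched by "local: *" is hidden from the dynamic symbol table.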
diff --git a/src/build/linux/dump_app_syms.py b/src/build/linux/dump_app_syms.py
new file mode 100644
index 0000000..f156baf
--- /dev/null
+++ b/src/build/linux/dump_app_syms.py
@@ -0,0 +1,31 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
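+#
+# A hypothetical invocation (matching the positional arguments parsed below):
+#
+#   python dump_app_syms.py ./dump_syms 0 ./chrome ./chrome.breakpad.x64
+#
+# writes Breakpad symbols for ./chrome into ./chrome.breakpad.x64 and, because
+# strip_binary is '0', skips stripping the binary.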
+
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+if len(sys.argv) != 5:
+  print("dump_app_syms.py <dump_syms_exe> <strip_binary>")
+  print("                 <binary_with_symbols> <symbols_output>")
+  sys.exit(1)
+
+dumpsyms = sys.argv[1]
+strip_binary = sys.argv[2]
+infile = sys.argv[3]
+outfile = sys.argv[4]
+
+# Dump only when the output file is out-of-date.
+if not os.path.isfile(outfile) or \
+   os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
+  with open(outfile, 'w') as outfileobj:
+    subprocess.check_call([dumpsyms, infile], stdout=outfileobj)
+
+if strip_binary != '0':
+  subprocess.check_call(['strip', infile])
diff --git a/src/build/linux/extract_symbols.gni b/src/build/linux/extract_symbols.gni
new file mode 100644
index 0000000..722f60d
--- /dev/null
+++ b/src/build/linux/extract_symbols.gni
@@ -0,0 +1,40 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# Extracts symbols from a binary into a symbol file using dump_app_syms.py.
+#
+# Args:
+#   binary: Path to the binary containing symbols to extract, e.g.:
+#       "$root_out_dir/chrome"
+#   symbol_file: Desired output file for symbols, e.g.:
+#       "$root_out_dir/chrome.breakpad.$current_cpu"
+template("extract_symbols") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "testonly",
+                         ])
+  action("${target_name}") {
+    dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
+    dump_syms_binary =
+        get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms"
+
+    script = "//build/linux/dump_app_syms.py"
+    inputs = [
+      invoker.binary,
+      dump_syms_binary,
+    ]
+    outputs = [ invoker.symbol_file ]
+    args = [
+      "./" + rebase_path(dump_syms_binary, root_build_dir),
+      "0",  # strip_binary = false
+      rebase_path(invoker.binary, root_build_dir),
+      rebase_path(invoker.symbol_file, root_build_dir),
+    ]
+
+    deps += [ dump_syms_label ]
+  }
+}
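+
+# A minimal usage sketch (hypothetical target name; paths taken from the
+# comment above):
+#
+#   extract_symbols("chrome_symbols") {
+#     binary = "$root_out_dir/chrome"
+#     symbol_file = "$root_out_dir/chrome.breakpad.$current_cpu"
+#     deps = [ "//chrome" ]
+#   }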
diff --git a/src/build/linux/install-chromeos-fonts.py b/src/build/linux/install-chromeos-fonts.py
new file mode 100755
index 0000000..da8fb40
--- /dev/null
+++ b/src/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part
+# install-build-deps.sh.
+
+from __future__ import print_function
+
+import os
+import shutil
+import subprocess
+import sys
+
+URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+                'distfiles/%(name)s-%(version)s.tar.bz2')
+
+# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
+# noto-cjk used to be here, but is removed because fc-cache takes too long
+# regenerating the fontconfig cache (See crbug.com/697954.)
+# TODO(jshin): Add it back when the above issue can be avoided.
+SOURCES = [
+  {
+    'name': 'notofonts',
+    'version': '20161129'
+  }, {
+    'name': 'robotofonts',
+    'version': '2.132'
+  }
+]
+
+URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print("Error: %s must be run on Linux." % __file__)
+    return 1
+
+  if os.getuid() != 0:
+    print("Error: %s must be run as root." % __file__)
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print("Error: Destination directory does not exist: %s" % FONTS_DIR)
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == '\n'.join(URLS):
+        print("Chrome OS fonts already up to date in %s." % dest_dir)
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0o755)
+
+  print("Installing Chrome OS fonts to %s." % dest_dir)
+  for url in URLS:
+    tarball = os.path.join(dest_dir, os.path.basename(url))
+    subprocess.check_call(['curl', '-L', url, '-o', tarball])
+    subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                           '-xf', tarball, '-C', dest_dir])
+    os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write('\n'.join(URLS))
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0o755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0o644)
+
+  print("""\
+
+Chrome OS font rendering settings are specified using Fontconfig. If your
+system's configuration doesn't match Chrome OS's (which vary for different
+devices), fonts may be rendered with different subpixel rendering, subpixel
+positioning, or hinting settings. This may affect font metrics.
+
+Chrome OS's settings are stored in the media-libs/fontconfig package, which is
+at src/third_party/chromiumos-overlay/media-libs/fontconfig in a Chrome OS
+checkout. You can configure your system to match Chrome OS's defaults by
+creating or editing a ~/.fonts.conf file:
+
+<?xml version="1.0"?>
+<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
+<fontconfig>
+  <match target="font">
+    <edit name="antialias" mode="assign"><bool>true</bool></edit>
+    <edit name="autohint" mode="assign"><bool>true</bool></edit>
+    <edit name="hinting" mode="assign"><bool>true</bool></edit>
+    <edit name="hintstyle" mode="assign"><const>hintslight</const></edit>
+    <edit name="rgba" mode="assign"><const>rgb</const></edit>
+  </match>
+</fontconfig>
+
+To load additional per-font configs (and assuming you have Chrome OS checked
+out), add the following immediately before the "</fontconfig>" line:
+
+  <include ignore_missing="yes">/path/to/src/third_party/chromiumos-overlay/media-libs/fontconfig/files/local.conf</include>
+""")
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/linux/libncursesw/DIR_METADATA b/src/build/linux/libncursesw/DIR_METADATA
new file mode 100644
index 0000000..6bbf490
--- /dev/null
+++ b/src/build/linux/libncursesw/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Internals>Accessibility"
+}
+
+team_email: "chromium-accessibility@chromium.org"
diff --git a/src/build/linux/libpci/BUILD.gn b/src/build/linux/libpci/BUILD.gn
new file mode 100644
index 0000000..2d1e267
--- /dev/null
+++ b/src/build/linux/libpci/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+# This generates a target named "libpci".
+generate_library_loader("libpci") {
+  name = "LibPciLoader"
+  output_h = "libpci.h"
+  output_cc = "libpci_loader.cc"
+  header = "<pci/pci.h>"
+
+  functions = [
+    "pci_alloc",
+    "pci_init",
+    "pci_cleanup",
+    "pci_scan_bus",
+    "pci_fill_info",
+    "pci_lookup_name",
+  ]
+}
diff --git a/src/build/linux/libudev/BUILD.gn b/src/build/linux/libudev/BUILD.gn
new file mode 100644
index 0000000..dcd9f23
--- /dev/null
+++ b/src/build/linux/libudev/BUILD.gn
@@ -0,0 +1,66 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+libudev_functions = [
+  "udev_device_get_action",
+  "udev_device_get_devnode",
+  "udev_device_get_devtype",
+  "udev_device_get_parent",
+  "udev_device_get_parent_with_subsystem_devtype",
+  "udev_device_get_property_value",
+  "udev_device_get_subsystem",
+  "udev_device_get_sysattr_value",
+  "udev_device_get_sysname",
+  "udev_device_get_syspath",
+  "udev_device_new_from_devnum",
+  "udev_device_new_from_subsystem_sysname",
+  "udev_device_new_from_syspath",
+  "udev_device_unref",
+  "udev_enumerate_add_match_subsystem",
+  "udev_enumerate_get_list_entry",
+  "udev_enumerate_new",
+  "udev_enumerate_scan_devices",
+  "udev_enumerate_unref",
+  "udev_list_entry_get_next",
+  "udev_list_entry_get_name",
+  "udev_monitor_enable_receiving",
+  "udev_monitor_filter_add_match_subsystem_devtype",
+  "udev_monitor_get_fd",
+  "udev_monitor_new_from_netlink",
+  "udev_monitor_receive_device",
+  "udev_monitor_unref",
+  "udev_new",
+  "udev_set_log_fn",
+  "udev_set_log_priority",
+  "udev_unref",
+]
+
+# This generates a target named "udev0_loader".
+generate_library_loader("udev0_loader") {
+  name = "LibUdev0Loader"
+  output_h = "libudev0.h"
+  output_cc = "libudev0_loader.cc"
+  header = "\"third_party/libudev/libudev0.h\""
+
+  functions = libudev_functions
+}
+
+# This generates a target named "udev1_loader".
+generate_library_loader("udev1_loader") {
+  name = "LibUdev1Loader"
+  output_h = "libudev1.h"
+  output_cc = "libudev1_loader.cc"
+  header = "\"third_party/libudev/libudev1.h\""
+
+  functions = libudev_functions
+}
+
+group("libudev") {
+  public_deps = [
+    ":udev0_loader",
+    ":udev1_loader",
+  ]
+}
diff --git a/src/build/linux/rewrite_dirs.py b/src/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..17659c3
--- /dev/null
+++ b/src/build/linux/rewrite_dirs.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+from __future__ import print_function
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
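+
+# For example (hypothetical values): with --sysroot=/build/sysroot and
+# --strip-prefix=/usr, the argument "-I/usr/include/glib-2.0" is rewritten to
+# "-I/build/sysroot/include/glib-2.0".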
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print(line)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/linux/strip_binary.gni b/src/build/linux/strip_binary.gni
new file mode 100644
index 0000000..ddc42cc
--- /dev/null
+++ b/src/build/linux/strip_binary.gni
@@ -0,0 +1,44 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# Extracts symbols from a binary into a symbol file.
+#
+# Args:
+#   binary_input: Path to the binary containing symbols to extract, e.g.:
+#       "$root_out_dir/chrome"
+#   symbol_output: Desired output file for symbols, e.g.:
+#       "$root_out_dir/chrome.debug"
+#   stripped_binary_output: Desired output file for stripped file, e.g.:
+#       "$root_out_dir/chrome.stripped"
+template("strip_binary") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "testonly",
+                         ])
+  action("${target_name}") {
+    eu_strip_binary = "//buildtools/third_party/eu-strip/bin/eu-strip"
+    script = "//build/linux/strip_binary.py"
+    inputs = [
+      invoker.binary_input,
+      eu_strip_binary,
+    ]
+    outputs = [
+      invoker.symbol_output,
+      invoker.stripped_binary_output,
+    ]
+    args = [
+      "--eu-strip-binary-path",
+      rebase_path(eu_strip_binary, root_build_dir),
+      "--symbol-output",
+      rebase_path(invoker.symbol_output, root_build_dir),
+      "--stripped-binary-output",
+      rebase_path(invoker.stripped_binary_output, root_build_dir),
+      "--binary-input",
+      rebase_path(invoker.binary_input, root_build_dir),
+    ]
+  }
+}
diff --git a/src/build/linux/strip_binary.py b/src/build/linux/strip_binary.py
new file mode 100755
index 0000000..00b4089
--- /dev/null
+++ b/src/build/linux/strip_binary.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+#
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import subprocess
+import sys
+
+
+def main():
+  argparser = argparse.ArgumentParser(description='eu-strip binary.')
+
+  argparser.add_argument('--eu-strip-binary-path', help='eu-strip path.')
+  argparser.add_argument('--binary-input', help='exe file path.')
+  argparser.add_argument('--symbol-output', help='debug file path.')
+  argparser.add_argument('--stripped-binary-output', help='stripped file path.')
+  args = argparser.parse_args()
+
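+  # eu-strip writes the stripped output to the -o path and saves the removed
+  # debug sections to the separate file given by -f.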
+  cmd_line = [
+      args.eu_strip_binary_path, '-o', args.stripped_binary_output, '-f',
+      args.symbol_output, args.binary_input
+  ]
+
+  # subprocess.call() waits for eu-strip to finish and returns its exit code;
+  # a bare Popen would return immediately with returncode still None.
+  return subprocess.call(cmd_line)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/linux/sysroot_ld_path.sh b/src/build/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000..623d47b
--- /dev/null
+++ b/src/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,99 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+#  sysroot_ld_path.sh /abspath/to/sysroot
+#
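+# For a typical sysroot this prints a space-separated list of library
+# directories, e.g. (illustrative):
+#   /abspath/to/sysroot/lib/x86_64-linux-gnu /abspath/to/sysroot/usr/lib
+#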
+
+log_error_and_exit() {
+  echo $0: $@
+  exit 1
+}
+
+process_entry() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_entry()"
+  fi
+  local root="$1"
+  local localpath="$2"
+
+  echo $localpath | grep -qs '^/'
+  if [ $? -ne 0 ]; then
+    log_error_and_exit $localpath does not start with /
+  fi
+  local entry="$root$localpath"
+  echo $entry
+}
+
+process_ld_so_conf() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_ld_so_conf()"
+  fi
+  local root="$1"
+  local ld_so_conf="$2"
+
+  # ld.so.conf may include relative include paths. pushd is a bashism.
+  local saved_pwd=$(pwd)
+  cd $(dirname "$ld_so_conf")
+
+  cat "$ld_so_conf" | \
+    while read ENTRY; do
+      echo "$ENTRY" | grep -qs ^include
+      if [ $? -eq 0 ]; then
+        local included_files=$(echo "$ENTRY" | sed 's/^include //')
+        echo "$included_files" | grep -qs ^/
+        if [ $? -eq 0 ]; then
+          if ls $root$included_files >/dev/null 2>&1 ; then
+            for inc_file in $root$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        else
+          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+            for inc_file in $(pwd)/$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        fi
+        continue
+      fi
+
+      echo "$ENTRY" | grep -qs ^/
+      if [ $? -eq 0 ]; then
+        process_entry "$root" "$ENTRY"
+      fi
+    done
+
+  # popd is a bashism
+  cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+  exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+  log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+  if [ $? -eq 0 ]; then
+    for entry in $LD_SO_CONF_D/*.conf; do
+      process_ld_so_conf "$1" "$entry"
+    done | xargs echo
+  fi
+fi
diff --git a/src/build/linux/sysroot_scripts/build_and_upload.py b/src/build/linux/sysroot_scripts/build_and_upload.py
new file mode 100755
index 0000000..1a24da2
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/build_and_upload.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Automates running BuildPackageLists, BuildSysroot, and
+UploadSysroot for each supported arch of each sysroot creator.
+"""
+
+from __future__ import print_function
+
+import glob
+import hashlib
+import json
+import multiprocessing
+import os
+import re
+import string
+import subprocess
+import sys
+
+
+def run_script(args):
+  fnull = open(os.devnull, 'w')
+  subprocess.check_call(args, stdout=fnull, stderr=fnull)
+
+
+def sha1sumfile(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      data = f.read(65536)
+      if not data:
+        break
+      sha1.update(data)
+  return sha1.hexdigest()
+
+
+def get_proc_output(args):
+  return subprocess.check_output(args).strip()
+
+
+def build_and_upload(script_path, distro, release, arch, lock):
+  script_dir = os.path.dirname(os.path.realpath(__file__))
+
+  run_script([script_path, 'BuildSysroot' + arch])
+  run_script([script_path, 'UploadSysroot' + arch])
+
+  tarball = '%s_%s_%s_sysroot.tar.xz' % (distro, release, arch.lower())
+  tarxz_path = os.path.join(script_dir, "..", "..", "..", "out",
+                            "sysroot-build", release, tarball)
+  sha1sum = sha1sumfile(tarxz_path)
+  sysroot_dir = '%s_%s_%s-sysroot' % (distro, release, arch.lower())
+
+  sysroot_metadata = {
+      'Tarball': tarball,
+      'Sha1Sum': sha1sum,
+      'SysrootDir': sysroot_dir,
+  }
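+  # The sysroots.json entry written below looks like (values illustrative):
+  #   "sid_amd64": {
+  #       "Sha1Sum": "<sha1 of the tarball>",
+  #       "SysrootDir": "debian_sid_amd64-sysroot",
+  #       "Tarball": "debian_sid_amd64_sysroot.tar.xz"
+  #   }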
+  with lock:
+    with open(os.path.join(script_dir, 'sysroots.json'), 'r+') as f:
+      sysroots = json.load(f)
+      sysroots["%s_%s" % (release, arch.lower())] = sysroot_metadata
+      f.seek(0)
+      f.truncate()
+      f.write(
+          json.dumps(
+              sysroots, sort_keys=True, indent=4, separators=(',', ': ')))
+      f.write('\n')
+
+
+def main():
+  script_dir = os.path.dirname(os.path.realpath(__file__))
+  procs = []
+  lock = multiprocessing.Lock()
+  for filename in glob.glob(os.path.join(script_dir, 'sysroot-creator-*.sh')):
+    script_path = os.path.join(script_dir, filename)
+    distro = get_proc_output([script_path, 'PrintDistro'])
+    release = get_proc_output([script_path, 'PrintRelease'])
+    architectures = get_proc_output([script_path, 'PrintArchitectures'])
+    for arch in architectures.split('\n'):
+      proc = multiprocessing.Process(
+          target=build_and_upload,
+          args=(script_path, distro, release, arch, lock))
+      procs.append(("%s %s (%s)" % (distro, release, arch), proc))
+      proc.start()
+  for _, proc in procs:
+    proc.join()
+
+  print("SYSROOT CREATION SUMMARY")
+  failures = 0
+  for name, proc in procs:
+    if proc.exitcode:
+      failures += 1
+    status = "FAILURE" if proc.exitcode else "SUCCESS"
+    print("%s sysroot creation\t%s" % (name, status))
+  return failures
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/linux/sysroot_scripts/debian_archive_unstable.gpg b/src/build/linux/sysroot_scripts/debian_archive_unstable.gpg
new file mode 100644
index 0000000..e86a700
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/debian_archive_unstable.gpg
Binary files differ
diff --git a/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py b/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
new file mode 100755
index 0000000..1e0b5f6
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Find incompatible symbols in glibc and output a list of replacements.
+"""
+
+from __future__ import print_function
+
+import re
+import sys
+
+# This constant comes from https://crbug.com/580892
+MAX_ALLOWED_GLIBC_VERSION = [2, 17]
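+
+# For example, if nm reports both expf@GLIBC_2.2.5 and expf@@GLIBC_2.27, expf
+# straddles the 2.17 cutoff, so the output pins it to the newest allowed
+# version: __asm__(".symver expf, expf@GLIBC_2.2.5");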
+
+
+def get_replacements(nm_file, max_allowed_glibc_version):
+  version_format = re.compile(r'GLIBC_[0-9.]+')
+  symbols = {}
+  for line in nm_file:
+    # Some versions of nm have a bug where the version gets printed twice.
+    # Since the symbol may either be formatted like "name@@VERSION" or
+    # "name@@VERSION@@VERSION", handle both cases.
+    line = line.replace('@@', '@')
+    symver = line.split('@')
+    symbol = symver[0].split(' ')[-1]
+    version = symver[-1]
+    if not re.match(version_format, version):
+      continue
+    if symbol in symbols:
+      symbols[symbol].add(version)
+    else:
+      symbols[symbol] = set([version])
+
+  replacements = []
+  for symbol, versions in symbols.items():
+    if len(versions) <= 1:
+      continue
+    versions_parsed = [[
+        int(part) for part in version.lstrip('GLIBC_').split('.')
+    ] for version in versions]
+    if (max(versions_parsed) > max_allowed_glibc_version and
+        min(versions_parsed) <= max_allowed_glibc_version):
+      # Use the newest allowed version of the symbol.
+      replacement_version_parsed = max([
+          version for version in versions_parsed
+          if version <= max_allowed_glibc_version
+      ])
+      replacement_version = 'GLIBC_' + '.'.join(
+          [str(part) for part in replacement_version_parsed])
+      replacements.append('__asm__(".symver %s, %s@%s");' %
+                          (symbol, symbol, replacement_version))
+  return sorted(replacements)
+
+
+if __name__ == '__main__':
+  replacements = get_replacements(sys.stdin, MAX_ALLOWED_GLIBC_VERSION)
+  if replacements:
+    print('// Chromium-specific hack.')
+    print('// See explanation in sysroot-creator.sh.')
+    for replacement in replacements:
+      print(replacement)
diff --git a/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py b/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
new file mode 100755
index 0000000..7c66555
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import find_incompatible_glibc_symbols
+import sys
+
+if sys.version_info.major == 2:
+  from cStringIO import StringIO
+else:
+  from io import StringIO
+
+NM_DATA = """\
+0000000000000001 W expf@GLIBC_2.2.5
+0000000000000002 W expf@@GLIBC_2.27
+0000000000000003 W foo@@GLIBC_2.2.5
+0000000000000004 W bar@GLIBC_2.2.5
+0000000000000005 W baz@GLIBC_2.2.5
+0000000000000006 T foo2@GLIBC_2.2
+0000000000000007 T foo2@GLIBC_2.3
+0000000000000008 T foo2@GLIBC_2.30
+0000000000000009 T foo2@@GLIBC_2.31
+000000000000000a T bar2@GLIBC_2.30
+000000000000000b T bar2@@GLIBC_2.31
+000000000000000c T baz2@GLIBC_2.2
+000000000000000d T baz2@@GLIBC_2.3
+"""
+
+EXPECTED_REPLACEMENTS = [
+    '__asm__(".symver expf, expf@GLIBC_2.2.5");',
+    '__asm__(".symver foo2, foo2@GLIBC_2.3");',
+]
+
+nm_file = StringIO()
+nm_file.write(NM_DATA)
+nm_file.seek(0)
+
+assert (
+    EXPECTED_REPLACEMENTS == find_incompatible_glibc_symbols.get_replacements(
+        nm_file, [2, 17]))
diff --git a/src/build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh b/src/build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh
new file mode 100755
index 0000000..ecbdd64
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generate_debian_archive_unstable_gpg.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+KEYS=(
+    # Debian Stable Release Key (10/buster)
+    "DCC9EFBF77E11517"
+    # Debian Archive Automatic Signing Key (10/buster)
+    "DC30D7C23CBBABEE"
+    # Debian Security Archive Automatic Signing Key (10/buster)
+    "4DFAB270CAA96DFA"
+    # Jessie Stable Release Key
+    "CBF8D6FD518E17E1"
+    # Debian Archive Automatic Signing Key (7.0/wheezy)
+    "8B48AD6246925553"
+    # Debian Archive Automatic Signing Key (8/jessie)
+    "7638D0442B90D010"
+    # Debian Security Archive Automatic Signing Key (8/jessie)
+    "9D6D8F6BC857C906"
+    # Debian Archive Automatic Signing Key (9/stretch)
+    "E0B11894F66AEC98"
+    # Debian Security Archive Automatic Signing Key (9/stretch)
+    "EDA0D2388AE22BA9"
+    # Debian Stable Release Key (9/stretch)
+    "EF0F382A1A7B6500"
+)
+
+gpg --recv-keys ${KEYS[@]}
+gpg --output "${SCRIPT_DIR}/debian_archive_unstable.gpg" --export ${KEYS[@]}
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 b/src/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
new file mode 100644
index 0000000..29ea13c
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
@@ -0,0 +1,372 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libcilkrts5_6.3.0-18+deb9u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libmpx2_6.3.0-18+deb9u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_amd64.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm b/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm
new file mode 100644
index 0000000..b91ebc4
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm
@@ -0,0 +1,368 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_armhf.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_armhf.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 b/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
new file mode 100644
index 0000000..4db2b8a
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
@@ -0,0 +1,371 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_arm64.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_arm64.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.armel b/src/build/linux/sysroot_scripts/generated_package_lists/sid.armel
new file mode 100644
index 0000000..f8f54f3
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.armel
@@ -0,0 +1,367 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_armel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_armel.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.i386 b/src/build/linux/sysroot_scripts/generated_package_lists/sid.i386
new file mode 100644
index 0000000..cdeaf9a
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.i386
@@ -0,0 +1,368 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libasan6_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libitm1_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libcilkrts5_6.3.0-18+deb9u1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libmpx2_6.3.0-18+deb9u1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-1_4.0.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk4/libgtk-4-dev_4.0.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_i386.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_i386.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el b/src/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
new file mode 100644
index 0000000..a3b6559
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
@@ -0,0 +1,359 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_mips64el.deb
diff --git a/src/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel b/src/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
new file mode 100644
index 0000000..0aa3849
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
@@ -0,0 +1,359 @@
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-0_2.36.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/atk1.0/libatk1.0-dev_2.36.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.38.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.38.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/attr/libattr1_2.4.48-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/audit/libaudit1_3.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-client3_0.8-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/a/avahi/libavahi-common3_0.8-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth3_5.55-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/bluez/libbluetooth-dev_5.55-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2_1.16.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/colord/libcolord2_1.4.5-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2_2.3.3op2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl3-gnutls_7.74.0-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/curl/libcurl4-gnutls-dev_7.74.0-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-3_1.12.20-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/d/dbus/libdbus-1-dev_1.12.20-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/e2fsprogs/libcom-err2_1.46.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf1_0.183-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/elfutils/libelf-dev_0.183-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1_2.2.10-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/e/expat/libexpat1-dev_2.2.10-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac8_1.3.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/flac/libflac-dev_1.3.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi0_1.0.8-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gcc-6/libgcc1_6.3.0-18+deb9u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-0_2.66.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6_2.31-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/glibc/libc6-dev_2.31-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.0-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls30_3.7.0-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.0-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.0-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.0-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/i/icu/libicu67_67.1-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/krb5-multidev_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libgssrpc4_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libk5crypto3_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkdb5-10_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-3_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5-dev_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/k/krb5/libkrb5support0_1.18.3-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator1_0.4.92-8_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-8_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-8_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-8_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap2_2.44-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdeflate/libdeflate0_1.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy0_1.5.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi6_3.2.1-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi7_3.3-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libf/libffi/libffi-dev_3.3-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libice/libice6_1.0.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libidn/libidn11_1.33-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libindicator/libindicator7_0.5.0-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput10_1.16.4-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libm/libmd/libmd0_1.0.3-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1_3.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1_3.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsm/libsm6_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai0_0.1.28-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libthai/libthai-dev_0.1.28-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva2_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-dev_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-drm2_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-glx2_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-wayland2_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libva/libva-x11-2_2.10.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx6_1.9.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libv/libvpx/libvpx-dev_1.9.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwacom/libwacom2_1.8-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp6_0.6.1-2+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-6_1.7.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-dev_1.7.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb1_1.7.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau6_1.0.9-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi6_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.3+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.3+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss1_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt6_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/linux/linux-libc-dev_5.10.19-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lz4/liblz4-1_1.9.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm1_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgbm-dev_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libglapi-mesa_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mesa/mesa-common-dev_20.3.4-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libhogweed6_3.7-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nettle/libnettle8_3.7-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3_3.61-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/n/nss/libnss3-dev_3.61-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openldap/libldap-2.4-2_2.4.57+dfsg-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl1.1_1.1.1j-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/openssl/libssl-dev_1.1.1j-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus0_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g_1.4.0-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pam/libpam0g-dev_1.4.0-6_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci3_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-dev_10.36-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix0_10.22-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.19-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.19-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.19-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd2_0.10.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.10.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/speech-dispatcher/speech-dispatcher_0.10.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libsystemd0_247.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev1_247.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/s/systemd/libudev-dev_247.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/t/tiff/libtiff5_4.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/unbound/libunbound8_1.13.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid1_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libblkid-dev_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount1_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libmount-dev_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/libuuid1_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/u/util-linux/uuid-dev_2.36.1-7_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan1_1.2.162.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/v/vulkan-loader/libvulkan-dev_1.2.162.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-client0_1.19.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-cursor0_1.19.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-dev_1.19.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-egl1_1.19.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland/libwayland-server0_1.19.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft2_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xft/libxft-dev_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-input-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-kb-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-randr-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-record-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-render-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xext-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xorgproto/x11proto-xinerama-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/x/xz-utils/liblzma5_5.2.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20210309T203820Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2_mipsel.deb
diff --git a/src/build/linux/sysroot_scripts/install-sysroot.py b/src/build/linux/sysroot_scripts/install-sysroot.py
new file mode 100755
index 0000000..f8b7906
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install Debian sysroots for building chromium.
+"""
+
+# The sysroot is needed to ensure that binaries that get built will run on
+# the oldest stable version of Debian that we currently support.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks, this script is a no-op on non-Linux platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of the Debian archive, but for consistency we use a pre-built root
+# image (we don't want upstream changes to Debian to affect the chromium build
+# until we choose to pull them in). The images will normally need to be rebuilt
+# every time chrome's build dependencies are changed, but should also be
+# updated periodically to include upstream security fixes from Debian.
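+#
+# Typical invocations (the flags are defined by the option parser below):
+#   install-sysroot.py --arch=amd64      # install the sysroot for one arch
+#   install-sysroot.py --all             # install every supported sysroot
+#   install-sysroot.py --print-hash=arm  # print the sysroot tarball SHA-1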
+
+from __future__ import print_function
+
+import hashlib
+import json
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+try:
+    # For Python 3.0 and later
+    from urllib.request import urlopen
+except ImportError:
+    # Fall back to Python 2's urllib2
+    from urllib2 import urlopen
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+URL_PREFIX = 'https://commondatastorage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+
+VALID_ARCHS = ('arm', 'arm64', 'i386', 'amd64', 'mips', 'mips64el')
+
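+# Map Chromium build architecture names to the Debian architecture names used
+# by the sysroot images.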
+ARCH_TRANSLATIONS = {
+    'x64': 'amd64',
+    'x86': 'i386',
+    'mipsel': 'mips',
+    'mips64': 'mips64el',
+}
+
+DEFAULT_TARGET_PLATFORM = 'sid'
+
+class Error(Exception):
+  pass
+
+
+def GetSha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1 MB chunks so the whole file isn't loaded into memory at once.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def main(args):
+  parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
+  parser.add_option('--arch',
+                    help='Sysroot architecture: %s' % ', '.join(VALID_ARCHS))
+  parser.add_option('--all', action='store_true',
+                    help='Install all sysroot images (useful when updating the'
+                         ' images)')
+  parser.add_option('--print-hash',
+                    help='Print the hash of the sysroot for the given arch.')
+  options, _ = parser.parse_args(args)
+
+  if options.print_hash:
+    arch = options.print_hash
+    print(GetSysrootDict(DEFAULT_TARGET_PLATFORM,
+                         ARCH_TRANSLATIONS.get(arch, arch))['Sha1Sum'])
+    return 0
+  if options.arch:
+    InstallSysroot(DEFAULT_TARGET_PLATFORM,
+                   ARCH_TRANSLATIONS.get(options.arch, options.arch))
+  elif options.all:
+    for arch in VALID_ARCHS:
+      InstallSysroot(DEFAULT_TARGET_PLATFORM, arch)
+  else:
+    print('You must specify one of --arch, --all, or --print-hash.')
+    return 1
+
+  return 0
+
+
+def GetSysrootDict(target_platform, target_arch):
+  if target_arch not in VALID_ARCHS:
+    raise Error('Unknown architecture: %s' % target_arch)
+
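+  # sysroots.json maps '<platform>_<arch>' keys (e.g. 'sid_amd64') to dicts
+  # with 'Tarball', 'Sha1Sum', and 'SysrootDir' entries, which are used below.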
+  sysroots_file = os.path.join(SCRIPT_DIR, 'sysroots.json')
+  sysroots = json.load(open(sysroots_file))
+  sysroot_key = '%s_%s' % (target_platform, target_arch)
+  if sysroot_key not in sysroots:
+    raise Error('No sysroot for: %s %s' % (target_platform, target_arch))
+  return sysroots[sysroot_key]
+
+
+def InstallSysroot(target_platform, target_arch):
+  sysroot_dict = GetSysrootDict(target_platform, target_arch)
+  tarball_filename = sysroot_dict['Tarball']
+  tarball_sha1sum = sysroot_dict['Sha1Sum']
+  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+  # it on every build.
+  linux_dir = os.path.dirname(SCRIPT_DIR)
+  sysroot = os.path.join(linux_dir, sysroot_dict['SysrootDir'])
+
+  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, tarball_sha1sum,
+                         tarball_filename)
+
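+  # The stamp file records the URL of the sysroot last installed here; if it
+  # still matches, the sysroot is already current and the download is skipped.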
+  stamp = os.path.join(sysroot, '.stamp')
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        return
+
+  print('Installing Debian %s %s root image: %s' %
+        (target_platform, target_arch, sysroot))
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, tarball_filename)
+  print('Downloading %s' % url)
+  sys.stdout.flush()
+  sys.stderr.flush()
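+  # Try the download up to three times.  The 'else' clause of the for loop
+  # runs only if no attempt succeeded (the loop never hit 'break').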
+  for _ in range(3):
+    try:
+      response = urlopen(url)
+      with open(tarball, "wb") as f:
+        f.write(response.read())
+      break
+    except Exception:  # Likely transient (e.g. a network error); retry.
+      pass
+  else:
+    raise Error('Failed to download %s' % url)
+  sha1sum = GetSha1(tarball)
+  if sha1sum != tarball_sha1sum:
+    raise Error('Tarball sha1sum is wrong. '
+                'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except Error as e:
+    sys.stderr.write(str(e) + '\n')
+    sys.exit(1)
diff --git a/src/build/linux/sysroot_scripts/libdbus-1-3-symbols b/src/build/linux/sysroot_scripts/libdbus-1-3-symbols
new file mode 100644
index 0000000..28050aa
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/libdbus-1-3-symbols
@@ -0,0 +1,235 @@
+libdbus-1.so.3 libdbus-1-3 #MINVER#
+ dbus_address_entries_free@Base 1.0.2
+ dbus_address_entry_get_method@Base 1.0.2
+ dbus_address_entry_get_value@Base 1.0.2
+ dbus_address_escape_value@Base 1.0.2
+ dbus_address_unescape_value@Base 1.0.2
+ dbus_bus_add_match@Base 1.0.2
+ dbus_bus_get@Base 1.0.2
+ dbus_bus_get_id@Base 1.1.1
+ dbus_bus_get_private@Base 1.0.2
+ dbus_bus_get_unique_name@Base 1.0.2
+ dbus_bus_get_unix_user@Base 1.0.2
+ dbus_bus_name_has_owner@Base 1.0.2
+ dbus_bus_register@Base 1.0.2
+ dbus_bus_release_name@Base 1.0.2
+ dbus_bus_remove_match@Base 1.0.2
+ dbus_bus_request_name@Base 1.0.2
+ dbus_bus_set_unique_name@Base 1.0.2
+ dbus_bus_start_service_by_name@Base 1.0.2
+ dbus_connection_add_filter@Base 1.0.2
+ dbus_connection_allocate_data_slot@Base 1.0.2
+ dbus_connection_borrow_message@Base 1.0.2
+ dbus_connection_can_send_type@Base 1.3.1
+ dbus_connection_close@Base 1.0.2
+ dbus_connection_dispatch@Base 1.0.2
+ dbus_connection_flush@Base 1.0.2
+ dbus_connection_free_data_slot@Base 1.0.2
+ dbus_connection_free_preallocated_send@Base 1.0.2
+ dbus_connection_get_adt_audit_session_data@Base 1.2.4
+ dbus_connection_get_data@Base 1.0.2
+ dbus_connection_get_dispatch_status@Base 1.0.2
+ dbus_connection_get_is_anonymous@Base 1.1.1
+ dbus_connection_get_is_authenticated@Base 1.0.2
+ dbus_connection_get_is_connected@Base 1.0.2
+ dbus_connection_get_max_message_size@Base 1.0.2
+ dbus_connection_get_max_message_unix_fds@Base 1.3.1
+ dbus_connection_get_max_received_size@Base 1.0.2
+ dbus_connection_get_max_received_unix_fds@Base 1.3.1
+ dbus_connection_get_object_path_data@Base 1.0.2
+ dbus_connection_get_outgoing_size@Base 1.0.2
+ dbus_connection_get_outgoing_unix_fds@Base 1.3.1
+ dbus_connection_get_server_id@Base 1.1.1
+ dbus_connection_get_socket@Base 1.0.2
+ dbus_connection_get_unix_fd@Base 1.0.2
+ dbus_connection_get_unix_process_id@Base 1.0.2
+ dbus_connection_get_unix_user@Base 1.0.2
+ dbus_connection_get_windows_user@Base 1.1.1
+ dbus_connection_has_messages_to_send@Base 1.0.2
+ dbus_connection_list_registered@Base 1.0.2
+ dbus_connection_open@Base 1.0.2
+ dbus_connection_open_private@Base 1.0.2
+ dbus_connection_pop_message@Base 1.0.2
+ dbus_connection_preallocate_send@Base 1.0.2
+ dbus_connection_read_write@Base 1.0.2
+ dbus_connection_read_write_dispatch@Base 1.0.2
+ dbus_connection_ref@Base 1.0.2
+ dbus_connection_register_fallback@Base 1.0.2
+ dbus_connection_register_object_path@Base 1.0.2
+ dbus_connection_remove_filter@Base 1.0.2
+ dbus_connection_return_message@Base 1.0.2
+ dbus_connection_send@Base 1.0.2
+ dbus_connection_send_preallocated@Base 1.0.2
+ dbus_connection_send_with_reply@Base 1.0.2
+ dbus_connection_send_with_reply_and_block@Base 1.0.2
+ dbus_connection_set_allow_anonymous@Base 1.1.1
+ dbus_connection_set_change_sigpipe@Base 1.0.2
+ dbus_connection_set_data@Base 1.0.2
+ dbus_connection_set_dispatch_status_function@Base 1.0.2
+ dbus_connection_set_exit_on_disconnect@Base 1.0.2
+ dbus_connection_set_max_message_size@Base 1.0.2
+ dbus_connection_set_max_message_unix_fds@Base 1.3.1
+ dbus_connection_set_max_received_size@Base 1.0.2
+ dbus_connection_set_max_received_unix_fds@Base 1.3.1
+ dbus_connection_set_route_peer_messages@Base 1.0.2
+ dbus_connection_set_timeout_functions@Base 1.0.2
+ dbus_connection_set_unix_user_function@Base 1.0.2
+ dbus_connection_set_wakeup_main_function@Base 1.0.2
+ dbus_connection_set_watch_functions@Base 1.0.2
+ dbus_connection_set_windows_user_function@Base 1.1.1
+ dbus_connection_steal_borrowed_message@Base 1.0.2
+ dbus_connection_try_register_fallback@Base 1.1.4
+ dbus_connection_try_register_object_path@Base 1.1.4
+ dbus_connection_unref@Base 1.0.2
+ dbus_connection_unregister_object_path@Base 1.0.2
+ dbus_error_free@Base 1.0.2
+ dbus_error_has_name@Base 1.0.2
+ dbus_error_init@Base 1.0.2
+ dbus_error_is_set@Base 1.0.2
+ dbus_free@Base 1.0.2
+ dbus_free_string_array@Base 1.0.2
+ dbus_get_local_machine_id@Base 1.0.2
+ dbus_get_version@Base 1.1.4
+ dbus_internal_do_not_use_create_uuid@Base 1.0.2
+ dbus_internal_do_not_use_get_uuid@Base 1.0.2
+ dbus_malloc0@Base 1.0.2
+ dbus_malloc@Base 1.0.2
+ dbus_message_allocate_data_slot@Base 1.0.2
+ dbus_message_append_args@Base 1.0.2
+ dbus_message_append_args_valist@Base 1.0.2
+ dbus_message_contains_unix_fds@Base 1.3.1
+ dbus_message_copy@Base 1.0.2
+ dbus_message_demarshal@Base 1.1.1
+ dbus_message_demarshal_bytes_needed@Base 1.2.14
+ dbus_message_free_data_slot@Base 1.0.2
+ dbus_message_get_args@Base 1.0.2
+ dbus_message_get_args_valist@Base 1.0.2
+ dbus_message_get_auto_start@Base 1.0.2
+ dbus_message_get_data@Base 1.0.2
+ dbus_message_get_destination@Base 1.0.2
+ dbus_message_get_error_name@Base 1.0.2
+ dbus_message_get_interface@Base 1.0.2
+ dbus_message_get_member@Base 1.0.2
+ dbus_message_get_no_reply@Base 1.0.2
+ dbus_message_get_path@Base 1.0.2
+ dbus_message_get_path_decomposed@Base 1.0.2
+ dbus_message_get_reply_serial@Base 1.0.2
+ dbus_message_get_sender@Base 1.0.2
+ dbus_message_get_serial@Base 1.0.2
+ dbus_message_get_signature@Base 1.0.2
+ dbus_message_get_type@Base 1.0.2
+ dbus_message_has_destination@Base 1.0.2
+ dbus_message_has_interface@Base 1.0.2
+ dbus_message_has_member@Base 1.0.2
+ dbus_message_has_path@Base 1.0.2
+ dbus_message_has_sender@Base 1.0.2
+ dbus_message_has_signature@Base 1.0.2
+ dbus_message_is_error@Base 1.0.2
+ dbus_message_is_method_call@Base 1.0.2
+ dbus_message_is_signal@Base 1.0.2
+ dbus_message_iter_abandon_container@Base 1.2.16
+ dbus_message_iter_append_basic@Base 1.0.2
+ dbus_message_iter_append_fixed_array@Base 1.0.2
+ dbus_message_iter_close_container@Base 1.0.2
+ dbus_message_iter_get_arg_type@Base 1.0.2
+ dbus_message_iter_get_array_len@Base 1.0.2
+ dbus_message_iter_get_basic@Base 1.0.2
+ dbus_message_iter_get_element_type@Base 1.0.2
+ dbus_message_iter_get_fixed_array@Base 1.0.2
+ dbus_message_iter_get_signature@Base 1.0.2
+ dbus_message_iter_has_next@Base 1.0.2
+ dbus_message_iter_init@Base 1.0.2
+ dbus_message_iter_init_append@Base 1.0.2
+ dbus_message_iter_next@Base 1.0.2
+ dbus_message_iter_open_container@Base 1.0.2
+ dbus_message_iter_recurse@Base 1.0.2
+ dbus_message_lock@Base 1.2.14
+ dbus_message_marshal@Base 1.1.1
+ dbus_message_new@Base 1.0.2
+ dbus_message_new_error@Base 1.0.2
+ dbus_message_new_error_printf@Base 1.0.2
+ dbus_message_new_method_call@Base 1.0.2
+ dbus_message_new_method_return@Base 1.0.2
+ dbus_message_new_signal@Base 1.0.2
+ dbus_message_ref@Base 1.0.2
+ dbus_message_set_auto_start@Base 1.0.2
+ dbus_message_set_data@Base 1.0.2
+ dbus_message_set_destination@Base 1.0.2
+ dbus_message_set_error_name@Base 1.0.2
+ dbus_message_set_interface@Base 1.0.2
+ dbus_message_set_member@Base 1.0.2
+ dbus_message_set_no_reply@Base 1.0.2
+ dbus_message_set_path@Base 1.0.2
+ dbus_message_set_reply_serial@Base 1.0.2
+ dbus_message_set_sender@Base 1.0.2
+ dbus_message_set_serial@Base 1.2.14
+ dbus_message_type_from_string@Base 1.0.2
+ dbus_message_type_to_string@Base 1.0.2
+ dbus_message_unref@Base 1.0.2
+ dbus_move_error@Base 1.0.2
+ dbus_parse_address@Base 1.0.2
+ dbus_pending_call_allocate_data_slot@Base 1.0.2
+ dbus_pending_call_block@Base 1.0.2
+ dbus_pending_call_cancel@Base 1.0.2
+ dbus_pending_call_free_data_slot@Base 1.0.2
+ dbus_pending_call_get_completed@Base 1.0.2
+ dbus_pending_call_get_data@Base 1.0.2
+ dbus_pending_call_ref@Base 1.0.2
+ dbus_pending_call_set_data@Base 1.0.2
+ dbus_pending_call_set_notify@Base 1.0.2
+ dbus_pending_call_steal_reply@Base 1.0.2
+ dbus_pending_call_unref@Base 1.0.2
+ dbus_realloc@Base 1.0.2
+ dbus_server_allocate_data_slot@Base 1.0.2
+ dbus_server_disconnect@Base 1.0.2
+ dbus_server_free_data_slot@Base 1.0.2
+ dbus_server_get_address@Base 1.0.2
+ dbus_server_get_data@Base 1.0.2
+ dbus_server_get_id@Base 1.1.1
+ dbus_server_get_is_connected@Base 1.0.2
+ dbus_server_listen@Base 1.0.2
+ dbus_server_ref@Base 1.0.2
+ dbus_server_set_auth_mechanisms@Base 1.0.2
+ dbus_server_set_data@Base 1.0.2
+ dbus_server_set_new_connection_function@Base 1.0.2
+ dbus_server_set_timeout_functions@Base 1.0.2
+ dbus_server_set_watch_functions@Base 1.0.2
+ dbus_server_unref@Base 1.0.2
+ dbus_set_error@Base 1.0.2
+ dbus_set_error_const@Base 1.0.2
+ dbus_set_error_from_message@Base 1.0.2
+ dbus_setenv@Base 1.7.6
+ dbus_shutdown@Base 1.0.2
+ dbus_signature_iter_get_current_type@Base 1.0.2
+ dbus_signature_iter_get_element_type@Base 1.0.2
+ dbus_signature_iter_get_signature@Base 1.0.2
+ dbus_signature_iter_init@Base 1.0.2
+ dbus_signature_iter_next@Base 1.0.2
+ dbus_signature_iter_recurse@Base 1.0.2
+ dbus_signature_validate@Base 1.0.2
+ dbus_signature_validate_single@Base 1.0.2
+ dbus_threads_init@Base 1.0.2
+ dbus_threads_init_default@Base 1.0.2
+ dbus_timeout_get_data@Base 1.0.2
+ dbus_timeout_get_enabled@Base 1.0.2
+ dbus_timeout_get_interval@Base 1.0.2
+ dbus_timeout_handle@Base 1.0.2
+ dbus_timeout_set_data@Base 1.0.2
+ dbus_type_is_basic@Base 1.0.2
+ dbus_type_is_container@Base 1.0.2
+ dbus_type_is_fixed@Base 1.0.2
+ dbus_type_is_valid@Base 1.5.0
+ dbus_validate_bus_name@Base 1.5.12
+ dbus_validate_error_name@Base 1.5.12
+ dbus_validate_interface@Base 1.5.12
+ dbus_validate_member@Base 1.5.12
+ dbus_validate_path@Base 1.5.12
+ dbus_validate_utf8@Base 1.5.12
+ dbus_watch_get_data@Base 1.0.2
+ dbus_watch_get_enabled@Base 1.0.2
+ dbus_watch_get_fd@Base 1.0.2
+ dbus_watch_get_flags@Base 1.0.2
+ dbus_watch_get_socket@Base 1.1.1
+ dbus_watch_get_unix_fd@Base 1.1.1
+ dbus_watch_handle@Base 1.0.2
+ dbus_watch_set_data@Base 1.0.2
diff --git a/src/build/linux/sysroot_scripts/libxcomposite1-symbols b/src/build/linux/sysroot_scripts/libxcomposite1-symbols
new file mode 100644
index 0000000..aba31fa
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/libxcomposite1-symbols
@@ -0,0 +1,15 @@
+libXcomposite.so.1 libxcomposite1 #MINVER#
+ XCompositeCreateRegionFromBorderClip@Base 1:0.4.4-1
+ XCompositeExtensionInfo@Base 1:0.4.4-1
+ XCompositeExtensionName@Base 1:0.4.4-1
+ XCompositeFindDisplay@Base 1:0.4.4-1
+ XCompositeGetOverlayWindow@Base 1:0.4.4-1
+ XCompositeNameWindowPixmap@Base 1:0.4.4-1
+ XCompositeQueryExtension@Base 1:0.4.4-1
+ XCompositeQueryVersion@Base 1:0.4.4-1
+ XCompositeRedirectSubwindows@Base 1:0.4.4-1
+ XCompositeRedirectWindow@Base 1:0.4.4-1
+ XCompositeReleaseOverlayWindow@Base 1:0.4.4-1
+ XCompositeUnredirectSubwindows@Base 1:0.4.4-1
+ XCompositeUnredirectWindow@Base 1:0.4.4-1
+ XCompositeVersion@Base 1:0.4.4-1
diff --git a/src/build/linux/sysroot_scripts/libxkbcommon0-symbols b/src/build/linux/sysroot_scripts/libxkbcommon0-symbols
new file mode 100644
index 0000000..5750fc0
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/libxkbcommon0-symbols
@@ -0,0 +1,93 @@
+libxkbcommon.so.0 libxkbcommon0 #MINVER#
+ xkb_context_get_log_level@Base 0.4.1
+ xkb_context_get_log_verbosity@Base 0.4.1
+ xkb_context_get_user_data@Base 0.4.1
+ xkb_context_include_path_append@Base 0.4.1
+ xkb_context_include_path_append_default@Base 0.4.1
+ xkb_context_include_path_clear@Base 0.4.1
+ xkb_context_include_path_get@Base 0.4.1
+ xkb_context_include_path_reset_defaults@Base 0.4.1
+ xkb_context_new@Base 0.4.1
+ xkb_context_num_include_paths@Base 0.4.1
+ xkb_context_ref@Base 0.4.1
+ xkb_context_set_log_fn@Base 0.4.1
+ xkb_context_set_log_level@Base 0.4.1
+ xkb_context_set_log_verbosity@Base 0.4.1
+ xkb_context_set_user_data@Base 0.4.1
+ xkb_context_unref@Base 0.4.1
+ xkb_key_get_syms@Base 0.4.1
+ xkb_key_mod_index_is_consumed@Base 0.4.1
+ xkb_key_mod_mask_remove_consumed@Base 0.4.1
+ xkb_key_num_groups@Base 0.4.1
+ xkb_key_repeats@Base 0.4.1
+ xkb_keymap_get_as_string@Base 0.4.1
+ xkb_keymap_key_for_each@Base 0.4.1
+ xkb_keymap_key_get_syms_by_level@Base 0.4.1
+ xkb_keymap_key_repeats@Base 0.4.1
+ xkb_keymap_layout_get_index@Base 0.4.1
+ xkb_keymap_layout_get_name@Base 0.4.1
+ xkb_keymap_led_get_index@Base 0.4.1
+ xkb_keymap_led_get_name@Base 0.4.1
+ xkb_keymap_max_keycode@Base 0.4.1
+ xkb_keymap_min_keycode@Base 0.4.1
+ xkb_keymap_mod_get_index@Base 0.4.1
+ xkb_keymap_mod_get_name@Base 0.4.1
+ xkb_keymap_new_from_buffer@Base 0.4.1
+ xkb_keymap_new_from_file@Base 0.4.1
+ xkb_keymap_new_from_names@Base 0.4.1
+ xkb_keymap_new_from_string@Base 0.4.1
+ xkb_keymap_num_layouts@Base 0.4.1
+ xkb_keymap_num_layouts_for_key@Base 0.4.1
+ xkb_keymap_num_leds@Base 0.4.1
+ xkb_keymap_num_levels_for_key@Base 0.4.1
+ xkb_keymap_num_mods@Base 0.4.1
+ xkb_keymap_ref@Base 0.4.1
+ xkb_keymap_unref@Base 0.4.1
+ xkb_keysym_from_name@Base 0.4.1
+ xkb_keysym_get_name@Base 0.4.1
+ xkb_keysym_to_utf32@Base 0.4.1
+ xkb_keysym_to_utf8@Base 0.4.1
+ xkb_map_get_as_string@Base 0.4.1
+ xkb_map_group_get_index@Base 0.4.1
+ xkb_map_group_get_name@Base 0.4.1
+ xkb_map_led_get_index@Base 0.4.1
+ xkb_map_led_get_name@Base 0.4.1
+ xkb_map_mod_get_index@Base 0.4.1
+ xkb_map_mod_get_name@Base 0.4.1
+ xkb_map_new_from_file@Base 0.4.1
+ xkb_map_new_from_names@Base 0.4.1
+ xkb_map_new_from_string@Base 0.4.1
+ xkb_map_num_groups@Base 0.4.1
+ xkb_map_num_leds@Base 0.4.1
+ xkb_map_num_mods@Base 0.4.1
+ xkb_map_ref@Base 0.4.1
+ xkb_map_unref@Base 0.4.1
+ xkb_state_get_keymap@Base 0.4.1
+ xkb_state_get_map@Base 0.4.1
+ xkb_state_group_index_is_active@Base 0.4.1
+ xkb_state_group_name_is_active@Base 0.4.1
+ xkb_state_key_get_consumed_mods@Base 0.4.1
+ xkb_state_key_get_layout@Base 0.4.1
+ xkb_state_key_get_level@Base 0.4.1
+ xkb_state_key_get_one_sym@Base 0.4.1
+ xkb_state_key_get_syms@Base 0.4.1
+ xkb_state_key_get_utf32@Base 0.4.1
+ xkb_state_key_get_utf8@Base 0.4.1
+ xkb_state_layout_index_is_active@Base 0.4.1
+ xkb_state_layout_name_is_active@Base 0.4.1
+ xkb_state_led_index_is_active@Base 0.4.1
+ xkb_state_led_name_is_active@Base 0.4.1
+ xkb_state_mod_index_is_active@Base 0.4.1
+ xkb_state_mod_index_is_consumed@Base 0.4.1
+ xkb_state_mod_indices_are_active@Base 0.4.1
+ xkb_state_mod_mask_remove_consumed@Base 0.4.1
+ xkb_state_mod_name_is_active@Base 0.4.1
+ xkb_state_mod_names_are_active@Base 0.4.1
+ xkb_state_new@Base 0.4.1
+ xkb_state_ref@Base 0.4.1
+ xkb_state_serialize_group@Base 0.4.1
+ xkb_state_serialize_layout@Base 0.4.1
+ xkb_state_serialize_mods@Base 0.4.1
+ xkb_state_unref@Base 0.4.1
+ xkb_state_update_key@Base 0.4.1
+ xkb_state_update_mask@Base 0.4.1
diff --git a/src/build/linux/sysroot_scripts/merge-package-lists.py b/src/build/linux/sysroot_scripts/merge-package-lists.py
new file mode 100755
index 0000000..70b3d05
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/merge-package-lists.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merge package entries from different package lists.
+"""
+
+# This is used for replacing packages in eg. sid with those in experimental.
+# The updated packages are ABI compatible, but include security patches, so we
+# should use those instead in our sysroots.
+
+import sys
+
+if len(sys.argv) != 2:
+  sys.exit(1)
+
+packages = {}
+
+def AddPackagesFromFile(list_file):
+  # Each entry is three lines ("Package:", "Filename:", "SHA256:"), as
+  # produced by ExtractPackageXz in sysroot-creator.sh.  An entry read later
+  # replaces an earlier entry with the same "Package:" line.
+  lines = list_file.readlines()
+  if len(lines) % 3 != 0:
+    sys.exit(1)
+  for i in range(0, len(lines), 3):
+    packages[lines[i]] = (lines[i + 1], lines[i + 2])
+
+AddPackagesFromFile(open(sys.argv[1], 'r'))
+AddPackagesFromFile(sys.stdin)
+
+output_file = open(sys.argv[1], 'w')
+
+for (package, (filename, sha256)) in packages.items():
+  output_file.write(package + filename + sha256)
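+
+# Illustrative usage, matching how sysroot-creator.sh invokes this script
+# (file names follow its Packages.<dist>_<repo>_<arch> naming).  Entries read
+# from stdin replace same-named entries already present in the on-disk list:
+#
+#   cat Packages.sid_main_amd64 | ./merge-package-lists.py Packages.sid_amd64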
diff --git a/src/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/src/build/linux/sysroot_scripts/sysroot-creator-sid.sh
new file mode 100755
index 0000000..86c311c
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/sysroot-creator-sid.sh
@@ -0,0 +1,463 @@
+#!/bin/bash
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+DISTRO=debian
+DIST=sid
+
+# Keep the "experimental" repo before the "sid" repo.  There are some packages
+# that are currently only available in experimental like libgtk-4-1, but if it
+# were to be placed first, experimental (unreleased) versions of other packages
+# like libc6 would take precedence over the sid (released) versions.  While this
+# may be useful for certain kinds of development, the standard sysroots should
+# continue to be shipped only with released packages.
+# Also keep "stretch" before "sid".  For now, it's needed to bring back
+# libgnome-keyring-dev which has since been deprecated and removed from sid.
+# It will be needed until gnome keyring is removed (http://crbug.com/466975 and
+# http://crbug.com/355223).
+ARCHIVE_URL="https://snapshot.debian.org/archive/debian"
+ARCHIVE_TIMESTAMP=20210309T203820Z
+APT_SOURCES_LIST="${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ stretch main
+${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ experimental main
+${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ sid main"
+
+# gpg keyring file generated using generate_debian_archive_unstable.sh
+KEYRING_FILE="${SCRIPT_DIR}/debian_archive_unstable.gpg"
+
+HAS_ARCH_AMD64=1
+HAS_ARCH_I386=1
+HAS_ARCH_ARM=1
+HAS_ARCH_ARM64=1
+HAS_ARCH_ARMEL=1
+HAS_ARCH_MIPS=1
+HAS_ARCH_MIPS64EL=1
+
+# Sysroot packages: these are the packages needed to build Chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
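+# (Illustrative invocation, run from this directory:
+#   ./sysroot-creator-sid.sh GeneratePackageListAmd64 generated_package_lists/sid.amd64
+# and similarly for each other architecture.)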
+DEBIAN_PACKAGES="\
+  comerr-dev
+  krb5-multidev
+  libappindicator-dev
+  libappindicator1
+  libappindicator3-1
+  libappindicator3-dev
+  libasound2
+  libasound2-dev
+  libasyncns0
+  libatk-bridge2.0-0
+  libatk-bridge2.0-dev
+  libatk1.0-0
+  libatk1.0-dev
+  libatomic1
+  libatspi2.0-0
+  libatspi2.0-dev
+  libattr1
+  libaudit1
+  libavahi-client3
+  libavahi-common3
+  libblkid-dev
+  libblkid1
+  libbluetooth-dev
+  libbluetooth3
+  libbrotli-dev
+  libbrotli1
+  libbsd0
+  libc6
+  libc6-dev
+  libcairo-gobject2
+  libcairo-script-interpreter2
+  libcairo2
+  libcairo2-dev
+  libcap-dev
+  libcap-ng0
+  libcap2
+  libcolord2
+  libcom-err2
+  libcups2
+  libcups2-dev
+  libcupsimage2
+  libcupsimage2-dev
+  libcurl3-gnutls
+  libcurl4-gnutls-dev
+  libdatrie-dev
+  libdatrie1
+  libdb5.3
+  libdbus-1-3
+  libdbus-1-dev
+  libdbus-glib-1-2
+  libdbusmenu-glib-dev
+  libdbusmenu-glib4
+  libdbusmenu-gtk3-4
+  libdbusmenu-gtk4
+  libdeflate0
+  libdrm-amdgpu1
+  libdrm-dev
+  libdrm-nouveau2
+  libdrm-radeon1
+  libdrm2
+  libegl-dev
+  libegl1
+  libegl1-mesa
+  libegl1-mesa-dev
+  libelf-dev
+  libelf1
+  libepoxy-dev
+  libepoxy0
+  libevdev-dev
+  libevdev2
+  libevent-2.1-7
+  libexpat1
+  libexpat1-dev
+  libffi-dev
+  libffi6
+  libffi7
+  libflac-dev
+  libflac8
+  libfontconfig-dev
+  libfontconfig1
+  libfreetype-dev
+  libfreetype6
+  libfribidi-dev
+  libfribidi0
+  libgbm-dev
+  libgbm1
+  libgcc-10-dev
+  libgcc1
+  libgcrypt20
+  libgcrypt20-dev
+  libgdk-pixbuf-2.0-0
+  libgdk-pixbuf-2.0-dev
+  libgl-dev
+  libgl1
+  libgl1-mesa-dev
+  libgl1-mesa-glx
+  libglapi-mesa
+  libgles-dev
+  libgles1
+  libgles2
+  libglib2.0-0
+  libglib2.0-dev
+  libglvnd-dev
+  libglvnd0
+  libglx-dev
+  libglx0
+  libgmp10
+  libgnome-keyring-dev
+  libgnome-keyring0
+  libgnutls-dane0
+  libgnutls-openssl27
+  libgnutls28-dev
+  libgnutls30
+  libgnutlsxx28
+  libgomp1
+  libgpg-error-dev
+  libgpg-error0
+  libgraphene-1.0-0
+  libgraphene-1.0-dev
+  libgraphite2-3
+  libgraphite2-dev
+  libgssapi-krb5-2
+  libgssrpc4
+  libgtk-3-0
+  libgtk-3-dev
+  libgtk2.0-0
+  libgudev-1.0-0
+  libharfbuzz-dev
+  libharfbuzz-gobject0
+  libharfbuzz-icu0
+  libharfbuzz0b
+  libhogweed6
+  libice6
+  libicu-le-hb0
+  libicu67
+  libidl-2-0
+  libidn11
+  libidn2-0
+  libindicator3-7
+  libindicator7
+  libinput-dev
+  libinput10
+  libjbig0
+  libjpeg62-turbo
+  libjpeg62-turbo-dev
+  libjson-glib-1.0-0
+  libjsoncpp-dev
+  libjsoncpp24
+  libk5crypto3
+  libkadm5clnt-mit12
+  libkadm5srv-mit12
+  libkdb5-10
+  libkeyutils1
+  libkrb5-3
+  libkrb5-dev
+  libkrb5support0
+  liblcms2-2
+  libldap-2.4-2
+  libltdl7
+  liblz4-1
+  liblzma5
+  liblzo2-2
+  libmd0
+  libminizip-dev
+  libminizip1
+  libmount-dev
+  libmount1
+  libmtdev1
+  libncurses-dev
+  libncurses6
+  libncursesw6
+  libnettle8
+  libnghttp2-14
+  libnsl2
+  libnspr4
+  libnspr4-dev
+  libnss-db
+  libnss3
+  libnss3-dev
+  libogg-dev
+  libogg0
+  libopengl0
+  libopus-dev
+  libopus0
+  libp11-kit0
+  libpam0g
+  libpam0g-dev
+  libpango-1.0-0
+  libpango1.0-dev
+  libpangocairo-1.0-0
+  libpangoft2-1.0-0
+  libpangox-1.0-0
+  libpangoxft-1.0-0
+  libpci-dev
+  libpci3
+  libpciaccess0
+  libpcre16-3
+  libpcre2-16-0
+  libpcre2-32-0
+  libpcre2-8-0
+  libpcre2-dev
+  libpcre2-posix0
+  libpcre2-posix2
+  libpcre3
+  libpcre3-dev
+  libpcre32-3
+  libpcrecpp0v5
+  libpipewire-0.3-0
+  libpipewire-0.3-dev
+  libpixman-1-0
+  libpixman-1-dev
+  libpng-dev
+  libpng16-16
+  libpsl5
+  libpthread-stubs0-dev
+  libpulse-dev
+  libpulse-mainloop-glib0
+  libpulse0
+  libre2-9
+  libre2-dev
+  librest-0.7-0
+  librtmp1
+  libsasl2-2
+  libselinux1
+  libselinux1-dev
+  libsepol1
+  libsepol1-dev
+  libsm6
+  libsnappy-dev
+  libsnappy1v5
+  libsndfile1
+  libsoup-gnome2.4-1
+  libsoup2.4-1
+  libspa-0.2-dev
+  libspeechd-dev
+  libspeechd2
+  libsqlite3-0
+  libssh2-1
+  libssl-dev
+  libssl1.1
+  libstdc++-10-dev
+  libstdc++6
+  libsystemd0
+  libtasn1-6
+  libthai-dev
+  libthai0
+  libtiff5
+  libtinfo6
+  libtirpc3
+  libudev-dev
+  libudev1
+  libunbound8
+  libunistring2
+  libuuid1
+  libva-dev
+  libva-drm2
+  libva-glx2
+  libva-wayland2
+  libva-x11-2
+  libva2
+  libvorbis0a
+  libvorbisenc2
+  libvpx-dev
+  libvpx6
+  libvulkan-dev
+  libvulkan1
+  libwacom2
+  libwayland-client0
+  libwayland-cursor0
+  libwayland-dev
+  libwayland-egl1
+  libwayland-egl1-mesa
+  libwayland-server0
+  libwebp-dev
+  libwebp6
+  libwebpdemux2
+  libwebpmux3
+  libwrap0
+  libx11-6
+  libx11-dev
+  libx11-xcb-dev
+  libx11-xcb1
+  libxau-dev
+  libxau6
+  libxcb-dri2-0
+  libxcb-dri2-0-dev
+  libxcb-dri3-0
+  libxcb-dri3-dev
+  libxcb-glx0
+  libxcb-glx0-dev
+  libxcb-present-dev
+  libxcb-present0
+  libxcb-render0
+  libxcb-render0-dev
+  libxcb-shm0
+  libxcb-shm0-dev
+  libxcb-sync1
+  libxcb-xfixes0
+  libxcb1
+  libxcb1-dev
+  libxcomposite-dev
+  libxcomposite1
+  libxcursor-dev
+  libxcursor1
+  libxdamage-dev
+  libxdamage1
+  libxdmcp-dev
+  libxdmcp6
+  libxext-dev
+  libxext6
+  libxfixes-dev
+  libxfixes3
+  libxft-dev
+  libxft2
+  libxi-dev
+  libxi6
+  libxinerama-dev
+  libxinerama1
+  libxkbcommon-dev
+  libxkbcommon0
+  libxml2
+  libxml2-dev
+  libxrandr-dev
+  libxrandr2
+  libxrender-dev
+  libxrender1
+  libxshmfence-dev
+  libxshmfence1
+  libxslt1-dev
+  libxslt1.1
+  libxss-dev
+  libxss1
+  libxt-dev
+  libxt6
+  libxtst-dev
+  libxtst6
+  libxxf86vm-dev
+  libxxf86vm1
+  libzstd1
+  linux-libc-dev
+  mesa-common-dev
+  shared-mime-info
+  speech-dispatcher
+  uuid-dev
+  wayland-protocols
+  x11proto-composite-dev
+  x11proto-damage-dev
+  x11proto-dev
+  x11proto-fixes-dev
+  x11proto-input-dev
+  x11proto-kb-dev
+  x11proto-randr-dev
+  x11proto-record-dev
+  x11proto-render-dev
+  x11proto-scrnsaver-dev
+  x11proto-xext-dev
+  x11proto-xinerama-dev
+  zlib1g
+  zlib1g-dev
+"
+
+DEBIAN_PACKAGES_AMD64="
+  libgtk-4-1
+  libgtk-4-dev
+  liblsan0
+  libtsan0
+"
+
+DEBIAN_PACKAGES_X86="
+  libasan6
+  libcilkrts5
+  libdrm-intel1
+  libgtk-4-1
+  libgtk-4-dev
+  libitm1
+  libmpx2
+  libquadmath0
+  libubsan1
+"
+
+DEBIAN_PACKAGES_ARM="
+  libasan6
+  libdrm-etnaviv1
+  libdrm-exynos1
+  libdrm-freedreno1
+  libdrm-omap1
+  libdrm-tegra0
+  libgtk-4-1
+  libgtk-4-dev
+  libubsan1
+"
+
+DEBIAN_PACKAGES_ARM64="
+  libasan6
+  libdrm-etnaviv1
+  libdrm-freedreno1
+  libdrm-tegra0
+  libgmp10
+  libgtk-4-1
+  libgtk-4-dev
+  libitm1
+  liblsan0
+  libthai0
+  libtsan0
+  libubsan1
+"
+
+DEBIAN_PACKAGES_ARMEL="
+  libasan6
+  libdrm-exynos1
+  libdrm-freedreno1
+  libdrm-omap1
+  libdrm-tegra0
+  libgtk-4-1
+  libgtk-4-dev
+  libubsan1
+"
+
+DEBIAN_PACKAGES_MIPS64EL="
+"
+
+. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/src/build/linux/sysroot_scripts/sysroot-creator.sh b/src/build/linux/sysroot_scripts/sysroot-creator.sh
new file mode 100644
index 0000000..fda3de4
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/sysroot-creator.sh
@@ -0,0 +1,968 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script should not be run directly but sourced by the other
+# scripts (e.g. sysroot-creator-sid.sh).  It's up to the parent scripts
+# to define certain environment variables: e.g.
+#  DISTRO=debian
+#  DIST=sid
+#  # Similar in syntax to /etc/apt/sources.list
+#  APT_SOURCES_LIST="http://ftp.us.debian.org/debian/ sid main"
+#  KEYRING_FILE=debian-archive-sid-stable.gpg
+#  DEBIAN_PACKAGES="gcc libz libssl"
+
+#@ This script builds Debian/Ubuntu sysroot images for building Google Chrome.
+#@
+#@  Generally this script is invoked as:
+#@  sysroot-creator-<flavour>.sh <mode> <args>*
+#@  Available modes are shown below.
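+#@  For example (illustrative):
+#@    sysroot-creator-sid.sh BuildSysrootAmd64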
+#@
+#@ List of modes:
+
+######################################################################
+# Config
+######################################################################
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+if [ -z "${DIST:-}" ]; then
+  echo "error: DIST not defined"
+  exit 1
+fi
+
+if [ -z "${KEYRING_FILE:-}" ]; then
+  echo "error: KEYRING_FILE not defined"
+  exit 1
+fi
+
+if [ -z "${DEBIAN_PACKAGES:-}" ]; then
+  echo "error: DEBIAN_PACKAGES not defined"
+  exit 1
+fi
+
+readonly HAS_ARCH_AMD64=${HAS_ARCH_AMD64:=0}
+readonly HAS_ARCH_I386=${HAS_ARCH_I386:=0}
+readonly HAS_ARCH_ARM=${HAS_ARCH_ARM:=0}
+readonly HAS_ARCH_ARM64=${HAS_ARCH_ARM64:=0}
+readonly HAS_ARCH_ARMEL=${HAS_ARCH_ARMEL:=0}
+readonly HAS_ARCH_MIPS=${HAS_ARCH_MIPS:=0}
+readonly HAS_ARCH_MIPS64EL=${HAS_ARCH_MIPS64EL:=0}
+
+readonly REQUIRED_TOOLS="curl xzcat"
+
+######################################################################
+# Package Config
+######################################################################
+
+readonly PACKAGES_EXT=xz
+readonly RELEASE_FILE="Release"
+readonly RELEASE_FILE_GPG="Release.gpg"
+
+readonly DEBIAN_DEP_LIST_AMD64="generated_package_lists/${DIST}.amd64"
+readonly DEBIAN_DEP_LIST_I386="generated_package_lists/${DIST}.i386"
+readonly DEBIAN_DEP_LIST_ARM="generated_package_lists/${DIST}.arm"
+readonly DEBIAN_DEP_LIST_ARM64="generated_package_lists/${DIST}.arm64"
+readonly DEBIAN_DEP_LIST_ARMEL="generated_package_lists/${DIST}.armel"
+readonly DEBIAN_DEP_LIST_MIPS="generated_package_lists/${DIST}.mipsel"
+readonly DEBIAN_DEP_LIST_MIPS64EL="generated_package_lists/${DIST}.mips64el"
+
+
+######################################################################
+# Helper
+######################################################################
+
+Banner() {
+  echo "######################################################################"
+  echo $*
+  echo "######################################################################"
+}
+
+
+SubBanner() {
+  echo "----------------------------------------------------------------------"
+  echo $*
+  echo "----------------------------------------------------------------------"
+}
+
+
+Usage() {
+  egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
+}
+
+
+DownloadOrCopyNonUniqueFilename() {
+  # Use this function instead of DownloadOrCopy when the url uniquely
+  # identifies the file, but the filename (excluding the directory)
+  # does not.
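+  # For example (illustrative), both the stretch and sid dists serve a file
+  # named "Release"; hashing the full URL keeps their cached copies distinct.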
+  local url="$1"
+  local dest="$2"
+
+  local hash="$(echo "$url" | sha256sum | cut -d' ' -f1)"
+
+  DownloadOrCopy "${url}" "${dest}.${hash}"
+  # cp the file to prevent having to redownload it, but mv it to the
+  # final location so that it's atomic.
+  cp "${dest}.${hash}" "${dest}.$$"
+  mv "${dest}.$$" "${dest}"
+}
+
+DownloadOrCopy() {
+  if [ -f "$2" ] ; then
+    echo "$2 already in place"
+    return
+  fi
+
+  HTTP=0
+  echo "$1" | grep -Eqs '^https?://' && HTTP=1
+  if [ "$HTTP" = "1" ]; then
+    SubBanner "downloading from $1 -> $2"
+    # Appending the "$$" shell pid is necessary here to prevent concurrent
+    # instances of sysroot-creator.sh from trying to write to the same file.
+    local temp_file="${2}.partial.$$"
+    # curl --retry doesn't retry when the page gives a 4XX error, so we need to
+    # manually rerun.
+    for i in {1..10}; do
+      # --create-dirs is added in case there are slashes in the filename, as can
+      # happen with the "debian/security" release class.
+      local http_code=$(curl -L "$1" --create-dirs -o "${temp_file}" \
+                        -w "%{http_code}")
+      if [ ${http_code} -eq 200 ]; then
+        break
+      fi
+      echo "Bad HTTP code ${http_code} when downloading $1"
+      rm -f "${temp_file}"
+      sleep $i
+    done
+    if [ ! -f "${temp_file}" ]; then
+      exit 1
+    fi
+    mv "${temp_file}" $2
+  else
+    SubBanner "copying from $1"
+    cp "$1" "$2"
+  fi
+}
+
+
+SetEnvironmentVariables() {
+  case $1 in
+    *Amd64)
+      ARCH=AMD64
+      ;;
+    *I386)
+      ARCH=I386
+      ;;
+    *Mips64el)
+      ARCH=MIPS64EL
+      ;;
+    *Mips)
+      ARCH=MIPS
+      ;;
+    *ARM)
+      ARCH=ARM
+      ;;
+    *ARM64)
+      ARCH=ARM64
+      ;;
+    *ARMEL)
+      ARCH=ARMEL
+      ;;
+    *)
+      echo "ERROR: Unable to determine architecture based on: $1"
+      exit 1
+      ;;
+  esac
+  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
+}
+
+
+# some sanity checks to make sure this script is run from the right place
+# with the right tools
+SanityCheck() {
+  Banner "Sanity Checks"
+
+  local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd)
+  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
+  mkdir -p ${BUILD_DIR}
+  echo "Using build directory: ${BUILD_DIR}"
+
+  for tool in ${REQUIRED_TOOLS} ; do
+    if ! which ${tool} > /dev/null ; then
+      echo "Required binary $tool not found."
+      echo "Exiting."
+      exit 1
+    fi
+  done
+
+  # This is where the staging sysroot is.
+  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
+  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tar.xz"
+
+  if ! mkdir -p "${INSTALL_ROOT}" ; then
+    echo "ERROR: ${INSTALL_ROOT} can't be created."
+    exit 1
+  fi
+}
+
+
+ChangeDirectory() {
+  # Change directory to where this script is.
+  cd ${SCRIPT_DIR}
+}
+
+
+ClearInstallDir() {
+  Banner "Clearing dirs in ${INSTALL_ROOT}"
+  rm -rf ${INSTALL_ROOT}/*
+}
+
+
+CreateTarBall() {
+  Banner "Creating tarball ${TARBALL}"
+  tar -I "xz -9 -T0" -cf ${TARBALL} -C ${INSTALL_ROOT} .
+}
+
+ExtractPackageXz() {
+  local src_file="$1"
+  local dst_file="$2"
+  local repo="$3"
+  xzcat "${src_file}" | egrep '^(Package:|Filename:|SHA256:) ' |
+    sed "s|Filename: |Filename: ${repo}|" > "${dst_file}"
+}
+
+GeneratePackageListDist() {
+  local arch="$1"
+  set -- $2
+  local repo="$1"
+  local dist="$2"
+  local repo_name="$3"
+
+  TMP_PACKAGE_LIST="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}"
+  local repo_basedir="${repo}/dists/${dist}"
+  local package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}.${PACKAGES_EXT}"
+  local package_file_arch="${repo_name}/binary-${arch}/Packages.${PACKAGES_EXT}"
+  local package_list_arch="${repo_basedir}/${package_file_arch}"
+
+  DownloadOrCopyNonUniqueFilename "${package_list_arch}" "${package_list}"
+  VerifyPackageListing "${package_file_arch}" "${package_list}" ${repo} ${dist}
+  ExtractPackageXz "${package_list}" "${TMP_PACKAGE_LIST}" ${repo}
+}
+
+GeneratePackageListCommon() {
+  local output_file="$1"
+  local arch="$2"
+  local packages="$3"
+
+  local dists="${DIST} ${DIST_UPDATES:-}"
+
+  local list_base="${BUILD_DIR}/Packages.${DIST}_${arch}"
+  > "${list_base}"  # Create (or truncate) a zero-length file.
+  echo "${APT_SOURCES_LIST}" | while read source; do
+    GeneratePackageListDist "${arch}" "${source}"
+    cat "${TMP_PACKAGE_LIST}" | ./merge-package-lists.py "${list_base}"
+  done
+
+  GeneratePackageList "${list_base}" "${output_file}" "${packages}"
+}
+
+GeneratePackageListAmd64() {
+  GeneratePackageListCommon "$1" amd64 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=} ${DEBIAN_PACKAGES_AMD64:=}"
+}
+
+GeneratePackageListI386() {
+  GeneratePackageListCommon "$1" i386 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=}"
+}
+
+GeneratePackageListARM() {
+  GeneratePackageListCommon "$1" armhf "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARM:=}"
+}
+
+GeneratePackageListARM64() {
+  GeneratePackageListCommon "$1" arm64 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARM64:=}"
+}
+
+GeneratePackageListARMEL() {
+  GeneratePackageListCommon "$1" armel "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARMEL:=}"
+}
+
+GeneratePackageListMips() {
+  GeneratePackageListCommon "$1" mipsel "${DEBIAN_PACKAGES}"
+}
+
+GeneratePackageListMips64el() {
+  GeneratePackageListCommon "$1" mips64el "${DEBIAN_PACKAGES}
+  ${DEBIAN_PACKAGES_MIPS64EL:=}"
+}
+
+StripChecksumsFromPackageList() {
+  local package_file="$1"
+  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
+
+######################################################################
+#
+######################################################################
+
+HacksAndPatchesCommon() {
+  local arch=$1
+  local os=$2
+  local strip=$3
+  Banner "Misc Hacks & Patches"
+  # These are linker scripts with absolute pathnames in them,
+  # which we rewrite here.
+  lscripts="${INSTALL_ROOT}/usr/lib/${arch}-${os}/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/${arch}-${os}/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e "s|/usr/lib/${arch}-${os}/||g"  ${lscripts}
+  sed -i -e "s|/lib/${arch}-${os}/||g" ${lscripts}
+
+  # Unversion libdbus and libxkbcommon symbols.  This is required because
+  # libdbus-1-3 and libxkbcommon0 switched from unversioned symbols to versioned
+  # ones, and we must still support distros using the unversioned library.  This
+  # hack can be removed once support for Ubuntu Trusty and Debian Jessie are
+  # dropped.
+  ${strip} -R .gnu.version_d -R .gnu.version \
+    "${INSTALL_ROOT}/lib/${arch}-${os}/libdbus-1.so.3"
+  cp "${SCRIPT_DIR}/libdbus-1-3-symbols" \
+    "${INSTALL_ROOT}/debian/libdbus-1-3/DEBIAN/symbols"
+
+  ${strip} -R .gnu.version_d -R .gnu.version \
+    "${INSTALL_ROOT}/usr/lib/${arch}-${os}/libxkbcommon.so.0.0.0"
+  cp "${SCRIPT_DIR}/libxkbcommon0-symbols" \
+    "${INSTALL_ROOT}/debian/libxkbcommon0/DEBIAN/symbols"
+
+  # libxcomposite1 is missing a symbols file.
+  cp "${SCRIPT_DIR}/libxcomposite1-symbols" \
+    "${INSTALL_ROOT}/debian/libxcomposite1/DEBIAN/symbols"
+
+  # Shared objects depending on libdbus-1.so.3 have unsatisfied undefined
+  # versioned symbols. To avoid LLD --no-allow-shlib-undefined errors, rewrite
+  # DT_NEEDED entries from libdbus-1.so.3 to a different string. LLD will
+  # suppress --no-allow-shlib-undefined diagnostics for such shared objects.
+  set +e
+  for f in "${INSTALL_ROOT}/lib/${arch}-${os}"/*.so \
+           "${INSTALL_ROOT}/usr/lib/${arch}-${os}"/*.so; do
+    echo "$f" | grep -q 'libdbus-1.so$' && continue
+    # In a dependent shared object, the only occurrence of "libdbus-1.so.3" is
+    # the string referenced by the DT_NEEDED entry.
+    offset=$(LANG=C grep -abo libdbus-1.so.3 "$f")
+    [ -n "$offset" ] || continue
+    echo -n 'libdbus-1.so.0' | dd of="$f" conv=notrunc bs=1 \
+      seek="$(echo -n "$offset" | cut -d : -f 1)" status=none
+  done
+  set -e
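+
+  # To spot-check the rewrite (illustrative; assumes libatspi.so is among the
+  # patched *.so files), inspect the dynamic section of a dependent library:
+  #   readelf -d "${INSTALL_ROOT}/usr/lib/${arch}-${os}/libatspi.so" | grep NEEDED
+  # It should now list libdbus-1.so.0 rather than libdbus-1.so.3.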
+
+  # Glibc 2.27 introduced some new optimizations to several math functions, but
+  # it will be a while before it makes it into all supported distros.  Luckily,
+  # glibc maintains ABI compatibility with previous versions, so the old symbols
+  # are still there.
+  # TODO(thomasanderson): Remove this once glibc 2.27 is available on all
+  # supported distros.
+  local math_h="${INSTALL_ROOT}/usr/include/math.h"
+  local libm_so="${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6"
+  nm -D --defined-only --with-symbol-versions "${libm_so}" | \
+    "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${math_h}"
+
+  # glob64() was also optimized in glibc 2.27.  Make sure to choose the older
+  # version.
+  local glob_h="${INSTALL_ROOT}/usr/include/glob.h"
+  local libc_so="${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6"
+  nm -D --defined-only --with-symbol-versions "${libc_so}" | \
+    "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${glob_h}"
+
+  # fcntl64() was introduced in glibc 2.28.  Make sure to use fcntl() instead.
+  local fcntl_h="${INSTALL_ROOT}/usr/include/fcntl.h"
+  sed -i '{N; s/#ifndef __USE_FILE_OFFSET64\(\nextern int fcntl\)/#if 1\1/}' \
+      "${fcntl_h}"
+  # On i386, fcntl() was updated in glibc 2.28.
+  nm -D --defined-only --with-symbol-versions "${libc_so}" | \
+    "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${fcntl_h}"
+
+  # __GLIBC_MINOR__ is used as a feature test macro.  Replace it with the
+  # earliest supported version of glibc (2.17, https://crbug.com/376567).
+  local features_h="${INSTALL_ROOT}/usr/include/features.h"
+  sed -i 's|\(#define\s\+__GLIBC_MINOR__\)|\1 17 //|' "${features_h}"
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_LIBDIR internally
+  SubBanner "Move pkgconfig scripts"
+  mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
+  mv ${INSTALL_ROOT}/usr/lib/${arch}-${os}/pkgconfig/* \
+      ${INSTALL_ROOT}/usr/lib/pkgconfig
+}
+
+
+HacksAndPatchesAmd64() {
+  HacksAndPatchesCommon x86_64 linux-gnu strip
+}
+
+
+HacksAndPatchesI386() {
+  HacksAndPatchesCommon i386 linux-gnu strip
+}
+
+
+HacksAndPatchesARM() {
+  HacksAndPatchesCommon arm linux-gnueabihf arm-linux-gnueabihf-strip
+}
+
+HacksAndPatchesARM64() {
+  # Use the unstripped libdbus for arm64 to prevent linker errors.
+  # https://bugs.chromium.org/p/webrtc/issues/detail?id=8535
+  HacksAndPatchesCommon aarch64 linux-gnu true
+}
+
+HacksAndPatchesARMEL() {
+  HacksAndPatchesCommon arm linux-gnueabi arm-linux-gnueabi-strip
+}
+
+HacksAndPatchesMips() {
+  HacksAndPatchesCommon mipsel linux-gnu mipsel-linux-gnu-strip
+}
+
+
+HacksAndPatchesMips64el() {
+  HacksAndPatchesCommon mips64el linux-gnuabi64 mips64el-linux-gnuabi64-strip
+}
+
+
+InstallIntoSysroot() {
+  Banner "Install Libs And Headers Into Jail"
+
+  mkdir -p ${BUILD_DIR}/debian-packages
+  # The /debian directory is an implementation detail that's used to cd into
+  # when running dpkg-shlibdeps.
+  mkdir -p ${INSTALL_ROOT}/debian
+  # An empty control file is necessary to run dpkg-shlibdeps.
+  touch ${INSTALL_ROOT}/debian/control
+  while (( "$#" )); do
+    local file="$1"
+    local package="${BUILD_DIR}/debian-packages/${file##*/}"
+    shift
+    local sha256sum="$1"
+    shift
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from package list"
+      exit 1
+    fi
+
+    Banner "Installing $(basename ${file})"
+    DownloadOrCopy ${file} ${package}
+    if [ ! -s "${package}" ] ; then
+      echo
+      echo "ERROR: bad package ${package}"
+      exit 1
+    fi
+    echo "${sha256sum}  ${package}" | sha256sum --quiet -c
+
+    SubBanner "Extracting to ${INSTALL_ROOT}"
+    dpkg-deb -x ${package} ${INSTALL_ROOT}
+
+    base_package=$(dpkg-deb --field ${package} Package)
+    mkdir -p ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
+    dpkg-deb -e ${package} ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
+  done
+
+  # Prune /usr/share, leaving only pkgconfig.
+  ls -d ${INSTALL_ROOT}/usr/share/* | grep -v "/pkgconfig$" | xargs rm -r
+}
+
+
+CleanupJailSymlinks() {
+  Banner "Jail symlink cleanup"
+
+  SAVEDPWD=$(pwd)
+  cd ${INSTALL_ROOT}
+  local libdirs="lib usr/lib"
+  if [ "${ARCH}" != "MIPS" ]; then
+    libdirs="${libdirs} lib64"
+  fi
+
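+  # Worked example (illustrative): for a link "usr/lib/x86_64-linux-gnu/libz.so"
+  # pointing at "/lib/x86_64-linux-gnu/libz.so.1", the computed prefix is
+  # "../../../" (one "../" per slash in the link path), so the link is
+  # rewritten to "../../..//lib/x86_64-linux-gnu/libz.so.1", which resolves
+  # inside the sysroot instead of on the host.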
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # skip links with non-absolute paths
+    echo "${target}" | grep -qs ^/ || continue
+    echo "${link}: ${target}"
+    # Relativize the symlink.
+    prefix=$(echo "${link}" | sed -e 's/[^/]//g' | sed -e 's|/|../|g')
+    ln -snfv "${prefix}${target}" "${link}"
+  done
+
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # Make sure we catch new bad links.
+    if [ ! -r "${link}" ]; then
+      echo "ERROR: FOUND BAD LINK ${link}"
+      ls -l ${link}
+      exit 1
+    fi
+  done
+  cd "$SAVEDPWD"
+}
+
+
+VerifyLibraryDepsCommon() {
+  local arch=$1
+  local os=$2
+  local find_dirs=(
+    "${INSTALL_ROOT}/lib/"
+    "${INSTALL_ROOT}/lib/${arch}-${os}/"
+    "${INSTALL_ROOT}/usr/lib/${arch}-${os}/"
+  )
+  local needed_libs="$(
+    find ${find_dirs[*]} -name "*\.so*" -type f -exec file {} \; | \
+      grep ': ELF' | sed 's/^\(.*\): .*$/\1/' | xargs readelf -d | \
+      grep NEEDED | sort | uniq | sed 's/^.*Shared library: \[\(.*\)\]$/\1/g')"
+  local all_libs="$(find ${find_dirs[*]} -printf '%f\n')"
+  # Ignore missing libdbus-1.so.0
+  all_libs+="$(echo -e '\nlibdbus-1.so.0')"
+  local missing_libs="$(grep -vFxf <(echo "${all_libs}") \
+    <(echo "${needed_libs}"))"
+  if [ ! -z "${missing_libs}" ]; then
+    echo "Missing libraries:"
+    echo "${missing_libs}"
+    exit 1
+  fi
+}
+
+
+VerifyLibraryDepsAmd64() {
+  VerifyLibraryDepsCommon x86_64 linux-gnu
+}
+
+
+VerifyLibraryDepsI386() {
+  VerifyLibraryDepsCommon i386 linux-gnu
+}
+
+
+VerifyLibraryDepsARM() {
+  VerifyLibraryDepsCommon arm linux-gnueabihf
+}
+
+
+VerifyLibraryDepsARM64() {
+  VerifyLibraryDepsCommon aarch64 linux-gnu
+}
+
+VerifyLibraryDepsARMEL() {
+  VerifyLibraryDepsCommon arm linux-gnueabi
+}
+
+VerifyLibraryDepsMips() {
+  VerifyLibraryDepsCommon mipsel linux-gnu
+}
+
+
+VerifyLibraryDepsMips64el() {
+  VerifyLibraryDepsCommon mips64el linux-gnuabi64
+}
+
+
+#@
+#@ BuildSysrootAmd64
+#@
+#@    Build everything and package it
+BuildSysrootAmd64() {
+  if [ "$HAS_ARCH_AMD64" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_AMD64}"
+  GeneratePackageListAmd64 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesAmd64
+  VerifyLibraryDepsAmd64
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootI386
+#@
+#@    Build everything and package it
+BuildSysrootI386() {
+  if [ "$HAS_ARCH_I386" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_I386}"
+  GeneratePackageListI386 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesI386
+  VerifyLibraryDepsI386
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM
+#@
+#@    Build everything and package it
+BuildSysrootARM() {
+  if [ "$HAS_ARCH_ARM" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_ARM}"
+  GeneratePackageListARM "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesARM
+  VerifyLibraryDepsARM
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM64
+#@
+#@    Build everything and package it
+BuildSysrootARM64() {
+  if [ "$HAS_ARCH_ARM64" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_ARM64}"
+  GeneratePackageListARM64 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesARM64
+  VerifyLibraryDepsARM64
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootARMEL
+#@
+#@    Build everything and package it
+BuildSysrootARMEL() {
+  if [ "$HAS_ARCH_ARMEL" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_ARMEL}"
+  GeneratePackageListARMEL "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesARMEL
+  VerifyLibraryDepsARMEL
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips
+#@
+#@    Build everything and package it
+BuildSysrootMips() {
+  if [ "$HAS_ARCH_MIPS" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_MIPS}"
+  GeneratePackageListMips "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesMips
+  VerifyLibraryDepsMips
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips64el
+#@
+#@    Build everything and package it
+BuildSysrootMips64el() {
+  if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
+    return
+  fi
+  ClearInstallDir
+  local package_file="${DEBIAN_DEP_LIST_MIPS64EL}"
+  GeneratePackageListMips64el "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesMips64el
+  VerifyLibraryDepsMips64el
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootAll
+#@
+#@    Build sysroot images for all architectures
+BuildSysrootAll() {
+  RunCommand BuildSysrootAmd64
+  RunCommand BuildSysrootI386
+  RunCommand BuildSysrootARM
+  RunCommand BuildSysrootARM64
+  RunCommand BuildSysrootARMEL
+  RunCommand BuildSysrootMips
+  RunCommand BuildSysrootMips64el
+}
+
+UploadSysroot() {
+  local sha=$(sha1sum "${TARBALL}" | awk '{print $1;}')
+  set -x
+  gsutil.py cp -a public-read "${TARBALL}" \
+      "gs://chrome-linux-sysroot/toolchain/$sha/"
+  set +x
+}
+
+#@
+#@ UploadSysrootAmd64
+#@
+UploadSysrootAmd64() {
+  if [ "$HAS_ARCH_AMD64" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootI386
+#@
+UploadSysrootI386() {
+  if [ "$HAS_ARCH_I386" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM
+#@
+UploadSysrootARM() {
+  if [ "$HAS_ARCH_ARM" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM64
+#@
+UploadSysrootARM64() {
+  if [ "$HAS_ARCH_ARM64" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARMEL
+#@
+UploadSysrootARMEL() {
+  if [ "$HAS_ARCH_ARMEL" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips
+#@
+UploadSysrootMips() {
+  if [ "$HAS_ARCH_MIPS" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips64el
+#@
+UploadSysrootMips64el() {
+  if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
+    return
+  fi
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootAll
+#@
+#@    Upload sysroot image for all architectures
+UploadSysrootAll() {
+  RunCommand UploadSysrootAmd64 "$@"
+  RunCommand UploadSysrootI386 "$@"
+  RunCommand UploadSysrootARM "$@"
+  RunCommand UploadSysrootARM64 "$@"
+  RunCommand UploadSysrootARMEL "$@"
+  RunCommand UploadSysrootMips "$@"
+  RunCommand UploadSysrootMips64el "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+#     Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+  if [ ! -e "$KEYRING_FILE" ]; then
+    echo "KEYRING_FILE not found: ${KEYRING_FILE}"
+    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+    exit 1
+  fi
+}
+
+#
+# VerifyPackageListing
+#
+#     Verifies the downloaded Packages.xz file has the right checksums.
+#
+VerifyPackageListing() {
+  local file_path="$1"
+  local output_file="$2"
+  local repo="$3"
+  local dist="$4"
+
+  local repo_basedir="${repo}/dists/${dist}"
+  local release_list="${repo_basedir}/${RELEASE_FILE}"
+  local release_list_gpg="${repo_basedir}/${RELEASE_FILE_GPG}"
+
+  local release_file="${BUILD_DIR}/${dist}-${RELEASE_FILE}"
+  local release_file_gpg="${BUILD_DIR}/${dist}-${RELEASE_FILE_GPG}"
+
+  CheckForDebianGPGKeyring
+
+  DownloadOrCopyNonUniqueFilename ${release_list} ${release_file}
+  DownloadOrCopyNonUniqueFilename ${release_list_gpg} ${release_file_gpg}
+  echo "Verifying: ${release_file} with ${release_file_gpg}"
+  set -x
+  gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
+  set +x
+
+  echo "Verifying: ${output_file}"
+  local sha256sum=$(grep -E "${file_path}\$|:\$" "${release_file}" | \
+    grep "SHA256:" -A 1 | xargs echo | awk '{print $2;}')
+
+  if [ "${#sha256sum}" -ne "64" ]; then
+    echo "Bad sha256sum from ${release_list}"
+    exit 1
+  fi
+
+  echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
+}
+
+#
+# GeneratePackageList
+#
+#     Looks up package names in ${BUILD_DIR}/Packages and writes a list of
+#     URLs to the output file.
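+#
+#     Each output line is "<package-url> <sha256>", e.g. (illustrative):
+#       https://snapshot.debian.org/archive/debian/<ts>/pool/.../libc6_..._amd64.deb <sha256>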
+#
+GeneratePackageList() {
+  local input_file="$1"
+  local output_file="$2"
+  echo "Updating: ${output_file} from ${input_file}"
+  /bin/rm -f "${output_file}"
+  shift
+  shift
+  for pkg in $@ ; do
+    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+      egrep "pool/.*" | sed 's/.*Filename: //')
+    if [ -z "${pkg_full}" ]; then
+        echo "ERROR: missing package: $pkg"
+        exit 1
+    fi
+    local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+      grep ^SHA256: | sed 's/^SHA256: //')
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from Packages"
+      exit 1
+    fi
+    echo $pkg_full $sha256sum >> "$output_file"
+  done
+  # sort -o does an in-place sort of this file
+  sort "$output_file" -o "$output_file"
+}
+
+#@
+#@ PrintArchitectures
+#@
+#@    Prints supported architectures.
+PrintArchitectures() {
+  if [ "$HAS_ARCH_AMD64" = "1" ]; then
+    echo Amd64
+  fi
+  if [ "$HAS_ARCH_I386" = "1" ]; then
+    echo I386
+  fi
+  if [ "$HAS_ARCH_ARM" = "1" ]; then
+    echo ARM
+  fi
+  if [ "$HAS_ARCH_ARM64" = "1" ]; then
+    echo ARM64
+  fi
+  if [ "$HAS_ARCH_ARMEL" = "1" ]; then
+    echo ARMEL
+  fi
+  if [ "$HAS_ARCH_MIPS" = "1" ]; then
+    echo Mips
+  fi
+  if [ "$HAS_ARCH_MIPS64EL" = "1" ]; then
+    echo Mips64el
+  fi
+}
+
+#@
+#@ PrintDistro
+#@
+#@    Prints distro.  eg: ubuntu
+PrintDistro() {
+  echo ${DISTRO}
+}
+
+#@
+#@ PrintRelease
+#@
+#@    Prints distro release.  eg: jessie
+PrintRelease() {
+  echo ${DIST}
+}
+
+RunCommand() {
+  SetEnvironmentVariables "$1"
+  SanityCheck
+  "$@"
+}
+
+if [ $# -eq 0 ] ; then
+  echo "ERROR: you must specify a mode on the commandline"
+  echo
+  Usage
+  exit 1
+elif [ "$(type -t $1)" != "function" ]; then
+  echo "ERROR: unknown function '$1'." >&2
+  echo "For help, try:"
+  echo "    $0 help"
+  exit 1
+else
+  ChangeDirectory
+  if echo $1 | grep -qs --regexp='\(^Print\)\|\(All$\)'; then
+    "$@"
+  else
+    RunCommand "$@"
+  fi
+fi
diff --git a/src/build/linux/sysroot_scripts/sysroots.json b/src/build/linux/sysroot_scripts/sysroots.json
new file mode 100644
index 0000000..6248db7
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/sysroots.json
@@ -0,0 +1,37 @@
+{
+    "sid_amd64": {
+        "Sha1Sum": "43a87bbebccad99325fdcf34166295b121ee15c7",
+        "SysrootDir": "debian_sid_amd64-sysroot",
+        "Tarball": "debian_sid_amd64_sysroot.tar.xz"
+    },
+    "sid_arm": {
+        "Sha1Sum": "11d6f690ca49e8ba01a1d8c5346cedad2cf308fd",
+        "SysrootDir": "debian_sid_arm-sysroot",
+        "Tarball": "debian_sid_arm_sysroot.tar.xz"
+    },
+    "sid_arm64": {
+        "Sha1Sum": "2befe8ce3e88be6080e4fb7e6d412278ea6a7625",
+        "SysrootDir": "debian_sid_arm64-sysroot",
+        "Tarball": "debian_sid_arm64_sysroot.tar.xz"
+    },
+    "sid_armel": {
+        "Sha1Sum": "a0e2a51aaa7d779fc45415ac30c835b67caa6663",
+        "SysrootDir": "debian_sid_armel-sysroot",
+        "Tarball": "debian_sid_armel_sysroot.tar.xz"
+    },
+    "sid_i386": {
+        "Sha1Sum": "d53a049af5961f2f121ee4e149918097c193f8ed",
+        "SysrootDir": "debian_sid_i386-sysroot",
+        "Tarball": "debian_sid_i386_sysroot.tar.xz"
+    },
+    "sid_mips": {
+        "Sha1Sum": "eb577cef43088b7e0540950c74f994267631d4cd",
+        "SysrootDir": "debian_sid_mips-sysroot",
+        "Tarball": "debian_sid_mips_sysroot.tar.xz"
+    },
+    "sid_mips64el": {
+        "Sha1Sum": "6cb76f27035d1460fe164f7e6c5318c047aac153",
+        "SysrootDir": "debian_sid_mips64el-sysroot",
+        "Tarball": "debian_sid_mips64el_sysroot.tar.xz"
+    }
+}
diff --git a/src/build/linux/sysroot_scripts/update-archive-timestamp.sh b/src/build/linux/sysroot_scripts/update-archive-timestamp.sh
new file mode 100755
index 0000000..a61dd99
--- /dev/null
+++ b/src/build/linux/sysroot_scripts/update-archive-timestamp.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script updates sysroot-creator-*.sh with the timestamp of the latest
+# snapshot from snapshot.debian.org.
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ARCHIVE_URL="http://snapshot.debian.org/archive/debian"
+
+# Use 9999-01-01 as the date so that we get a redirect to the page with the
+# latest timestamp.
+TIMESTAMP=$(curl -s "${ARCHIVE_URL}/99990101T000000Z/pool/" | \
+  sed -n "s|.*${ARCHIVE_URL}/\([[:digit:]TZ]\+\)/pool/.*|\1|p" | head -n 1)
+
+sed -i "s/ARCHIVE_TIMESTAMP=.*$/ARCHIVE_TIMESTAMP=${TIMESTAMP}/" \
+  "${SCRIPT_DIR}"/sysroot-creator-*.sh
diff --git a/src/build/linux/unbundle/README b/src/build/linux/unbundle/README
new file mode 100644
index 0000000..b6b6321
--- /dev/null
+++ b/src/build/linux/unbundle/README
@@ -0,0 +1,57 @@
+This directory contains files that make it possible for Linux
+distributions to build Chromium using system libraries and exclude the
+source code for Chromium's bundled copies of system libraries in a
+consistent manner. Nothing here is used in normal developer builds.
+
+
+For more info on the Linux distros' philosophy on bundling system
+libraries and why this exists, please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context please read
+http://spot.livejournal.com/312320.html .
+
+Additional resources which might provide even more context:
+
+  - http://events.linuxfoundation.org/sites/events/files/slides/LinuxCon%202014%20Slides_0.pdf
+  - https://lwn.net/Articles/619158/
+
+
+This directory is provided in the source tree so one can follow the
+above guidelines without having to download additional tools or worry
+about having the right version of the tool. It is a compromise solution
+that takes into account both Chromium developers, who want to avoid the
+perceived burden of more conditional code in build files, and the
+expectations of the Open Source community, where using system-provided
+libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py <preserved-directories>
+
+   For example: remove_bundled_libraries.py third_party/zlib
+
+   The script scans sources looking for third_party directories.
+   Everything that is not explicitly preserved is removed (except for
+   GYP/GN build files), and the script fails if any directory passed on
+   the command line does not exist (to ensure the list is kept up to date).
+
+   This is intended to be used on source code extracted from a tarball,
+   not on a git repository.
+
+   NOTE: by default this will not remove anything (for safety). Pass
+   the --do-remove flag to actually remove files.
+
+2. replace_gn_files.py --system-libraries lib...
+
+   This swaps out a normal library GN build file that is intended for
+   use with a bundled library for a build file that is set up to use
+   the system library. While some build files have use_system_libfoo
+   build flags, using unbundled build files has the advantage that Linux
+   distros can build Chromium without having to specify many additional
+   build flags.
+
+   For example: replace_gn_files.py --system-libraries libxml
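+
+   A full distro packaging flow might combine both steps (illustrative):
+
+     build/linux/unbundle/remove_bundled_libraries.py --do-remove third_party/zlib
+     build/linux/unbundle/replace_gn_files.py --system-libraries libxml libxslt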
diff --git a/src/build/linux/unbundle/ffmpeg.gn b/src/build/linux/unbundle/ffmpeg.gn
new file mode 100644
index 0000000..978298a
--- /dev/null
+++ b/src/build/linux/unbundle/ffmpeg.gn
@@ -0,0 +1,37 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_ffmpeg") {
+  packages = [
+    "libavcodec",
+    "libavformat",
+    "libavutil",
+  ]
+}
+
+buildflag_header("ffmpeg_features") {
+  header = "ffmpeg_features.h"
+  flags = [ "USE_SYSTEM_FFMPEG=true" ]
+}
+
+shim_headers("ffmpeg_shim") {
+  root_path = "."
+  headers = [
+    "libavcodec/avcodec.h",
+    "libavformat/avformat.h",
+    "libavutil/imgutils.h",
+  ]
+}
+
+source_set("ffmpeg") {
+  deps = [
+    ":ffmpeg_features",
+    ":ffmpeg_shim",
+  ]
+  public_configs = [ ":system_ffmpeg" ]
+}
diff --git a/src/build/linux/unbundle/flac.gn b/src/build/linux/unbundle/flac.gn
new file mode 100644
index 0000000..ced81d6
--- /dev/null
+++ b/src/build/linux/unbundle/flac.gn
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_flac") {
+  packages = [ "flac" ]
+}
+
+shim_headers("flac_shim") {
+  root_path = "include"
+  headers = [
+    "FLAC/all.h",
+    "FLAC/assert.h",
+    "FLAC/callback.h",
+    "FLAC/export.h",
+    "FLAC/format.h",
+    "FLAC/metadata.h",
+    "FLAC/ordinals.h",
+    "FLAC/stream_decoder.h",
+    "FLAC/stream_encoder.h",
+  ]
+}
+
+source_set("flac") {
+  deps = [ ":flac_shim" ]
+  public_configs = [ ":system_flac" ]
+}
diff --git a/src/build/linux/unbundle/fontconfig.gn b/src/build/linux/unbundle/fontconfig.gn
new file mode 100644
index 0000000..49236c9
--- /dev/null
+++ b/src/build/linux/unbundle/fontconfig.gn
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_linux || is_chromeos)
+
+config("fontconfig_config") {
+  libs = [ "fontconfig" ]
+}
+
+group("fontconfig") {
+  public_configs = [ ":fontconfig_config" ]
+}
diff --git a/src/build/linux/unbundle/freetype.gn b/src/build/linux/unbundle/freetype.gn
new file mode 100644
index 0000000..cafa9db
--- /dev/null
+++ b/src/build/linux/unbundle/freetype.gn
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink needs a recent and properly build-configured FreeType version to
+  # support OpenType variations, color emoji and avoid security bugs. By default
+  # we ship and link such a version as part of Chrome. For distributions that
+  # prefer to keep linking to the version the system provides, FreeType must
+  # be newer than version 2.7.1 and have color bitmap support compiled in.
+  # WARNING: System FreeType configurations other than as described WILL
+  # INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+  use_system_freetype = true
+}
diff --git a/src/build/linux/unbundle/harfbuzz-ng.gn b/src/build/linux/unbundle/harfbuzz-ng.gn
new file mode 100644
index 0000000..b4ba17a
--- /dev/null
+++ b/src/build/linux/unbundle/harfbuzz-ng.gn
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink uses a cutting-edge version of Harfbuzz (version listed in
+  # third_party/harfbuzz-ng/README.chromium); most Linux distros do not contain
+  # a new enough version of the code to work correctly. However, ChromeOS
+  # chroots (i.e. real ChromeOS builds for devices) do contain a new enough
+  # version of the library, and so this variable exists so that ChromeOS can
+  # build against the system lib and keep binary sizes smaller.
+  use_system_harfbuzz = true
+}
diff --git a/src/build/linux/unbundle/icu.gn b/src/build/linux/unbundle/icu.gn
new file mode 100644
index 0000000..6f3f843
--- /dev/null
+++ b/src/build/linux/unbundle/icu.gn
@@ -0,0 +1,254 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+group("icu") {
+  public_deps = [
+    ":icui18n",
+    ":icuuc",
+  ]
+}
+
+config("icu_config") {
+  defines = [
+    "USING_SYSTEM_ICU=1",
+    "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
+
+    # U_EXPORT (defined in unicode/platform.h) is used to set public visibility
+    # on classes through the U_COMMON_API and U_I18N_API macros (among others).
+    # When linking against the system ICU library, we want its symbols to have
+    # public LTO visibility. This disables CFI checks for the ICU classes and
+    # allows whole-program optimization to be applied to the rest of Chromium.
+    #
+    # Both U_COMMON_API and U_I18N_API macros would be defined to U_EXPORT only
+    # when U_COMBINED_IMPLEMENTATION is defined (see unicode/utypes.h). Because
+    # we override the default system UCHAR_TYPE (char16_t), it is not possible
+    # to use U_COMBINED_IMPLEMENTATION at this moment, meaning the U_COMMON_API
+    # and U_I18N_API macros are set to U_IMPORT which is an empty definition.
+    #
+    # Until building with UCHAR_TYPE=char16_t is supported, one way to apply
+    # public visibility (and thus public LTO visibility) to all ICU classes is
+    # to define U_IMPORT to have the same value as U_EXPORT. For more details,
+    # please see: https://crbug.com/822820
+    "U_IMPORT=U_EXPORT",
+  ]
+}
+
+pkg_config("system_icui18n") {
+  packages = [ "icu-i18n" ]
+}
+
+pkg_config("system_icuuc") {
+  packages = [ "icu-uc" ]
+}
+
+source_set("icui18n") {
+  deps = [ ":icui18n_shim" ]
+  public_configs = [
+    ":icu_config",
+    ":system_icui18n",
+  ]
+}
+
+source_set("icuuc") {
+  deps = [ ":icuuc_shim" ]
+  public_configs = [
+    ":icu_config",
+    ":system_icuuc",
+  ]
+}
+
+shim_headers("icui18n_shim") {
+  root_path = "source/i18n"
+  headers = [
+    # This list can easily be updated using the commands below:
+    # cd third_party/icu/source/i18n
+    # find unicode -iname '*.h' -printf '    "%p",\n' | LC_ALL=C sort -u
+    "unicode/alphaindex.h",
+    "unicode/basictz.h",
+    "unicode/calendar.h",
+    "unicode/choicfmt.h",
+    "unicode/coleitr.h",
+    "unicode/coll.h",
+    "unicode/compactdecimalformat.h",
+    "unicode/curramt.h",
+    "unicode/currpinf.h",
+    "unicode/currunit.h",
+    "unicode/datefmt.h",
+    "unicode/dcfmtsym.h",
+    "unicode/decimfmt.h",
+    "unicode/dtfmtsym.h",
+    "unicode/dtitvfmt.h",
+    "unicode/dtitvinf.h",
+    "unicode/dtptngen.h",
+    "unicode/dtrule.h",
+    "unicode/fieldpos.h",
+    "unicode/fmtable.h",
+    "unicode/format.h",
+    "unicode/fpositer.h",
+    "unicode/gender.h",
+    "unicode/gregocal.h",
+    "unicode/listformatter.h",
+    "unicode/measfmt.h",
+    "unicode/measunit.h",
+    "unicode/measure.h",
+    "unicode/msgfmt.h",
+    "unicode/numfmt.h",
+    "unicode/numsys.h",
+    "unicode/plurfmt.h",
+    "unicode/plurrule.h",
+    "unicode/rbnf.h",
+    "unicode/rbtz.h",
+    "unicode/regex.h",
+    "unicode/region.h",
+    "unicode/reldatefmt.h",
+    "unicode/scientificnumberformatter.h",
+    "unicode/search.h",
+    "unicode/selfmt.h",
+    "unicode/simpletz.h",
+    "unicode/smpdtfmt.h",
+    "unicode/sortkey.h",
+    "unicode/stsearch.h",
+    "unicode/tblcoll.h",
+    "unicode/timezone.h",
+    "unicode/tmunit.h",
+    "unicode/tmutamt.h",
+    "unicode/tmutfmt.h",
+    "unicode/translit.h",
+    "unicode/tzfmt.h",
+    "unicode/tznames.h",
+    "unicode/tzrule.h",
+    "unicode/tztrans.h",
+    "unicode/ucal.h",
+    "unicode/ucol.h",
+    "unicode/ucoleitr.h",
+    "unicode/ucsdet.h",
+    "unicode/udat.h",
+    "unicode/udateintervalformat.h",
+    "unicode/udatpg.h",
+    "unicode/ufieldpositer.h",
+    "unicode/uformattable.h",
+    "unicode/ugender.h",
+    "unicode/ulocdata.h",
+    "unicode/umsg.h",
+    "unicode/unirepl.h",
+    "unicode/unum.h",
+    "unicode/unumsys.h",
+    "unicode/upluralrules.h",
+    "unicode/uregex.h",
+    "unicode/uregion.h",
+    "unicode/ureldatefmt.h",
+    "unicode/usearch.h",
+    "unicode/uspoof.h",
+    "unicode/utmscale.h",
+    "unicode/utrans.h",
+    "unicode/vtzone.h",
+  ]
+}
+
+shim_headers("icuuc_shim") {
+  root_path = "source/common"
+  headers = [
+    # This list can easily be updated using the commands below:
+    # cd third_party/icu/source/common
+    # find unicode -iname '*.h' -printf '    "%p",\n' | LC_ALL=C sort -u
+    "unicode/appendable.h",
+    "unicode/brkiter.h",
+    "unicode/bytestream.h",
+    "unicode/bytestrie.h",
+    "unicode/bytestriebuilder.h",
+    "unicode/caniter.h",
+    "unicode/casemap.h",
+    "unicode/char16ptr.h",
+    "unicode/chariter.h",
+    "unicode/dbbi.h",
+    "unicode/docmain.h",
+    "unicode/dtintrv.h",
+    "unicode/edits.h",
+    "unicode/enumset.h",
+    "unicode/errorcode.h",
+    "unicode/filteredbrk.h",
+    "unicode/icudataver.h",
+    "unicode/icuplug.h",
+    "unicode/idna.h",
+    "unicode/localematcher.h",
+    "unicode/localpointer.h",
+    "unicode/locdspnm.h",
+    "unicode/locid.h",
+    "unicode/messagepattern.h",
+    "unicode/normalizer2.h",
+    "unicode/normlzr.h",
+    "unicode/parseerr.h",
+    "unicode/parsepos.h",
+    "unicode/platform.h",
+    "unicode/ptypes.h",
+    "unicode/putil.h",
+    "unicode/rbbi.h",
+    "unicode/rep.h",
+    "unicode/resbund.h",
+    "unicode/schriter.h",
+    "unicode/simpleformatter.h",
+    "unicode/std_string.h",
+    "unicode/strenum.h",
+    "unicode/stringpiece.h",
+    "unicode/stringtriebuilder.h",
+    "unicode/symtable.h",
+    "unicode/ubidi.h",
+    "unicode/ubiditransform.h",
+    "unicode/ubrk.h",
+    "unicode/ucasemap.h",
+    "unicode/ucat.h",
+    "unicode/uchar.h",
+    "unicode/ucharstrie.h",
+    "unicode/ucharstriebuilder.h",
+    "unicode/uchriter.h",
+    "unicode/uclean.h",
+    "unicode/ucnv.h",
+    "unicode/ucnv_cb.h",
+    "unicode/ucnv_err.h",
+    "unicode/ucnvsel.h",
+    "unicode/uconfig.h",
+    "unicode/ucurr.h",
+    "unicode/udata.h",
+    "unicode/udisplaycontext.h",
+    "unicode/uenum.h",
+    "unicode/uidna.h",
+    "unicode/uiter.h",
+    "unicode/uldnames.h",
+    "unicode/ulistformatter.h",
+    "unicode/uloc.h",
+    "unicode/umachine.h",
+    "unicode/umisc.h",
+    "unicode/unifilt.h",
+    "unicode/unifunct.h",
+    "unicode/unimatch.h",
+    "unicode/uniset.h",
+    "unicode/unistr.h",
+    "unicode/unorm.h",
+    "unicode/unorm2.h",
+    "unicode/uobject.h",
+    "unicode/urename.h",
+    "unicode/urep.h",
+    "unicode/ures.h",
+    "unicode/uscript.h",
+    "unicode/uset.h",
+    "unicode/usetiter.h",
+    "unicode/ushape.h",
+    "unicode/usprep.h",
+    "unicode/ustring.h",
+    "unicode/ustringtrie.h",
+    "unicode/utext.h",
+    "unicode/utf.h",
+    "unicode/utf16.h",
+    "unicode/utf32.h",
+    "unicode/utf8.h",
+    "unicode/utf_old.h",
+    "unicode/utrace.h",
+    "unicode/utypes.h",
+    "unicode/uvernum.h",
+    "unicode/uversion.h",
+  ]
+}
diff --git a/src/build/linux/unbundle/libdrm.gn b/src/build/linux/unbundle/libdrm.gn
new file mode 100644
index 0000000..30cdcef
--- /dev/null
+++ b/src/build/linux/unbundle/libdrm.gn
@@ -0,0 +1,20 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libdrm") {
+  packages = [ "libdrm" ]
+}
+
+shim_headers("libdrm_shim") {
+  root_path = "src/include"
+  headers = [ "drm.h" ]
+}
+
+source_set("libdrm") {
+  deps = [ ":libdrm_shim" ]
+  public_configs = [ ":system_libdrm" ]
+}
diff --git a/src/build/linux/unbundle/libevent.gn b/src/build/linux/unbundle/libevent.gn
new file mode 100644
index 0000000..7e1c34d
--- /dev/null
+++ b/src/build/linux/unbundle/libevent.gn
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("libevent_shim") {
+  root_path = "."
+  headers = [ "event.h" ]
+}
+
+source_set("libevent") {
+  deps = [ ":libevent_shim" ]
+  libs = [ "event" ]
+}
diff --git a/src/build/linux/unbundle/libjpeg.gn b/src/build/linux/unbundle/libjpeg.gn
new file mode 100644
index 0000000..17398ea
--- /dev/null
+++ b/src/build/linux/unbundle/libjpeg.gn
@@ -0,0 +1,12 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Uses system libjpeg. If true, overrides use_libjpeg_turbo.
+  use_system_libjpeg = true
+
+  # Uses libjpeg_turbo as the jpeg implementation. Has no effect if
+  # use_system_libjpeg is set.
+  use_libjpeg_turbo = true
+}
diff --git a/src/build/linux/unbundle/libpng.gn b/src/build/linux/unbundle/libpng.gn
new file mode 100644
index 0000000..1d6590d
--- /dev/null
+++ b/src/build/linux/unbundle/libpng.gn
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("libpng_config") {
+  packages = [ "libpng" ]
+}
+
+shim_headers("libpng_shim") {
+  root_path = "."
+  headers = [
+    "png.h",
+    "pngconf.h",
+  ]
+}
+
+source_set("libpng") {
+  deps = [ ":libpng_shim" ]
+  public_configs = [ ":libpng_config" ]
+}
diff --git a/src/build/linux/unbundle/libvpx.gn b/src/build/linux/unbundle/libvpx.gn
new file mode 100644
index 0000000..87be30d
--- /dev/null
+++ b/src/build/linux/unbundle/libvpx.gn
@@ -0,0 +1,32 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libvpx") {
+  packages = [ "vpx" ]
+}
+
+shim_headers("libvpx_shim") {
+  root_path = "source/libvpx"
+  headers = [
+    "vpx/vp8.h",
+    "vpx/vp8cx.h",
+    "vpx/vp8dx.h",
+    "vpx/vpx_codec.h",
+    "vpx/vpx_codec_impl_bottom.h",
+    "vpx/vpx_codec_impl_top.h",
+    "vpx/vpx_decoder.h",
+    "vpx/vpx_encoder.h",
+    "vpx/vpx_frame_buffer.h",
+    "vpx/vpx_image.h",
+    "vpx/vpx_integer.h",
+  ]
+}
+
+source_set("libvpx") {
+  deps = [ ":libvpx_shim" ]
+  public_configs = [ ":system_libvpx" ]
+}
diff --git a/src/build/linux/unbundle/libwebp.gn b/src/build/linux/unbundle/libwebp.gn
new file mode 100644
index 0000000..de0c230
--- /dev/null
+++ b/src/build/linux/unbundle/libwebp.gn
@@ -0,0 +1,35 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libwebp") {
+  packages = [
+    "libwebp",
+    "libwebpdemux",
+    "libwebpmux",
+  ]
+}
+
+shim_headers("libwebp_shim") {
+  root_path = "src"
+  headers = [
+    "webp/decode.h",
+    "webp/demux.h",
+    "webp/encode.h",
+    "webp/mux.h",
+    "webp/mux_types.h",
+    "webp/types.h",
+  ]
+}
+
+source_set("libwebp_webp") {
+  deps = [ ":libwebp_shim" ]
+  public_configs = [ ":system_libwebp" ]
+}
+
+group("libwebp") {
+  deps = [ ":libwebp_webp" ]
+}
diff --git a/src/build/linux/unbundle/libxml.gn b/src/build/linux/unbundle/libxml.gn
new file mode 100644
index 0000000..3587881
--- /dev/null
+++ b/src/build/linux/unbundle/libxml.gn
@@ -0,0 +1,55 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("system_libxml") {
+  packages = [ "libxml-2.0" ]
+}
+
+source_set("libxml") {
+  public_configs = [ ":system_libxml" ]
+}
+
+static_library("libxml_utils") {
+  # Do not expand this visibility list without first consulting with the
+  # Security Team.
+  visibility = [
+    ":xml_reader",
+    ":xml_writer",
+    "//base/test:test_support",
+    "//services/data_decoder:xml_parser_fuzzer",
+  ]
+  sources = [
+    "chromium/libxml_utils.cc",
+    "chromium/libxml_utils.h",
+  ]
+  public_configs = [ ":system_libxml" ]
+}
+
+static_library("xml_reader") {
+  # Do not expand this visibility list without first consulting with the
+  # Security Team.
+  visibility = [
+    "//base/test:test_support",
+    "//components/policy/core/common:unit_tests",
+    "//services/data_decoder:*",
+    "//tools/traffic_annotation/auditor:auditor_sources",
+  ]
+  sources = [
+    "chromium/xml_reader.cc",
+    "chromium/xml_reader.h",
+  ]
+  deps = [ ":libxml_utils" ]
+}
+
+static_library("xml_writer") {
+  # The XmlWriter is considered safe to use from any target.
+  visibility = [ "*" ]
+  sources = [
+    "chromium/xml_writer.cc",
+    "chromium/xml_writer.h",
+  ]
+  deps = [ ":libxml_utils" ]
+}
diff --git a/src/build/linux/unbundle/libxslt.gn b/src/build/linux/unbundle/libxslt.gn
new file mode 100644
index 0000000..885574e
--- /dev/null
+++ b/src/build/linux/unbundle/libxslt.gn
@@ -0,0 +1,13 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("system_libxslt") {
+  packages = [ "libxslt" ]
+}
+
+source_set("libxslt") {
+  public_configs = [ ":system_libxslt" ]
+}
diff --git a/src/build/linux/unbundle/openh264.gn b/src/build/linux/unbundle/openh264.gn
new file mode 100644
index 0000000..882e0a8
--- /dev/null
+++ b/src/build/linux/unbundle/openh264.gn
@@ -0,0 +1,36 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("config") {
+  packages = [ "openh264" ]
+}
+
+shim_headers("openh264_shim") {
+  prefix = "wels/"
+  root_path = "src/codec/api/svc"
+  headers = [
+    "codec_api.h",
+    "codec_app_def.h",
+    "codec_def.h",
+    "codec_ver.h",
+  ]
+}
+
+source_set("common") {
+  deps = [ ":openh264_shim" ]
+  public_configs = [ ":config" ]
+}
+
+source_set("processing") {
+  deps = [ ":openh264_shim" ]
+  public_configs = [ ":config" ]
+}
+
+source_set("encoder") {
+  deps = [ ":openh264_shim" ]
+  public_configs = [ ":config" ]
+}
diff --git a/src/build/linux/unbundle/opus.gn b/src/build/linux/unbundle/opus.gn
new file mode 100644
index 0000000..504d7d2
--- /dev/null
+++ b/src/build/linux/unbundle/opus.gn
@@ -0,0 +1,43 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("opus_config") {
+  packages = [ "opus" ]
+}
+
+shim_headers("opus_shim") {
+  root_path = "src/include"
+  headers = [
+    "opus.h",
+    "opus_defines.h",
+    "opus_multistream.h",
+    "opus_types.h",
+  ]
+}
+
+source_set("opus") {
+  deps = [ ":opus_shim" ]
+  public_configs = [ ":opus_config" ]
+}
+
+source_set("opus_compare") {
+}
+
+source_set("opus_demo") {
+}
+
+source_set("test_opus_api") {
+}
+
+source_set("test_opus_decode") {
+}
+
+source_set("test_opus_encode") {
+}
+
+source_set("test_opus_padding") {
+}
diff --git a/src/build/linux/unbundle/re2.gn b/src/build/linux/unbundle/re2.gn
new file mode 100644
index 0000000..824d778
--- /dev/null
+++ b/src/build/linux/unbundle/re2.gn
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/shim_headers.gni")
+
+assert(!use_custom_libcxx,
+       "Usage of the system libre2.so is not supported with " +
+           "use_custom_libcxx=true because the library's interface relies on " +
+           "libstdc++'s std::string and std::vector.")
+
+shim_headers("re2_shim") {
+  root_path = "src"
+  headers = [
+    "re2/filtered_re2.h",
+    "re2/re2.h",
+    "re2/set.h",
+    "re2/stringpiece.h",
+    "re2/variadic_function.h",
+  ]
+}
+
+source_set("re2") {
+  deps = [ ":re2_shim" ]
+  libs = [ "re2" ]
+}
diff --git a/src/build/linux/unbundle/remove_bundled_libraries.py b/src/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 0000000..899877a
--- /dev/null
+++ b/src/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
+"""
+
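+# Example usage (illustrative paths, run from the top of the source tree):
+#   build/linux/unbundle/remove_bundled_libraries.py third_party/zlib
+# prints every third_party file that would be removed while keeping the
+# listed exclusions; add --do-remove to actually delete the files.
+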
+from __future__ import print_function
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+  my_dirname = os.path.abspath(os.path.dirname(__file__))
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+    print('Sanity check failed: please run this script from '
+          'build/linux/unbundle directory.')
+    return 1
+
+  parser = optparse.OptionParser()
+  parser.add_option('--do-remove', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  exclusion_used = {}
+  for exclusion in args:
+    exclusion_used[exclusion] = False
+
+  for root, dirs, files in os.walk(source_tree_root, topdown=False):
+    # Only look at paths which contain a "third_party" component
+    # (note that e.g. third_party.png doesn't count).
+    root_relpath = os.path.relpath(root, source_tree_root)
+    if 'third_party' not in root_relpath.split(os.sep):
+      continue
+
+    for f in files:
+      path = os.path.join(root, f)
+      relpath = os.path.relpath(path, source_tree_root)
+
+      excluded = False
+      for exclusion in args:
+        # Require precise exclusions. Find the right-most third_party
+        # component in the relative path, and ignore the exclusion if it
+        # is completely contained within the part before that component.
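+        # E.g. the exclusion "third_party/foo" must not also exclude
+        # "third_party/foo/third_party/bar.c".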
+        split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
+        if len(split) > 1 and split[0].startswith(exclusion):
+          continue
+
+        if relpath.startswith(exclusion):
+          # Multiple exclusions can match the same path. Go through all of them
+          # and mark each one as used.
+          exclusion_used[exclusion] = True
+          excluded = True
+      if excluded:
+        continue
+
+      # Deleting gyp files almost always leads to gyp failures.
+      # These files come from the Chromium project, and can be replaced
+      # if needed.
+      if f.endswith('.gyp') or f.endswith('.gypi'):
+        continue
+
+      # The same applies to GN files.
+      if f.endswith('.gn') or f.endswith('.gni'):
+        continue
+
+      # Deleting .isolate files leads to gyp failures. They are usually
+      # not used by a distro build anyway.
+      # See http://www.chromium.org/developers/testing/isolated-testing
+      # for more info.
+      if f.endswith('.isolate'):
+        continue
+
+      if options.do_remove:
+        # Delete the file - best way to ensure it's not used during build.
+        os.remove(path)
+      else:
+        # By default just print paths that would be removed.
+        print(path)
+
+  exit_code = 0
+
+  # Fail if the exclusion list contains stale entries - this helps keep
+  # it up to date.
+  for exclusion, used in exclusion_used.items():
+    if not used:
+      print('%s does not exist' % exclusion)
+      exit_code = 1
+
+  if not options.do_remove:
+    print('To actually remove files printed above, please pass '
+          '--do-remove flag.')
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv[1:]))
diff --git a/src/build/linux/unbundle/replace_gn_files.py b/src/build/linux/unbundle/replace_gn_files.py
new file mode 100755
index 0000000..eba4bd1
--- /dev/null
+++ b/src/build/linux/unbundle/replace_gn_files.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Replaces GN files in the tree with files from this directory that make
+the build use system libraries.
+"""
+
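+# Example usage (illustrative, run from the top of the source tree):
+#   build/linux/unbundle/replace_gn_files.py --system-libraries libxml zlib
+# saves each replaced GN file with a .orig suffix and copies the matching
+# file from this directory over it; --undo restores the saved originals.
+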
+from __future__ import print_function
+
+import argparse
+import os
+import shutil
+import sys
+
+
+REPLACEMENTS = {
+  'ffmpeg': 'third_party/ffmpeg/BUILD.gn',
+  'flac': 'third_party/flac/BUILD.gn',
+  'fontconfig': 'third_party/fontconfig/BUILD.gn',
+  'freetype': 'build/config/freetype/freetype.gni',
+  'harfbuzz-ng': 'third_party/harfbuzz-ng/harfbuzz.gni',
+  'icu': 'third_party/icu/BUILD.gn',
+  'libdrm': 'third_party/libdrm/BUILD.gn',
+  'libevent': 'base/third_party/libevent/BUILD.gn',
+  'libjpeg': 'third_party/libjpeg.gni',
+  'libpng': 'third_party/libpng/BUILD.gn',
+  'libvpx': 'third_party/libvpx/BUILD.gn',
+  'libwebp': 'third_party/libwebp/BUILD.gn',
+  'libxml': 'third_party/libxml/BUILD.gn',
+  'libxslt': 'third_party/libxslt/BUILD.gn',
+  'openh264': 'third_party/openh264/BUILD.gn',
+  'opus': 'third_party/opus/BUILD.gn',
+  're2': 'third_party/re2/BUILD.gn',
+  'snappy': 'third_party/snappy/BUILD.gn',
+  'zlib': 'third_party/zlib/BUILD.gn',
+}
+
+
+def DoMain(argv):
+  my_dirname = os.path.dirname(__file__)
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--system-libraries', nargs='*', default=[])
+  parser.add_argument('--undo', action='store_true')
+
+  args = parser.parse_args(argv)
+
+  handled_libraries = set()
+  for lib, path in REPLACEMENTS.items():
+    if lib not in args.system_libraries:
+      continue
+    handled_libraries.add(lib)
+
+    if args.undo:
+      # Restore original file, and also remove the backup.
+      # This is meant to restore the source tree to its original state.
+      os.rename(os.path.join(source_tree_root, path + '.orig'),
+                os.path.join(source_tree_root, path))
+    else:
+      # Create a backup copy for --undo.
+      shutil.copyfile(os.path.join(source_tree_root, path),
+                      os.path.join(source_tree_root, path + '.orig'))
+
+      # Copy the GN file from directory of this script to target path.
+      shutil.copyfile(os.path.join(my_dirname, '%s.gn' % lib),
+                      os.path.join(source_tree_root, path))
+
+  unhandled_libraries = set(args.system_libraries) - handled_libraries
+  if unhandled_libraries:
+    print('Unrecognized system libraries requested: %s' % ', '.join(
+        sorted(unhandled_libraries)), file=sys.stderr)
+    return 1
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv[1:]))
diff --git a/src/build/linux/unbundle/snappy.gn b/src/build/linux/unbundle/snappy.gn
new file mode 100644
index 0000000..966666f
--- /dev/null
+++ b/src/build/linux/unbundle/snappy.gn
@@ -0,0 +1,20 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("snappy_shim") {
+  root_path = "src"
+  headers = [
+    "snappy-c.h",
+    "snappy-sinksource.h",
+    "snappy-stubs-public.h",
+    "snappy.h",
+  ]
+}
+
+source_set("snappy") {
+  deps = [ ":snappy_shim" ]
+  libs = [ "snappy" ]
+}
diff --git a/src/build/linux/unbundle/zlib.gn b/src/build/linux/unbundle/zlib.gn
new file mode 100644
index 0000000..97d6aeb
--- /dev/null
+++ b/src/build/linux/unbundle/zlib.gn
@@ -0,0 +1,64 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("zlib_shim") {
+  root_path = "."
+  headers = [ "zlib.h" ]
+}
+
+config("system_zlib") {
+  defines = [ "USE_SYSTEM_ZLIB=1" ]
+}
+
+config("zlib_config") {
+  configs = [ ":system_zlib" ]
+}
+
+source_set("zlib") {
+  deps = [ ":zlib_shim" ]
+  libs = [ "z" ]
+  public_configs = [ ":system_zlib" ]
+}
+
+shim_headers("minizip_shim") {
+  root_path = "contrib"
+  headers = [
+    "minizip/crypt.h",
+    "minizip/ioapi.h",
+    "minizip/iowin32.h",
+    "minizip/mztools.h",
+    "minizip/unzip.h",
+    "minizip/zip.h",
+  ]
+}
+
+source_set("minizip") {
+  deps = [ ":minizip_shim" ]
+  libs = [ "minizip" ]
+}
+
+static_library("zip") {
+  sources = [
+    "google/zip.cc",
+    "google/zip.h",
+    "google/zip_internal.cc",
+    "google/zip_internal.h",
+    "google/zip_reader.cc",
+    "google/zip_reader.h",
+  ]
+  deps = [
+    ":minizip",
+    "//base",
+  ]
+}
+
+static_library("compression_utils") {
+  sources = [
+    "google/compression_utils.cc",
+    "google/compression_utils.h",
+  ]
+  deps = [ ":zlib" ]
+}
diff --git a/src/build/locale_tool.py b/src/build/locale_tool.py
new file mode 100755
index 0000000..cad5190
--- /dev/null
+++ b/src/build/locale_tool.py
@@ -0,0 +1,1529 @@
+#!/usr/bin/env vpython
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script used to manage locale-related files in Chromium.
+
+This script is used to check, and potentially fix, many locale-related files
+in your Chromium workspace, such as:
+
+  - GRIT input files (.grd) and the corresponding translations (.xtb).
+
+  - BUILD.gn files listing Android localized resource string resource .xml
+    generated by GRIT for all supported Chrome locales. These correspond to
+    <output> elements that use the type="android" attribute.
+
+The --scan-dir <dir> option can be used to check all files under a specific
+directory, and the --fix-inplace option can be used to try fixing any file
+that doesn't pass the check.
+
+This can be very handy to avoid tedious and repetitive work when adding new
+translations / locales to the Chrome code base, since this script can update
+said input files for you.
+
+Important note: both checking and fixing may fail on some input files. For
+example, remoting/resources/remoting_strings.grd contains an in-line comment
+element inside its <outputs> section that breaks the script. The check will
+fail, and so will any attempt to fix the file, but at least the file will
+not be modified.
+"""
+
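+# Example usage (illustrative; the available sub-commands are defined by
+# the _Command classes further below in this file):
+#   build/locale_tool.py list-locales --type=ios_unsupported
+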
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import unittest
+
+# Assume this script is under build/
+_SCRIPT_DIR = os.path.dirname(__file__)
+_SCRIPT_NAME = os.path.join(_SCRIPT_DIR, os.path.basename(__file__))
+_TOP_SRC_DIR = os.path.join(_SCRIPT_DIR, '..')
+
+# Need to import android/gyp/util/resource_utils.py here.
+sys.path.insert(0, os.path.join(_SCRIPT_DIR, 'android/gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+
+# This locale is the default and doesn't have translations.
+_DEFAULT_LOCALE = 'en-US'
+
+# Misc terminal codes to provide human friendly progress output.
+_CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 = '\x1b[0G'
+_CONSOLE_CODE_ERASE_LINE = '\x1b[K'
+_CONSOLE_START_LINE = (
+    _CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 + _CONSOLE_CODE_ERASE_LINE)
+
+##########################################################################
+##########################################################################
+#####
+#####    G E N E R I C   H E L P E R   F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+def _FixChromiumLangAttribute(lang):
+  """Map XML "lang" attribute values to Chromium locale names."""
+  _CHROMIUM_LANG_FIXES = {
+      'en': 'en-US',  # For now, Chromium doesn't have an 'en' locale.
+      'iw': 'he',  # 'iw' is the obsolete form of ISO 639-1 for Hebrew
+      'no': 'nb',  # 'no' is used by the Translation Console for Norwegian (nb).
+  }
+  return _CHROMIUM_LANG_FIXES.get(lang, lang)
+
+
+def _FixTranslationConsoleLocaleName(locale):
+  _FIXES = {
+      'nb': 'no',  # Norwegian.
+      'he': 'iw',  # Hebrew
+  }
+  return _FIXES.get(locale, locale)
+
+
+def _CompareLocaleLists(list_a, list_expected, list_name):
+  """Compare two lists of locale names. Print errors if they differ.
+
+  Args:
+    list_a: First list of locales.
+    list_expected: Second list of locales, as expected.
+    list_name: Name of list printed in error messages.
+  Returns:
+    On success, return False. On error, print error messages and return True.
+  """
+  errors = []
+  missing_locales = sorted(set(list_a) - set(list_expected))
+  if missing_locales:
+    errors.append('Missing locales: %s' % missing_locales)
+
+  extra_locales = sorted(set(list_expected) - set(list_a))
+  if extra_locales:
+    errors.append('Unexpected locales: %s' % extra_locales)
+
+  if errors:
+    print('Errors in %s definition:' % list_name)
+    for error in errors:
+      print('  %s\n' % error)
+    return True
+
+  return False
+
+
+def _BuildIntervalList(input_list, predicate):
+  """Find ranges of contiguous list items that pass a given predicate.
+
+  Args:
+    input_list: An input list of items of any type.
+    predicate: A function that takes a list item and returns True if it
+      passes a given test.
+  Returns:
+    A list of (start_pos, end_pos) tuples, where all items in
+    [start_pos, end_pos) pass the predicate.
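+
+  Example (illustrative):
+    _BuildIntervalList([0, 1, 1, 0, 1], bool) returns [(1, 3), (4, 5)].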
+  """
+  result = []
+  size = len(input_list)
+  start = 0
+  while True:
+    # Find first item in list that passes the predicate.
+    while start < size and not predicate(input_list[start]):
+      start += 1
+
+    if start >= size:
+      return result
+
+    # Find first item in the rest of the list that does not pass the
+    # predicate.
+    end = start + 1
+    while end < size and predicate(input_list[end]):
+      end += 1
+
+    result.append((start, end))
+    start = end + 1
+
+
+def _SortListSubRange(input_list, start, end, key_func):
+  """Sort an input list's sub-range according to a specific key function.
+
+  Args:
+    input_list: An input list.
+    start: Sub-range starting position in list.
+    end: Sub-range limit position in list.
+    key_func: A function that extracts a sort key from a line.
+  Returns:
+    A copy of |input_list|, with all items in [|start|, |end|) sorted
+    according to |key_func|.
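+
+  Example (illustrative):
+    _SortListSubRange([3, 9, 2, 7, 5], 1, 4, lambda x: x) returns
+    [3, 2, 7, 9, 5], i.e. only positions 1..3 are sorted.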
+  """
+  result = input_list[:start]
+  inputs = []
+  for pos in xrange(start, end):
+    line = input_list[pos]
+    key = key_func(line)
+    inputs.append((key, line))
+
+  for _, line in sorted(inputs):
+    result.append(line)
+
+  result += input_list[end:]
+  return result
+
+
+def _SortElementsRanges(lines, element_predicate, element_key):
+  """Sort all elements of a given type in a list of lines by a given key.
+
+  Args:
+    lines: input lines.
+    element_predicate: predicate function to select elements to sort.
+    element_key: lambda returning a comparison key for each element that
+      passes the predicate.
+  Returns:
+    A new list of input lines, with lines [start..end) sorted.
+  """
+  intervals = _BuildIntervalList(lines, element_predicate)
+  for start, end in intervals:
+    lines = _SortListSubRange(lines, start, end, element_key)
+
+  return lines
+
+
+def _ProcessFile(input_file, locales, check_func, fix_func):
+  """Process a given input file, potentially fixing it.
+
+  Args:
+    input_file: Input file path.
+    locales: List of Chrome locales to consider / expect.
+    check_func: A function called to check the input file lines as
+      check_func(input_file, input_lines, locales). It must return a list
+      of error messages, or None on success.
+    fix_func: None, or a function called to fix the input file lines as
+      fix_func(input_file, input_lines, locales). It must return the new
+      list of lines for the input file, and may raise an Exception in
+      case of error.
+  Returns:
+    True at the moment.
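+
+  Example (illustrative):
+    _ProcessFile('foo.grd', locales, _CheckGrdTranslations,
+                 _AddMissingLocalesInGrdTranslations)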
+  """
+  print('%sProcessing %s...' % (_CONSOLE_START_LINE, input_file), end=' ')
+  sys.stdout.flush()
+  with open(input_file) as f:
+    input_lines = f.readlines()
+  errors = check_func(input_file, input_lines, locales)
+  if errors:
+    print('\n%s%s' % (_CONSOLE_START_LINE, '\n'.join(errors)))
+    if fix_func:
+      try:
+        input_lines = fix_func(input_file, input_lines, locales)
+        output = ''.join(input_lines)
+        with open(input_file, 'wt') as f:
+          f.write(output)
+        print('Fixed %s.' % input_file)
+      except Exception as e:  # pylint: disable=broad-except
+        print('Skipped %s: %s' % (input_file, e))
+
+  return True
+
+
+def _ScanDirectoriesForFiles(scan_dirs, file_predicate):
+  """Scan a directory for files that match a given predicate.
+
+  Args:
+    scan_dir: A list of top-level directories to start scan in.
+    file_predicate: lambda function which is passed the file's base name
+      and returns True if its full path, relative to |scan_dir|, should be
+      passed in the result.
+  Returns:
+    A list of file full paths.
+  """
+  result = []
+  for src_dir in scan_dirs:
+    for root, _, files in os.walk(src_dir):
+      result.extend(os.path.join(root, f) for f in files if file_predicate(f))
+  return result
+
+
+def _WriteFile(file_path, file_data):
+  """Write |file_data| to |file_path|."""
+  with open(file_path, 'w') as f:
+    f.write(file_data)
+
+
+def _FindGnExecutable():
+  """Locate the real GN executable used by this Chromium checkout.
+
+  This is needed because the depot_tools 'gn' wrapper script will look
+  for .gclient and other things we really don't need here.
+
+  Returns:
+    Path of real host GN executable from current Chromium src/ checkout.
+  """
+  # Simply scan buildtools/*/gn and return the first one found so we don't
+  # have to guess the platform-specific sub-directory name (e.g. 'linux64'
+  # for 64-bit Linux machines).
+  buildtools_dir = os.path.join(_TOP_SRC_DIR, 'buildtools')
+  for subdir in os.listdir(buildtools_dir):
+    subdir_path = os.path.join(buildtools_dir, subdir)
+    if not os.path.isdir(subdir_path):
+      continue
+    gn_path = os.path.join(subdir_path, 'gn')
+    if os.path.exists(gn_path):
+      return gn_path
+  return None
+
+
+def _PrettyPrintListAsLines(input_list, available_width, trailing_comma=False):
+  result = []
+  input_str = ', '.join(input_list)
+  while len(input_str) > available_width:
+    pos = input_str.rfind(',', 0, available_width)
+    result.append(input_str[:pos + 1])
+    input_str = input_str[pos + 1:].lstrip()
+  if trailing_comma and input_str:
+    input_str += ','
+  result.append(input_str)
+  return result
+
+
+class _PrettyPrintListAsLinesTest(unittest.TestCase):
+
+  def test_empty_list(self):
+    self.assertListEqual([''], _PrettyPrintListAsLines([], 10))
+
+  def test_wrapping(self):
+    input_list = ['foo', 'bar', 'zoo', 'tool']
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 8),
+        ['foo,', 'bar,', 'zoo,', 'tool'])
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 12), ['foo, bar,', 'zoo, tool'])
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 79), ['foo, bar, zoo, tool'])
+
+  def test_trailing_comma(self):
+    input_list = ['foo', 'bar', 'zoo', 'tool']
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 8, trailing_comma=True),
+        ['foo,', 'bar,', 'zoo,', 'tool,'])
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 12, trailing_comma=True),
+        ['foo, bar,', 'zoo, tool,'])
+    self.assertListEqual(
+        _PrettyPrintListAsLines(input_list, 79, trailing_comma=True),
+        ['foo, bar, zoo, tool,'])
+
+
+##########################################################################
+##########################################################################
+#####
+#####    L O C A L E S   L I S T S
+#####
+##########################################################################
+##########################################################################
+
+# Various lists of locales that will be extracted from build/config/locales.gni
+# Do not use these directly, use ChromeLocales(), AndroidAPKOmittedLocales() and
+# IosUnsupportedLocales() instead to access these lists.
+_INTERNAL_CHROME_LOCALES = []
+_INTERNAL_ANDROID_APK_OMITTED_LOCALES = []
+_INTERNAL_IOS_UNSUPPORTED_LOCALES = []
+
+
+def ChromeLocales():
+  """Return the list of all locales supported by Chrome."""
+  if not _INTERNAL_CHROME_LOCALES:
+    _ExtractAllChromeLocalesLists()
+  return _INTERNAL_CHROME_LOCALES
+
+
+def AndroidAPKOmittedLocales():
+  """Return the list of locales omitted from Android APKs."""
+  if not _INTERNAL_ANDROID_APK_OMITTED_LOCALES:
+    _ExtractAllChromeLocalesLists()
+  return _INTERNAL_ANDROID_APK_OMITTED_LOCALES
+
+
+def IosUnsupportedLocales():
+  """Return the list of locales that are unsupported on iOS."""
+  if not _INTERNAL_IOS_UNSUPPORTED_LOCALES:
+    _ExtractAllChromeLocalesLists()
+  return _INTERNAL_IOS_UNSUPPORTED_LOCALES
+
+
+def _PrepareTinyGnWorkspace(work_dir, out_subdir_name='out'):
+  """Populate an empty directory with a tiny set of working GN config files.
+
+  This allows us to run 'gn gen <out> --root <work_dir>' as fast as possible
+  to generate files containing the locales list. This takes about 300ms on
+  a decent machine, instead of more than 5 seconds when running the equivalent
+  commands from a real Chromium workspace, which requires regenerating more
+  than 23k targets.
+
+  Args:
+    work_dir: target working directory.
+    out_subdir_name: Name of output sub-directory.
+  Returns:
+    Full path of output directory created inside |work_dir|.
+  """
+  # Create top-level .gn file that must point to the BUILDCONFIG.gn.
+  _WriteFile(os.path.join(work_dir, '.gn'),
+             'buildconfig = "//BUILDCONFIG.gn"\n')
+  # Create BUILDCONFIG.gn which must set a default toolchain. Also add
+  # all variables that may be used in locales.gni in a declare_args() block.
+  _WriteFile(
+      os.path.join(work_dir, 'BUILDCONFIG.gn'),
+      r'''set_default_toolchain("toolchain")
+declare_args () {
+  is_ios = false
+  is_android = true
+}
+''')
+
+  # Create fake toolchain required by BUILDCONFIG.gn.
+  os.mkdir(os.path.join(work_dir, 'toolchain'))
+  _WriteFile(os.path.join(work_dir, 'toolchain', 'BUILD.gn'),
+             r'''toolchain("toolchain") {
+  tool("stamp") {
+    command = "touch {{output}}"  # Required by action()
+  }
+}
+''')
+
+  # Create top-level BUILD.gn, GN requires at least one target to build so do
+  # that with a fake action which will never be invoked. Also write the locales
+  # to misc files in the output directory.
+  _WriteFile(
+      os.path.join(work_dir, 'BUILD.gn'), r'''import("//locales.gni")
+
+action("create_foo") {   # fake action to avoid GN complaints.
+  script = "//build/create_foo.py"
+  inputs = []
+  outputs = [ "$target_out_dir/$target_name" ]
+}
+
+# Write the locales lists to files in the output directory.
+_filename = root_build_dir + "/foo"
+write_file(_filename + ".locales", locales, "json")
+write_file(_filename + ".android_apk_omitted_locales",
+            android_apk_omitted_locales,
+            "json")
+write_file(_filename + ".ios_unsupported_locales",
+            ios_unsupported_locales,
+            "json")
+''')
+
+  # Copy build/config/locales.gni to the workspace, as required by BUILD.gn.
+  shutil.copyfile(os.path.join(_TOP_SRC_DIR, 'build', 'config', 'locales.gni'),
+                  os.path.join(work_dir, 'locales.gni'))
+
+  # Create output directory.
+  out_path = os.path.join(work_dir, out_subdir_name)
+  os.mkdir(out_path)
+
+  # And ... we're good.
+  return out_path
+
+
+# Set this global variable to the path of a given temporary directory
+# before calling _ExtractAllChromeLocalesLists() if you want to debug
+# the locales list extraction process.
+_DEBUG_LOCALES_WORK_DIR = None
+
+
+def _ReadJsonList(file_path):
+  """Read a JSON file that must contain a list, and return it."""
+  with open(file_path) as f:
+    data = json.load(f)
+    assert isinstance(data, list), "JSON file %s is not a list!" % file_path
+  return [item.encode('utf8') for item in data]
+
+
+def _ExtractAllChromeLocalesLists():
+  with build_utils.TempDir() as tmp_path:
+    if _DEBUG_LOCALES_WORK_DIR:
+      tmp_path = _DEBUG_LOCALES_WORK_DIR
+      build_utils.DeleteDirectory(tmp_path)
+      build_utils.MakeDirectory(tmp_path)
+
+    out_path = _PrepareTinyGnWorkspace(tmp_path, 'out')
+
+    # NOTE: The file suffixes used here should be kept in sync with
+    # build/config/locales.gni
+    gn_executable = _FindGnExecutable()
+    try:
+      subprocess.check_output(
+          [gn_executable, 'gen', out_path, '--root=' + tmp_path])
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise e
+
+    global _INTERNAL_CHROME_LOCALES
+    _INTERNAL_CHROME_LOCALES = _ReadJsonList(
+        os.path.join(out_path, 'foo.locales'))
+
+    global _INTERNAL_ANDROID_APK_OMITTED_LOCALES
+    _INTERNAL_ANDROID_APK_OMITTED_LOCALES = _ReadJsonList(
+        os.path.join(out_path, 'foo.android_apk_omitted_locales'))
+
+    global _INTERNAL_IOS_UNSUPPORTED_LOCALES
+    _INTERNAL_IOS_UNSUPPORTED_LOCALES = _ReadJsonList(
+        os.path.join(out_path, 'foo.ios_unsupported_locales'))
+
+
+##########################################################################
+##########################################################################
+#####
+#####    G R D   H E L P E R   F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+# Technical note:
+#
+# Even though .grd files are XML, an xml parser library is not used in order
+# to preserve the original file's structure after modification. ElementTree
+# tends to re-order attributes in each element when re-writing an XML
+# document tree, which is undesirable here.
+#
+# Thus simple line-based regular expression matching is used instead.
+#
+
+# Misc regular expressions used to match elements and their attributes.
+_RE_OUTPUT_ELEMENT = re.compile(r'<output (.*)\s*/>')
+_RE_TRANSLATION_ELEMENT = re.compile(r'<file( | .* )path="(.*\.xtb)".*/>')
+_RE_FILENAME_ATTRIBUTE = re.compile(r'filename="([^"]*)"')
+_RE_LANG_ATTRIBUTE = re.compile(r'lang="([^"]*)"')
+_RE_PATH_ATTRIBUTE = re.compile(r'path="([^"]*)"')
+_RE_TYPE_ANDROID_ATTRIBUTE = re.compile(r'type="android"')
+
+
+def _IsGritInputFile(input_file):
+  """Returns True iff this is a GRIT input file."""
+  return input_file.endswith('.grd')
+
+
+def _GetXmlLangAttribute(xml_line):
+  """Extract the lang attribute value from an XML input line."""
+  m = _RE_LANG_ATTRIBUTE.search(xml_line)
+  if not m:
+    return None
+  return m.group(1)
+
+
+class _GetXmlLangAttributeTest(unittest.TestCase):
+  TEST_DATA = {
+      '': None,
+      'foo': None,
+      'lang=foo': None,
+      'lang="foo"': 'foo',
+      '<something lang="foo bar" />': 'foo bar',
+      '<file lang="fr-CA" path="path/to/strings_fr-CA.xtb" />': 'fr-CA',
+  }
+
+  def test_GetXmlLangAttribute(self):
+    for test_line, expected in self.TEST_DATA.iteritems():
+      self.assertEquals(_GetXmlLangAttribute(test_line), expected)
+
+
+def _SortGrdElementsRanges(grd_lines, element_predicate):
+  """Sort all .grd elements of a given type by their lang attribute."""
+  return _SortElementsRanges(grd_lines, element_predicate, _GetXmlLangAttribute)
+
+
+def _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales):
+  """Check the element 'lang' attributes in specific .grd lines range.
+
+  This really checks the following:
+    - Each item has a correct 'lang' attribute.
+    - There are no duplicated lines for the same 'lang' attribute.
+    - That there are no extra locales that Chromium doesn't want.
+    - That no wanted locale is missing.
+
+  Args:
+    grd_lines: Input .grd lines.
+    start: Sub-range start position in input line list.
+    end: Sub-range limit position in input line list.
+    wanted_locales: Set of wanted Chromium locale names.
+  Returns:
+    List of error message strings for this input. Empty on success.
+  """
+  errors = []
+  locales = set()
+  for pos in xrange(start, end):
+    line = grd_lines[pos]
+    lang = _GetXmlLangAttribute(line)
+    if not lang:
+      errors.append('%d: Missing "lang" attribute in <output> element' % pos +
+                    1)
+      continue
+    cr_locale = _FixChromiumLangAttribute(lang)
+    if cr_locale in locales:
+      errors.append(
+          '%d: Redefinition of <output> for "%s" locale' % (pos + 1, lang))
+    locales.add(cr_locale)
+
+  extra_locales = locales.difference(wanted_locales)
+  if extra_locales:
+    errors.append('%d-%d: Extra locales found: %s' % (start + 1, end + 1,
+                                                      sorted(extra_locales)))
+
+  missing_locales = wanted_locales.difference(locales)
+  if missing_locales:
+    errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1,
+                                                  sorted(missing_locales)))
+
+  return errors
+
+
+##########################################################################
+##########################################################################
+#####
+#####    G R D   A N D R O I D   O U T P U T S
+#####
+##########################################################################
+##########################################################################
+
+def _IsGrdAndroidOutputLine(line):
+  """Returns True iff this is an Android-specific <output> line."""
+  m = _RE_OUTPUT_ELEMENT.search(line)
+  if m:
+    return 'type="android"' in m.group(1)
+  return False
+
+assert _IsGrdAndroidOutputLine('  <output type="android"/>')
+
+# Many of the functions below have unused arguments due to genericity.
+# pylint: disable=unused-argument
+
+def _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end,
+                                               wanted_locales):
+  """Check all <output> elements in specific input .grd lines range.
+
+  This really checks the following:
+    - Filenames exist for each listed locale.
+    - Filenames are well-formed.
+
+  Args:
+    grd_lines: Input .grd lines.
+    start: Sub-range start position in input line list.
+    end: Sub-range limit position in input line list.
+    wanted_locales: Set of wanted Chromium locale names.
+  Returns:
+    List of error message strings for this input. Empty on success.
+  """
+  errors = []
+  for pos in xrange(start, end):
+    line = grd_lines[pos]
+    lang = _GetXmlLangAttribute(line)
+    if not lang:
+      continue
+    cr_locale = _FixChromiumLangAttribute(lang)
+
+    m = _RE_FILENAME_ATTRIBUTE.search(line)
+    if not m:
+      errors.append(
+          '%d: Missing filename attribute in <output> element' % (pos + 1))
+    else:
+      filename = m.group(1)
+      if not filename.endswith('.xml'):
+        errors.append(
+            '%d: Filename should end with ".xml": %s' % (pos + 1, filename))
+
+      dirname = os.path.basename(os.path.dirname(filename))
+      prefix = ('values-%s' % resource_utils.ToAndroidLocaleName(cr_locale)
+                if cr_locale != _DEFAULT_LOCALE else 'values')
+      if dirname != prefix:
+        errors.append(
+            '%d: Directory name should be %s: %s' % (pos + 1, prefix, filename))
+
+  return errors
+
+
+def _CheckGrdAndroidOutputElements(grd_file, grd_lines, wanted_locales):
+  """Check all <output> elements related to Android.
+
+  Args:
+    grd_file: Input .grd file path.
+    grd_lines: List of input .grd lines.
+    wanted_locales: set of wanted Chromium locale names.
+  Returns:
+    List of error message strings. Empty on success.
+  """
+  intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine)
+  errors = []
+  for start, end in intervals:
+    errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales)
+    errors += _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end,
+                                                         wanted_locales)
+  return errors
+
+
+def _AddMissingLocalesInGrdAndroidOutputs(grd_file, grd_lines, wanted_locales):
+  """Fix an input .grd line by adding missing Android outputs.
+
+  Args:
+    grd_file: Input .grd file path.
+    grd_lines: Input .grd line list.
+    wanted_locales: set of Chromium locale names.
+  Returns:
+    A new list of .grd lines, containing new <output> elements when needed
+    for locales from |wanted_locales| that were not part of the input.
+  """
+  intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine)
+  for start, end in reversed(intervals):
+    locales = set()
+    for pos in xrange(start, end):
+      lang = _GetXmlLangAttribute(grd_lines[pos])
+      locale = _FixChromiumLangAttribute(lang)
+      locales.add(locale)
+
+    missing_locales = wanted_locales.difference(locales)
+    if not missing_locales:
+      continue
+
+    src_locale = 'bg'
+    src_lang_attribute = 'lang="%s"' % src_locale
+    src_line = None
+    for pos in xrange(start, end):
+      if src_lang_attribute in grd_lines[pos]:
+        src_line = grd_lines[pos]
+        break
+
+    if not src_line:
+      raise Exception(
+          'Cannot find <output> element with "%s" lang attribute' % src_locale)
+
+    line_count = end - 1
+    for locale in missing_locales:
+      android_locale = resource_utils.ToAndroidLocaleName(locale)
+      dst_line = src_line.replace(
+          'lang="%s"' % src_locale, 'lang="%s"' % locale).replace(
+              'values-%s/' % src_locale, 'values-%s/' % android_locale)
+      grd_lines.insert(line_count, dst_line)
+      line_count += 1
+
+  # Sort the new <output> elements.
+  return _SortGrdElementsRanges(grd_lines, _IsGrdAndroidOutputLine)
+
+
+##########################################################################
+##########################################################################
+#####
+#####    G R D   T R A N S L A T I O N S
+#####
+##########################################################################
+##########################################################################
+
+
+def _IsTranslationGrdOutputLine(line):
+  """Returns True iff this is an output .xtb <file> element."""
+  m = _RE_TRANSLATION_ELEMENT.search(line)
+  return m is not None
+
+
+class _IsTranslationGrdOutputLineTest(unittest.TestCase):
+
+  def test_GrdTranslationOutputLines(self):
+    _VALID_INPUT_LINES = [
+        '<file path="foo/bar.xtb" />',
+        '<file path="foo/bar.xtb"/>',
+        '<file lang="fr-CA" path="translations/aw_strings_fr-CA.xtb"/>',
+        '<file lang="fr-CA" path="translations/aw_strings_fr-CA.xtb" />',
+        '  <file path="translations/aw_strings_ar.xtb" lang="ar" />',
+    ]
+    _INVALID_INPUT_LINES = ['<file path="foo/bar.xml" />']
+
+    for line in _VALID_INPUT_LINES:
+      self.assertTrue(
+          _IsTranslationGrdOutputLine(line),
+          '_IsTranslationGrdOutputLine() returned False for [%s]' % line)
+
+    for line in _INVALID_INPUT_LINES:
+      self.assertFalse(
+          _IsTranslationGrdOutputLine(line),
+          '_IsTranslationGrdOutputLine() returned True for [%s]' % line)
+
+
+def _CheckGrdTranslationElementRange(grd_lines, start, end,
+                                     wanted_locales):
+  """Check all <translations> sub-elements in specific input .grd lines range.
+
+  This really checks the following:
+    - Each item has a 'path' attribute.
+    - Each such path value ends up with '.xtb'.
+
+  Args:
+    grd_lines: Input .grd lines.
+    start: Sub-range start position in input line list.
+    end: Sub-range limit position in input line list.
+    wanted_locales: Set of wanted Chromium locale names.
+  Returns:
+    List of error message strings for this input. Empty on success.
+  """
+  errors = []
+  for pos in xrange(start, end):
+    line = grd_lines[pos]
+    lang = _GetXmlLangAttribute(line)
+    if not lang:
+      continue
+    m = _RE_PATH_ATTRIBUTE.search(line)
+    if not m:
+      errors.append(
+          '%d: Missing path attribute in <file> element' % (pos + 1))
+    else:
+      filename = m.group(1)
+      if not filename.endswith('.xtb'):
+        errors.append(
+            '%d: Path should end with ".xtb": %s' % (pos + 1, filename))
+
+  return errors
+
+
+def _CheckGrdTranslations(grd_file, grd_lines, wanted_locales):
+  """Check all <file> elements that correspond to an .xtb output file.
+
+  Args:
+    grd_file: Input .grd file path.
+    grd_lines: List of input .grd lines.
+    wanted_locales: set of wanted Chromium locale names.
+  Returns:
+    List of error message strings. Empty on success.
+  """
+  wanted_locales = wanted_locales - set([_DEFAULT_LOCALE])
+  intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine)
+  errors = []
+  for start, end in intervals:
+    errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales)
+    errors += _CheckGrdTranslationElementRange(grd_lines, start, end,
+                                              wanted_locales)
+  return errors
+
+
+# Regular expression used to replace the lang attribute inside .xtb files.
+_RE_TRANSLATIONBUNDLE = re.compile('<translationbundle lang="(.*)">')
+
+
+def _CreateFakeXtbFileFrom(src_xtb_path, dst_xtb_path, dst_locale):
+  """Create a fake .xtb file.
+
+  Args:
+    src_xtb_path: Path to source .xtb file to copy from.
+    dst_xtb_path: Path to destination .xtb file to write to.
+    dst_locale: Destination locale, the lang attribute in the source file
+      will be substituted with this value before its lines are written
+      to the destination file.
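+
+  Example (illustrative):
+    _CreateFakeXtbFileFrom('s_en-GB.xtb', 's_fr.xtb', 'fr') writes a copy
+    of s_en-GB.xtb whose <translationbundle> lang attribute is "fr".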
+  """
+  with open(src_xtb_path) as f:
+    src_xtb_lines = f.readlines()
+
+  def replace_xtb_lang_attribute(line):
+    m = _RE_TRANSLATIONBUNDLE.search(line)
+    if not m:
+      return line
+    return line[:m.start(1)] + dst_locale + line[m.end(1):]
+
+  dst_xtb_lines = [replace_xtb_lang_attribute(line) for line in src_xtb_lines]
+  with build_utils.AtomicOutput(dst_xtb_path) as tmp:
+    tmp.writelines(dst_xtb_lines)
+
+
+def _AddMissingLocalesInGrdTranslations(grd_file, grd_lines, wanted_locales):
+  """Fix an input .grd line by adding missing Android outputs.
+
+  This also creates fake .xtb files from the one provided for 'en-GB'.
+
+  Args:
+    grd_file: Input .grd file path.
+    grd_lines: Input .grd line list.
+    wanted_locales: set of Chromium locale names.
+  Returns:
+    A new list of .grd lines, containing new <file> elements when needed
+    for locales from |wanted_locales| that were not part of the input.
+  """
+  wanted_locales = wanted_locales - set([_DEFAULT_LOCALE])
+  intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine)
+  for start, end in reversed(intervals):
+    locales = set()
+    for pos in xrange(start, end):
+      lang = _GetXmlLangAttribute(grd_lines[pos])
+      locale = _FixChromiumLangAttribute(lang)
+      locales.add(locale)
+
+    missing_locales = wanted_locales.difference(locales)
+    if not missing_locales:
+      continue
+
+    src_locale = 'en-GB'
+    src_lang_attribute = 'lang="%s"' % src_locale
+    src_line = None
+    for pos in xrange(start, end):
+      if src_lang_attribute in grd_lines[pos]:
+        src_line = grd_lines[pos]
+        break
+
+    if not src_line:
+      raise Exception(
+          'Cannot find <file> element with "%s" lang attribute' % src_locale)
+
+    src_path = os.path.join(
+        os.path.dirname(grd_file),
+        _RE_PATH_ATTRIBUTE.search(src_line).group(1))
+
+    line_count = end - 1
+    for locale in missing_locales:
+      dst_line = src_line.replace(
+          'lang="%s"' % src_locale, 'lang="%s"' % locale).replace(
+              '_%s.xtb' % src_locale, '_%s.xtb' % locale)
+      grd_lines.insert(line_count, dst_line)
+      line_count += 1
+
+      dst_path = src_path.replace('_%s.xtb' % src_locale, '_%s.xtb' % locale)
+      _CreateFakeXtbFileFrom(src_path, dst_path, locale)
+
+  # Sort the new <file> elements.
+  return _SortGrdElementsRanges(grd_lines, _IsTranslationGrdOutputLine)
+
+
+##########################################################################
+##########################################################################
+#####
+#####    G N   A N D R O I D   O U T P U T S
+#####
+##########################################################################
+##########################################################################
+
+_RE_GN_VALUES_LIST_LINE = re.compile(
+    r'^\s*".*values(\-([A-Za-z0-9-]+))?/.*\.xml",\s*$')
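+# The pattern matches GN list entries such as (illustrative):
+#   "java/res/values-fr/strings.xml",   (group 2 is "fr")
+#   "java/res/values/strings.xml",      (no locale suffix: default locale)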
+
+def _IsBuildGnInputFile(input_file):
+  """Returns True iff this is a BUILD.gn file."""
+  return os.path.basename(input_file) == 'BUILD.gn'
+
+
+def _GetAndroidGnOutputLocale(line):
+  """Check a GN list, and return its Android locale if it is an output .xml"""
+  m = _RE_GN_VALUES_LIST_LINE.match(line)
+  if not m:
+    return None
+
+  if m.group(1):  # First group is optional and contains group 2.
+    return m.group(2)
+
+  return resource_utils.ToAndroidLocaleName(_DEFAULT_LOCALE)
+
+
+def _IsAndroidGnOutputLine(line):
+  """Returns True iff this is an Android-specific localized .xml output."""
+  return _GetAndroidGnOutputLocale(line) is not None
+
+
+def _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+  """Check that a range of GN lines corresponds to localized strings.
+
+  Special case: Some BUILD.gn files list several non-localized .xml files
+  that should be ignored by this function, e.g. in
+  components/cronet/android/BUILD.gn, the following appears:
+
+    inputs = [
+      ...
+      "sample/res/layout/activity_main.xml",
+      "sample/res/layout/dialog_url.xml",
+      "sample/res/values/dimens.xml",
+      "sample/res/values/strings.xml",
+      ...
+    ]
+
+  These are non-localized strings and should be ignored. This function
+  returns True only if the line range contains locale-specific entries.
+  """
+  for pos in xrange(start, end):
+    if 'values/' not in gn_lines[pos]:
+      return True
+  return False
+
+
+def _CheckGnOutputsRange(gn_lines, start, end, wanted_locales):
+  if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+    return []
+
+  errors = []
+  locales = set()
+  for pos in xrange(start, end):
+    line = gn_lines[pos]
+    android_locale = _GetAndroidGnOutputLocale(line)
+    assert android_locale is not None
+    cr_locale = resource_utils.ToChromiumLocaleName(android_locale)
+    if cr_locale in locales:
+      errors.append('%d: Redefinition of output for "%s" locale' %
+                    (pos + 1, android_locale))
+    locales.add(cr_locale)
+
+  extra_locales = locales.difference(wanted_locales)
+  if extra_locales:
+    errors.append('%d-%d: Extra locales: %s' % (start + 1, end + 1,
+                                                sorted(extra_locales)))
+
+  missing_locales = wanted_locales.difference(locales)
+  if missing_locales:
+    errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1,
+                                                  sorted(missing_locales)))
+
+  return errors
+
+
+def _CheckGnAndroidOutputs(gn_file, gn_lines, wanted_locales):
+  intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine)
+  errors = []
+  for start, end in intervals:
+    errors += _CheckGnOutputsRange(gn_lines, start, end, wanted_locales)
+  return errors
+
+
+def _AddMissingLocalesInGnAndroidOutputs(gn_file, gn_lines, wanted_locales):
+  intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine)
+  # NOTE: Since this may insert new lines to each interval, process the
+  # list in reverse order to maintain valid (start,end) positions during
+  # the iteration.
+  for start, end in reversed(intervals):
+    if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+      continue
+
+    locales = set()
+    for pos in xrange(start, end):
+      lang = _GetAndroidGnOutputLocale(gn_lines[pos])
+      locale = resource_utils.ToChromiumLocaleName(lang)
+      locales.add(locale)
+
+    missing_locales = wanted_locales.difference(locales)
+    if not missing_locales:
+      continue
+
+    src_locale = 'bg'
+    src_values = 'values-%s/' % resource_utils.ToAndroidLocaleName(src_locale)
+    src_line = None
+    for pos in xrange(start, end):
+      if src_values in gn_lines[pos]:
+        src_line = gn_lines[pos]
+        break
+
+    if not src_line:
+      raise Exception(
+          'Cannot find output list item with "%s" locale' % src_locale)
+
+    line_count = end - 1
+    for locale in missing_locales:
+      if locale == _DEFAULT_LOCALE:
+        dst_line = src_line.replace('values-%s/' % src_locale, 'values/')
+      else:
+        dst_line = src_line.replace(
+            'values-%s/' % src_locale,
+            'values-%s/' % resource_utils.ToAndroidLocaleName(locale))
+      gn_lines.insert(line_count, dst_line)
+      line_count += 1
+
+    gn_lines = _SortListSubRange(
+        gn_lines, start, line_count,
+        lambda line: _RE_GN_VALUES_LIST_LINE.match(line).group(1))
+
+  return gn_lines
+
+
+##########################################################################
+##########################################################################
+#####
+#####    T R A N S L A T I O N   E X P E C T A T I O N S
+#####
+##########################################################################
+##########################################################################
+
+_EXPECTATIONS_FILENAME = 'translation_expectations.pyl'
+
+# Technical note: the format of translation_expectations.pyl
+# is a 'Python literal', which defines a python dictionary, so should
+# be easy to parse. However, when modifying it, care should be taken
+# to respect the line comments and the order of keys within the text
+# file.
+
+
+def _ReadPythonLiteralFile(pyl_path):
+  """Read a .pyl file into a Python data structure."""
+  with open(pyl_path) as f:
+    pyl_content = f.read()
+  # Evaluate as a Python data structure, using empty global and local
+  # dictionaries.
+  return eval(pyl_content, dict(), dict())
+
+
+def _UpdateLocalesInExpectationLines(pyl_lines,
+                                     wanted_locales,
+                                     available_width=79):
+  """Update the locales list(s) found in an expectations file.
+
+  Args:
+    pyl_lines: Iterable of input lines from the file.
+    wanted_locales: Set or list of new locale names.
+    available_width: Optional, number of character columns used
+      to word-wrap the new list items.
+  Returns:
+    New list of updated lines.
+  """
+  locales_list = ['"%s"' % loc for loc in sorted(wanted_locales)]
+  result = []
+  line_count = len(pyl_lines)
+  line_num = 0
+  DICT_START = '"languages": ['
+  while line_num < line_count:
+    line = pyl_lines[line_num]
+    line_num += 1
+    result.append(line)
+    # Look for start of "languages" dictionary.
+    pos = line.find(DICT_START)
+    if pos < 0:
+      continue
+
+    start_margin = pos
+    start_line = line_num
+    # Skip over all lines from the list.
+    while (line_num < line_count and
+           not pyl_lines[line_num].rstrip().endswith('],')):
+      line_num += 1
+      continue
+
+    if line_num == line_count:
+      raise Exception('%d: Missing list termination!' % start_line)
+
+    # Format the new list according to the new margin.
+    locale_width = available_width - (start_margin + 2)
+    locale_lines = _PrettyPrintListAsLines(
+        locales_list, locale_width, trailing_comma=True)
+    for locale_line in locale_lines:
+      result.append(' ' * (start_margin + 2) + locale_line)
+    result.append(' ' * start_margin + '],')
+    line_num += 1
+
+  return result
+
+
+class _UpdateLocalesInExpectationLinesTest(unittest.TestCase):
+
+  def test_simple(self):
+    self.maxDiff = 1000
+    input_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+  "android_grd": {
+    "languages": [
+      "aa", "bb", "cc", "dd", "ee",
+      "ff", "gg", "hh", "ii", "jj",
+      "kk"],
+  },
+  # Example with bad indentation in input.
+  "another_grd": {
+         "languages": [
+  "aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj", "kk",
+      ],
+  },
+}
+'''
+    expected_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+  "android_grd": {
+    "languages": [
+      "A2", "AA", "BB", "CC", "DD",
+      "E2", "EE", "FF", "GG", "HH",
+      "I2", "II", "JJ", "KK",
+    ],
+  },
+  # Example with bad indentation in input.
+  "another_grd": {
+         "languages": [
+           "A2", "AA", "BB", "CC", "DD",
+           "E2", "EE", "FF", "GG", "HH",
+           "I2", "II", "JJ", "KK",
+         ],
+  },
+}
+'''
+    input_lines = input_text.splitlines()
+    test_locales = ([
+        'AA', 'BB', 'CC', 'DD', 'EE', 'FF', 'GG', 'HH', 'II', 'JJ', 'KK', 'A2',
+        'E2', 'I2'
+    ])
+    expected_lines = expected_text.splitlines()
+    self.assertListEqual(
+        _UpdateLocalesInExpectationLines(input_lines, test_locales, 40),
+        expected_lines)
+
+  def test_missing_list_termination(self):
+    input_lines = r'''
+  "languages": ['
+    "aa", "bb", "cc", "dd"
+'''.splitlines()
+    with self.assertRaises(Exception) as cm:
+      _UpdateLocalesInExpectationLines(input_lines, ['a', 'b'], 40)
+
+    self.assertEqual(str(cm.exception), '2: Missing list termination!')
+
+
+def _UpdateLocalesInExpectationFile(pyl_path, wanted_locales):
+  """Update all locales listed in a given expectations file.
+
+  Args:
+    pyl_path: Path to .pyl file to update.
+    wanted_locales: List of locales that need to be written to
+      the file.
+  """
+  tc_locales = {
+      _FixTranslationConsoleLocaleName(locale)
+      for locale in set(wanted_locales) - set([_DEFAULT_LOCALE])
+  }
+
+  with open(pyl_path) as f:
+    input_lines = [l.rstrip() for l in f.readlines()]
+
+  updated_lines = _UpdateLocalesInExpectationLines(input_lines, tc_locales)
+  with build_utils.AtomicOutput(pyl_path) as f:
+    f.writelines('\n'.join(updated_lines) + '\n')
+
+
+##########################################################################
+##########################################################################
+#####
+#####    C H E C K   E V E R Y T H I N G
+#####
+##########################################################################
+##########################################################################
+
+# pylint: enable=unused-argument
+
+
+def _IsAllInputFile(input_file):
+  return _IsGritInputFile(input_file) or _IsBuildGnInputFile(input_file)
+
+
+def _CheckAllFiles(input_file, input_lines, wanted_locales):
+  errors = []
+  if _IsGritInputFile(input_file):
+    errors += _CheckGrdTranslations(input_file, input_lines, wanted_locales)
+    errors += _CheckGrdAndroidOutputElements(
+        input_file, input_lines, wanted_locales)
+  elif _IsBuildGnInputFile(input_file):
+    errors += _CheckGnAndroidOutputs(input_file, input_lines, wanted_locales)
+  return errors
+
+
+def _AddMissingLocalesInAllFiles(input_file, input_lines, wanted_locales):
+  if _IsGritInputFile(input_file):
+    lines = _AddMissingLocalesInGrdTranslations(
+        input_file, input_lines, wanted_locales)
+    lines = _AddMissingLocalesInGrdAndroidOutputs(
+        input_file, lines, wanted_locales)
+  elif _IsBuildGnInputFile(input_file):
+    lines = _AddMissingLocalesInGnAndroidOutputs(
+        input_file, input_lines, wanted_locales)
+  return lines
+
+
+##########################################################################
+##########################################################################
+#####
+#####    C O M M A N D   H A N D L I N G
+#####
+##########################################################################
+##########################################################################
+
+class _Command(object):
+  """A base class for all commands recognized by this script.
+
+  Usage is the following:
+    1) Derived classes must re-define the following class-level fields:
+       - name: Command name (e.g. 'list-locales')
+       - description: Command short description.
+       - long_description: Optional. Command long description.
+         NOTE: As a convenience, if the first character is a newline,
+         it will be omitted in the help output.
+
+    2) Derived classes for commands that take arguments should override
+       RegisterExtraArgs(), which receives a corresponding argparse
+       sub-parser as argument.
+
+    3) Derived classes should implement a Run() method, which can read
+       the current arguments from self.args.
+  """
+  name = None
+  description = None
+  long_description = None
+
+  def __init__(self):
+    self._parser = None
+    self.args = None
+
+  def RegisterExtraArgs(self, subparser):
+    pass
+
+  def RegisterArgs(self, parser):
+    subp = parser.add_parser(
+        self.name, help=self.description,
+        description=self.long_description or self.description,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    self._parser = subp
+    subp.set_defaults(command=self)
+    group = subp.add_argument_group('%s arguments' % self.name)
+    self.RegisterExtraArgs(group)
+
+  def ProcessArgs(self, args):
+    self.args = args
+
+
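+# As a minimal illustration of the contract above (a sketch only; there is no
+# '_HelloCommand' in this script):
+#
+#   class _HelloCommand(_Command):
+#     name = 'hello'
+#     description = 'Print a friendly greeting.'
+#
+#     def RegisterExtraArgs(self, group):
+#       group.add_argument('--name', default='world')
+#
+#     def Run(self):
+#       print('Hello %s!' % self.args.name)
+
+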
+class _ListLocalesCommand(_Command):
+  """Implement the 'list-locales' command to list locale lists of interest."""
+  name = 'list-locales'
+  description = 'List supported Chrome locales'
+  long_description = r'''
+List locales of interest. By default this prints all locales supported by
+Chrome, but `--type=android_apk_omitted` can be used to print the list of
+locales omitted from Android APKs (but not app bundles), and
+`--type=ios_unsupported` for the list of locales unsupported on iOS.
+
+These values are extracted directly from build/config/locales.gni.
+
+Additionally, use the --as-json argument to print the list as a JSON list,
+instead of the default format (which is a space-separated list of locale names).
+'''
+
+  # Maps type argument to a function returning the corresponding locales list.
+  TYPE_MAP = {
+      'all': ChromeLocales,
+      'android_apk_omitted': AndroidAPKOmittedLocales,
+      'ios_unsupported': IosUnsupportedLocales,
+  }
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--as-json',
+        action='store_true',
+        help='Output as JSON list.')
+    group.add_argument(
+        '--type',
+        choices=tuple(self.TYPE_MAP.keys()),
+        default='all',
+        help='Select type of locale list to print.')
+
+  def Run(self):
+    locale_list = self.TYPE_MAP[self.args.type]()
+    if self.args.as_json:
+      print('[%s]' % ', '.join('"%s"' % loc for loc in locale_list))
+    else:
+      print(' '.join(locale_list))
+
+
+class _CheckInputFileBaseCommand(_Command):
+  """Used as a base for other _Command subclasses that check input files.
+
+  Subclasses should also define the following class-level variables:
+
+  - select_file_func:
+      A predicate that receives a file name (not path) and returns True if it
+      should be selected for inspection. Used when scanning directories with
+      '--scan-dir <dir>'.
+
+  - check_func:
+  - fix_func:
+      Two functions passed as parameters to _ProcessFile(), see relevant
+      documentation in this function's definition.
+  """
+  select_file_func = None
+  check_func = None
+  fix_func = None
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+      '--scan-dir',
+      action='append',
+      help='Optional directory to scan for input files recursively.')
+    group.add_argument(
+      'input',
+      nargs='*',
+      help='Input file(s) to check.')
+    group.add_argument(
+      '--fix-inplace',
+      action='store_true',
+      help='Try to fix the files in-place too.')
+    group.add_argument(
+      '--add-locales',
+      help='Space-separated list of additional locales to use.')
+
+  def Run(self):
+    args = self.args
+    input_files = []
+    if args.input:
+      input_files = args.input
+    if args.scan_dir:
+      input_files.extend(_ScanDirectoriesForFiles(
+          args.scan_dir, self.select_file_func.__func__))
+    locales = ChromeLocales()
+    if args.add_locales:
+      locales.extend(args.add_locales.split(' '))
+
+    locales = set(locales)
+
+    for input_file in input_files:
+      _ProcessFile(input_file,
+                   locales,
+                   self.check_func.__func__,
+                   self.fix_func.__func__ if args.fix_inplace else None)
+    print('%sDone.' % (_CONSOLE_START_LINE))
+
+
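+# Each check command below shares the same command-line pattern, e.g.
+# (an illustrative invocation; <script> and <dir> are placeholders):
+#
+#   <script> check-grd-android-outputs --scan-dir <dir> --fix-inplace
+
+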
+class _CheckGrdAndroidOutputsCommand(_CheckInputFileBaseCommand):
+  name = 'check-grd-android-outputs'
+  description = (
+      'Check the Android resource (.xml) file outputs in GRIT input files.')
+  long_description = r'''
+Check the Android .xml files outputs in one or more input GRIT (.grd) files
+for the following conditions:
+
+    - Each item has a correct 'lang' attribute.
+    - There are no duplicated lines for the same 'lang' attribute.
+    - There are no extra locales that Chromium doesn't want.
+    - No wanted locale is missing.
+    - Filenames exist for each listed locale.
+    - Filenames are well-formed.
+'''
+  select_file_func = _IsGritInputFile
+  check_func = _CheckGrdAndroidOutputElements
+  fix_func = _AddMissingLocalesInGrdAndroidOutputs
+
+
+class _CheckGrdTranslationsCommand(_CheckInputFileBaseCommand):
+  name = 'check-grd-translations'
+  description = (
+      'Check the translation (.xtb) files outputted by .grd input files.')
+  long_description = r'''
+Check the translation (.xtb) file outputs in one or more input GRIT (.grd) files
+for the following conditions:
+
+    - Each item has a correct 'lang' attribute.
+    - There are no duplicated lines for the same 'lang' attribute.
+    - There are no extra locales that Chromium doesn't want.
+    - No wanted locale is missing.
+    - Each item has a 'path' attribute.
+    - Each such path value ends with '.xtb'.
+'''
+  select_file_func = _IsGritInputFile
+  check_func = _CheckGrdTranslations
+  fix_func = _AddMissingLocalesInGrdTranslations
+
+
+class _CheckGnAndroidOutputsCommand(_CheckInputFileBaseCommand):
+  name = 'check-gn-android-outputs'
+  description = 'Check the Android .xml file lists in GN build files.'
+  long_description = r'''
+Check one or more BUILD.gn files, looking for lists of Android resource .xml
+files, and checking that:
+
+  - There are no duplicated output files in the list.
+  - Each output file belongs to a wanted Chromium locale.
+  - There are no output files for unwanted Chromium locales.
+'''
+  select_file_func = _IsBuildGnInputFile
+  check_func = _CheckGnAndroidOutputs
+  fix_func = _AddMissingLocalesInGnAndroidOutputs
+
+
+class _CheckAllCommand(_CheckInputFileBaseCommand):
+  name = 'check-all'
+  description = 'Check everything.'
+  long_description = 'Equivalent to calling all other check-xxx commands.'
+  select_file_func = _IsAllInputFile
+  check_func = _CheckAllFiles
+  fix_func = _AddMissingLocalesInAllFiles
+
+
+class _UpdateExpectationsCommand(_Command):
+  name = 'update-expectations'
+  description = 'Update translation expectations file.'
+  long_description = r'''
+Update %s files to match the current list of locales supported by Chromium.
+This is especially useful to add new locales before updating any GRIT or GN
+input file with the --add-locales option.
+''' % _EXPECTATIONS_FILENAME
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--add-locales',
+        help='Space-separated list of additional locales to use.')
+
+  def Run(self):
+    locales = ChromeLocales()
+    add_locales = self.args.add_locales
+    if add_locales:
+      locales.extend(add_locales.split(' '))
+
+    expectation_paths = [
+        'tools/gritsettings/translation_expectations.pyl',
+        'clank/tools/translation_expectations.pyl',
+    ]
+    missing_expectation_files = []
+    for path in expectation_paths:
+      file_path = os.path.join(_TOP_SRC_DIR, path)
+      if not os.path.exists(file_path):
+        missing_expectation_files.append(file_path)
+        continue
+      _UpdateLocalesInExpectationFile(file_path, locales)
+
+    if missing_expectation_files:
+      sys.stderr.write('WARNING: Missing file(s): %s\n' %
+                       (', '.join(missing_expectation_files)))
+
+
+class _UnitTestsCommand(_Command):
+  name = 'unit-tests'
+  description = 'Run internal unit-tests for this script'
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '-v', '--verbose', action='count', help='Increase test verbosity.')
+    group.add_argument('args', nargs=argparse.REMAINDER)
+
+  def Run(self):
+    argv = [_SCRIPT_NAME] + self.args.args
+    unittest.main(argv=argv, verbosity=self.args.verbose)
+
+
+# List of all commands supported by this script.
+_COMMANDS = [
+    _ListLocalesCommand,
+    _CheckGrdAndroidOutputsCommand,
+    _CheckGrdTranslationsCommand,
+    _CheckGnAndroidOutputsCommand,
+    _CheckAllCommand,
+    _UpdateExpectationsCommand,
+    _UnitTestsCommand,
+]
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  subparsers = parser.add_subparsers()
+  commands = [clazz() for clazz in _COMMANDS]
+  for command in commands:
+    command.RegisterArgs(subparsers)
+
+  if not argv:
+    argv = ['--help']
+
+  args = parser.parse_args(argv)
+  args.command.ProcessArgs(args)
+  args.command.Run()
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/src/build/mac/find_sdk.py b/src/build/mac/find_sdk.py
new file mode 100755
index 0000000..d86f310
--- /dev/null
+++ b/src/build/mac/find_sdk.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+r"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+If --print_sdk_path is passed, then the script will also print the SDK path.
+If --print_bin_path is passed, then the script will also print the path to the
+toolchain bin dir.
+
+Usage:
+  python find_sdk.py     \
+      [--print_sdk_path] \
+      [--print_bin_path] \
+      10.6  # Ignores SDKs < 10.6
+
+Sample Output:
+/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.14.sdk
+/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/
+10.14
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import subprocess
+import sys
+
+from optparse import OptionParser
+
+
+class SdkError(Exception):
+  def __init__(self, value):
+    self.value = value
+  def __str__(self):
+    return repr(self.value)
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return [int(s) for s in re.findall(r'(\d+)', version_str)]
+
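+# For example (illustrative): parse_version('10.14') returns [10, 14], so the
+# list comparison [10, 6] < [10, 14] orders versions numerically, whereas the
+# plain string comparison '10.6' < '10.14' evaluates to False.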
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionally print the path to the SDK "
+                         "(appears first).")
+  parser.add_option("--print_bin_path",
+                    action="store_true", dest="print_bin_path", default=False,
+                    help="Additionally print the path to the toolchain "
+                         "bin dir.")
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Please specify a minimum SDK version')
+  min_sdk_version = args[0]
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print(out, file=sys.stderr)
+    print(err, file=sys.stderr)
+    raise Exception('Error %d running xcode-select' % job.returncode)
+  dev_dir = out.decode('UTF-8').rstrip()
+  sdk_dir = os.path.join(
+      dev_dir, 'Platforms/MacOSX.platform/Developer/SDKs')
+
+  if not os.path.isdir(sdk_dir):
+    raise SdkError('Install Xcode, launch it, accept the license ' +
+      'agreement, and run `sudo xcode-select -s /path/to/Xcode.app` ' +
+      'to continue.')
+  sdks = [
+      re.findall(r'^MacOSX(\d+\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)
+  ]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
+
+  if options.print_sdk_path:
+    sdk_name = 'MacOSX' + best_sdk + '.sdk'
+    print(os.path.join(sdk_dir, sdk_name))
+
+  if options.print_bin_path:
+    bin_path = 'Toolchains/XcodeDefault.xctoolchain/usr/bin/'
+    print(os.path.join(dev_dir, bin_path))
+
+  return best_sdk
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  print(main())
+  sys.exit(0)
diff --git a/src/build/mac/should_use_hermetic_xcode.py b/src/build/mac/should_use_hermetic_xcode.py
new file mode 100755
index 0000000..63d44af
--- /dev/null
+++ b/src/build/mac/should_use_hermetic_xcode.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Prints "1" if Chrome targets should be built with hermetic Xcode.
+Prints "2" if Chrome targets should be built with hermetic Xcode, but the OS
+version does not meet the minimum requirements of the hermetic version of Xcode.
+Prints "3" if FORCE_MAC_TOOLCHAIN is set for an iOS target_os.
+Otherwise prints "0".
+
+Usage:
+  python should_use_hermetic_xcode.py <target_os>
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+_THIS_DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+_BUILD_PATH = os.path.join(_THIS_DIR_PATH, os.pardir)
+sys.path.insert(0, _BUILD_PATH)
+
+import mac_toolchain
+
+
+def _IsCorpMachine():
+  if sys.platform == 'darwin':
+    return os.path.isdir('/Library/GoogleCorpSupport/')
+  if sys.platform.startswith('linux'):
+    import subprocess
+    try:
+      return subprocess.check_output(['lsb_release',
+                                      '-sc']).rstrip() == b'rodete'
+    except:
+      return False
+  return False
+
+
+def main():
+  parser = argparse.ArgumentParser(description='Download hermetic Xcode.')
+  parser.add_argument('platform')
+  args = parser.parse_args()
+
+  force_toolchain = os.environ.get('FORCE_MAC_TOOLCHAIN')
+  if force_toolchain and args.platform == 'ios':
+    return "3"
+  allow_corp = args.platform == 'mac' and _IsCorpMachine()
+  if force_toolchain or allow_corp:
+    if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements():
+      return "2"
+    return "1"
+  else:
+    return "0"
+
+
+if __name__ == '__main__':
+  print(main())
+  sys.exit(0)
diff --git a/src/build/mac_toolchain.py b/src/build/mac_toolchain.py
new file mode 100755
index 0000000..43d336c
--- /dev/null
+++ b/src/build/mac_toolchain.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+If should_use_hermetic_xcode.py emits "1", and the current toolchain is out of
+date:
+  * Downloads the hermetic mac toolchain
+    * Requires CIPD authentication. Run `cipd auth-login`, use Google account.
+  * Accepts the license.
+    * If xcode-select and xcodebuild are not passwordless in sudoers, requires
+      user interaction.
+  * Downloads standalone binaries from [a possibly different version of] Xcode.
+
+The toolchain version can be overridden by setting MAC_TOOLCHAIN_REVISION with
+the full revision, e.g. 9A235.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import pkg_resources
+import platform
+import plistlib
+import shutil
+import subprocess
+import sys
+
+
+def LoadPList(path):
+  """Loads Plist at |path| and returns it as a dictionary."""
+  if sys.version_info.major == 2:
+    return plistlib.readPlist(path)
+  with open(path, 'rb') as f:
+    return plistlib.load(f)
+
+
+# This contains binaries from Xcode 12.4 12D4e, along with the macOS 11 SDK.
+# To build these packages, see comments in build/xcode_binaries.yaml
+MAC_BINARIES_LABEL = 'infra_internal/ios/xcode/xcode_binaries/mac-amd64'
+MAC_BINARIES_TAG = 'Za4aUIwiTUjk8rnjRow4nXbth-j7ZoN5plyOSCLidcgC'
+
+# The toolchain will not be downloaded if the minimum OS version is not met. 19
+# is the major version number for macOS 10.15. 12B5044c (Xcode 12.2rc) only runs
+# on 10.15.4 and newer.
+MAC_MINIMUM_OS_VERSION = [19, 4]
+
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+TOOLCHAIN_ROOT = os.path.join(BASE_DIR, 'mac_files')
+TOOLCHAIN_BUILD_DIR = os.path.join(TOOLCHAIN_ROOT, 'Xcode.app')
+
+# Always integrity-check the entire SDK. Mac SDK packages are complex and often
+# hit edge cases in cipd (eg https://crbug.com/1033987,
+# https://crbug.com/915278), and generally when this happens it requires manual
+# intervention to fix.
+# Note the trailing \n!
+PARANOID_MODE = '$ParanoidMode CheckIntegrity\n'
+
+
+def PlatformMeetsHermeticXcodeRequirements():
+  if sys.platform != 'darwin':
+    return True
+  needed = MAC_MINIMUM_OS_VERSION
+  major_version = [int(v) for v in platform.release().split('.')[:len(needed)]]
+  return major_version >= needed
+
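+# Note (illustrative): platform.release() reports the Darwin kernel version,
+# e.g. '19.6.0' on macOS 10.15.6, which the slice above turns into [19, 6]
+# for comparison against MAC_MINIMUM_OS_VERSION ([19, 4]).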
+
+def _UseHermeticToolchain():
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  script_path = os.path.join(current_dir, 'mac/should_use_hermetic_xcode.py')
+  proc = subprocess.Popen([script_path, 'mac'], stdout=subprocess.PIPE)
+  return '1' in proc.stdout.readline().decode()
+
+
+def RequestCipdAuthentication():
+  """Requests that the user authenticate to access Xcode CIPD packages."""
+
+  print('Access to Xcode CIPD package requires authentication.')
+  print('-----------------------------------------------------------------')
+  print()
+  print('You appear to be a Googler.')
+  print()
+  print('I\'m sorry for the hassle, but you may need to do a one-time manual')
+  print('authentication. Please run:')
+  print()
+  print('    cipd auth-login')
+  print()
+  print('and follow the instructions.')
+  print()
+  print('NOTE: Use your google.com credentials, not chromium.org.')
+  print()
+  print('-----------------------------------------------------------------')
+  print()
+  sys.stdout.flush()
+
+
+def PrintError(message):
+  # Flush buffers to ensure correct output ordering.
+  sys.stdout.flush()
+  sys.stderr.write(message + '\n')
+  sys.stderr.flush()
+
+
+def InstallXcodeBinaries():
+  """Installs the Xcode binaries needed to build Chrome and accepts the license.
+
+  This is the replacement for InstallXcode that installs a trimmed down version
+  of Xcode that is OS-version agnostic.
+  """
+  # First make sure the directory exists. It will serve as the cipd root. This
+  # also ensures that there are no conflicting cipd roots.
+  binaries_root = os.path.join(TOOLCHAIN_ROOT, 'xcode_binaries')
+  if not os.path.exists(binaries_root):
+    os.makedirs(binaries_root)
+
+  # 'cipd ensure' is idempotent.
+  args = ['cipd', 'ensure', '-root', binaries_root, '-ensure-file', '-']
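+  # The ensure file piped to cipd on stdin below therefore has the form:
+  #   $ParanoidMode CheckIntegrity
+  #   infra_internal/ios/xcode/xcode_binaries/mac-amd64 <MAC_BINARIES_TAG>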
+
+  p = subprocess.Popen(args,
+                       universal_newlines=True,
+                       stdin=subprocess.PIPE,
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE)
+  stdout, stderr = p.communicate(input=PARANOID_MODE + MAC_BINARIES_LABEL +
+                                 ' ' + MAC_BINARIES_TAG)
+  if p.returncode != 0:
+    print(stdout)
+    print(stderr)
+    RequestCipdAuthentication()
+    return 1
+
+  if sys.platform != 'darwin':
+    return 0
+
+  # Accept the license for this version of Xcode if it's newer than the
+  # currently accepted version.
+  cipd_xcode_version_plist_path = os.path.join(binaries_root,
+                                               'Contents/version.plist')
+  cipd_xcode_version_plist = LoadPList(cipd_xcode_version_plist_path)
+  cipd_xcode_version = cipd_xcode_version_plist['CFBundleShortVersionString']
+
+  cipd_license_path = os.path.join(binaries_root,
+                                   'Contents/Resources/LicenseInfo.plist')
+  cipd_license_plist = LoadPList(cipd_license_path)
+  cipd_license_version = cipd_license_plist['licenseID']
+
+  should_overwrite_license = True
+  current_license_path = '/Library/Preferences/com.apple.dt.Xcode.plist'
+  if os.path.exists(current_license_path):
+    current_license_plist = LoadPList(current_license_path)
+    xcode_version = current_license_plist.get(
+        'IDEXcodeVersionForAgreedToGMLicense')
+    if (xcode_version is not None and pkg_resources.parse_version(xcode_version)
+        >= pkg_resources.parse_version(cipd_xcode_version)):
+      should_overwrite_license = False
+
+  if not should_overwrite_license:
+    return 0
+
+  # Use puppet's sudoers script to accept the license if it's available.
+  license_accept_script = '/usr/local/bin/xcode_accept_license.py'
+  if os.path.exists(license_accept_script):
+    args = [
+        'sudo', license_accept_script, '--xcode-version', cipd_xcode_version,
+        '--license-version', cipd_license_version
+    ]
+    subprocess.check_call(args)
+    return 0
+
+  # Otherwise manually accept the license. This will prompt for sudo.
+  print('Accepting new Xcode license. Requires sudo.')
+  sys.stdout.flush()
+  args = [
+      'sudo', 'defaults', 'write', current_license_path,
+      'IDEXcodeVersionForAgreedToGMLicense', cipd_xcode_version
+  ]
+  subprocess.check_call(args)
+  args = [
+      'sudo', 'defaults', 'write', current_license_path,
+      'IDELastGMLicenseAgreedTo', cipd_license_version
+  ]
+  subprocess.check_call(args)
+  args = ['sudo', 'plutil', '-convert', 'xml1', current_license_path]
+  subprocess.check_call(args)
+
+  return 0
+
+
+def main():
+  if not _UseHermeticToolchain():
+    print('Skipping Mac toolchain installation for mac')
+    return 0
+
+  parser = argparse.ArgumentParser(description='Download hermetic Xcode.')
+  args = parser.parse_args()
+
+  if not PlatformMeetsHermeticXcodeRequirements():
+    print('OS version does not support toolchain.')
+    return 0
+
+  return InstallXcodeBinaries()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/nocompile.gni b/src/build/nocompile.gni
new file mode 100644
index 0000000..4f17837
--- /dev/null
+++ b/src/build/nocompile.gni
@@ -0,0 +1,139 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a GN target with the following form:
+#
+# import("//build/nocompile.gni")
+# nocompile_test("my_module_nc_unittests") {
+#   sources = [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ]
+#
+#   # optional extra include dirs:
+#   include_dirs = [ ... ]
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions.  This will likely
+# be longer than 80 characters. Giving a solid expected output is important
+# so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we need to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that is empty on success, or will have a
+# series of #error lines on failure, and a set of trivially passing gunit
+# TEST() functions on success. This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/python.gni")
+import("//build/toolchain/toolchain.gni")
+import("//testing/test.gni")
+
+declare_args() {
+  # TODO(crbug.com/105388): make sure no-compile test is not flaky.
+  enable_nocompile_tests = (is_linux || is_chromeos || is_apple) && is_clang &&
+                           host_cpu == target_cpu
+}
+
+if (enable_nocompile_tests) {
+  import("//build/config/c++/c++.gni")
+  import("//build/config/sysroot.gni")
+  template("nocompile_test") {
+    nocompile_target = target_name + "_run_nocompile"
+
+    # TODO(crbug.com/1112471): Get this to run cleanly under Python 3.
+    python2_action_foreach(nocompile_target) {
+      testonly = true
+      script = "//tools/nocompile_driver.py"
+      sources = invoker.sources
+      deps = invoker.deps
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+
+      result_path = "$target_gen_dir/{{source_name_part}}_nc.cc"
+      depfile = "${result_path}.d"
+      outputs = [ result_path ]
+      args = [
+        rebase_path("$clang_base_path/bin/clang++", root_build_dir),
+        "4",  # number of compilers to invoke in parallel.
+        "{{source}}",
+        rebase_path(result_path, root_build_dir),
+        "--",
+        "-nostdinc++",
+        "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+        "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+        "-std=c++14",
+        "-Wall",
+        "-Werror",
+        "-Wfatal-errors",
+        "-Wthread-safety",
+        "-I" + rebase_path("//", root_build_dir),
+        "-I" + rebase_path(root_gen_dir, root_build_dir),
+
+        # TODO(https://crbug.com/989932): Track build/config/compiler/BUILD.gn
+        "-Wno-implicit-int-float-conversion",
+
+        # TODO(crbug.com/1166707): libc++ now requires this macro to be defined.
+        "-D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS",
+      ]
+
+      if (is_apple && host_os != "mac") {
+        args += [ "--target=x86_64-apple-macos" ]
+      }
+
+      # Iterate over any extra include dirs and append them to the command line.
+      if (defined(invoker.include_dirs)) {
+        foreach(include_dir, invoker.include_dirs) {
+          args += [ "-I" + rebase_path(include_dir, root_build_dir) ]
+        }
+      }
+
+      if (sysroot != "") {
+        args += [
+          "--sysroot",
+          rebase_path(sysroot, root_build_dir),
+        ]
+      }
+    }
+
+    test(target_name) {
+      deps = invoker.deps + [ ":$nocompile_target" ]
+      sources = get_target_outputs(":$nocompile_target")
+    }
+  }
+}
diff --git a/src/build/noop.py b/src/build/noop.py
new file mode 100644
index 0000000..cf4d3ae
--- /dev/null
+++ b/src/build/noop.py
@@ -0,0 +1,4 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script that does nothing successfully."""
diff --git a/src/build/partitioned_shared_library.gni b/src/build/partitioned_shared_library.gni
new file mode 100644
index 0000000..2ea32ce
--- /dev/null
+++ b/src/build/partitioned_shared_library.gni
@@ -0,0 +1,143 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+
+# This template creates a set of shared libraries, by linking a single
+# "partitioned" shared library, then splitting it into multiple pieces.
+# The intention is to facilitate code-splitting between a base library and
+# additional feature-specific libraries that may be obtained and loaded at a
+# later time.
+#
+# The combined library is an intermediate product made by leveraging the LLVM
+# toolchain.  Code modules may be labeled via compiler flag as belonging to a
+# particular partition.  At link time, any symbols reachable by only a single
+# partition's entrypoints will be located in a partition-specific library
+# segment.  After linking, the segments are split apart using objcopy into
+# separate libraries.  The main library is then packaged with the application
+# as usual, while feature libraries may be packaged, delivered and loaded
+# separately (via an Android Dynamic Feature Module).
+#
+# When loading a feature library, the intended address of the library must be
+# supplied to the loader, so that it can be mapped to the memory location.  The
+# address offsets of the feature libraries are stored in the base library and
+# accessed through special symbols named according to the partitions.
+#
+# The template instantiates targets for the base library, as well as each
+# specified partition, based on the root target name.  Example:
+#
+#   - libmonochrome            (base library)
+#   - libmonochrome_foo        (partition library for feature 'foo')
+#   - libmonochrome_bar        (partition library for feature 'bar')
+#
+# Note that the feature library filenames are chosen based on the main
+# library's name (eg. libmonochrome_foo.so), but the soname of the feature
+# library is based on the feature name (eg. "foo").  This should generally be
+# okay, with the caveat that loading the library multiple times *might* cause
+# problems in Android.
+#
+# This template uses shared_library's default configurations.
+#
+# Variables:
+#   partitions: A list of library partition names to extract, in addition to
+#     the base library.
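+#
+# A minimal usage sketch (illustrative; the target, source, and partition
+# names are hypothetical):
+#
+#   partitioned_shared_library("libmonochrome") {
+#     sources = [ "monochrome_main.cc" ]
+#     partitions = [
+#       "foo",
+#       "bar",
+#     ]
+#   }
+#
+# Given the partition_action invocations below, this produces libmonochrome.so
+# plus libmonochrome_foo_partition.so and libmonochrome_bar_partition.so.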
+
+template("partitioned_shared_library") {
+  assert(is_clang)
+  forward_variables_from(invoker, [ "testonly" ])
+
+  _combined_library_target = "${target_name}__combined"
+
+  # Strip "lib" from target names; it will be re-added to output libraries.
+  _output_name = string_replace(target_name, "lib", "")
+
+  shared_library(_combined_library_target) {
+    forward_variables_from(invoker, "*", [ "partitions" ])
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-Wl,-soname,lib${_output_name}.so",
+      "--link-only",
+    ]
+
+    # This shared library is an intermediate artifact that should not be
+    # packaged into the final build. Therefore, reset metadata.
+    metadata = {
+    }
+  }
+
+  template("partition_action") {
+    action(target_name) {
+      deps = [ ":$_combined_library_target" ]
+      script = "//build/extract_partition.py"
+      sources =
+          [ "$root_out_dir/lib.unstripped/lib${_output_name}__combined.so" ]
+      outputs = [
+        invoker.unstripped_output,
+        invoker.stripped_output,
+      ]
+      data = [ invoker.unstripped_output ]
+      metadata = {
+        shared_libraries = [ invoker.stripped_output ]
+      }
+      args = [
+        "--objcopy",
+        rebase_path("$clang_base_path/bin/llvm-objcopy", root_build_dir),
+        "--unstripped-output",
+        rebase_path(invoker.unstripped_output, root_build_dir),
+        "--stripped-output",
+        rebase_path(invoker.stripped_output, root_build_dir),
+      ]
+      if (defined(invoker.partition) && invoker.partition != "") {
+        args += [
+          "--partition",
+          "${invoker.partition}",
+        ]
+      }
+
+      if (use_debug_fission) {
+        _dwp = rebase_path("${android_tool_prefix}dwp", root_build_dir)
+        args += [ "--dwp=${_dwp}" ]
+        outputs += [ invoker.unstripped_output + ".dwp" ]
+      }
+      args += [ rebase_path(sources[0], root_build_dir) ]
+    }
+  }
+
+  partition_action(target_name) {
+    stripped_output = "$root_out_dir/lib${_output_name}.so"
+    unstripped_output = "$root_out_dir/lib.unstripped/lib${_output_name}.so"
+  }
+
+  # Note that as of now, non-base partition libraries are placed in a
+  # subdirectory of the root output directory.  This is because partition
+  # sonames are not sensitive to the filename of the base library, and as such,
+  # their corresponding file names may be generated multiple times by different
+  # base libraries.  To avoid collisions, each base library target has a
+  # corresponding subdir for its extra partitions.
+  #
+  # If this proves problematic to various pieces of infrastructure, a proposed
+  # alternative is allowing the linker to rename partitions.  For example,
+  # feature "foo" may be a partition.  If two different base libraries both
+  # define "foo" partitions, the linker may be made to accept an extra command
+  # to rename the partition's soname to "foo1" or "foo2".  Other build config
+  # can name the libraries foo1.so and foo2.so, allowing them to reside in the
+  # same directory.
+  foreach(_partition, invoker.partitions) {
+    partition_action("${target_name}_${_partition}") {
+      partition = "${_partition}_partition"
+      stripped_output = "$root_out_dir/lib${_output_name}_${partition}.so"
+      unstripped_output =
+          "$root_out_dir/lib.unstripped/lib${_output_name}_${partition}.so"
+    }
+  }
+}
+
+set_defaults("partitioned_shared_library") {
+  configs = default_shared_library_configs
+}
diff --git a/src/build/precompile.cc b/src/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/src/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/src/build/precompile.h b/src/build/precompile.h
new file mode 100644
index 0000000..c699562
--- /dev/null
+++ b/src/build/precompile.h
@@ -0,0 +1,53 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used as a precompiled header for both C and C++ files. So
+// any C++ headers must go in the __cplusplus block below.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <math.h>
+#include <memory.h>
+#include <signal.h>
+#include <stdarg.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#if defined(__cplusplus)
+
+#include <algorithm>
+#include <bitset>
+#include <cmath>
+#include <cstddef>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <fstream>
+#include <functional>
+#include <iomanip>
+#include <iosfwd>
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#endif  // __cplusplus
diff --git a/src/build/print_python_deps.py b/src/build/print_python_deps.py
new file mode 100755
index 0000000..e567834
--- /dev/null
+++ b/src/build/print_python_deps.py
@@ -0,0 +1,223 @@
+#!/usr/bin/python2.7
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints all non-system dependencies for the given module.
+
+The primary use-case for this script is to generate the list of python modules
+required for .isolate files.
+
+This script should be compatible with Python 2 and Python 3.
+"""
+
+import argparse
+import fnmatch
+import os
+import pipes
+import sys
+
+# Don't use any helper modules, or else they will end up in the results.
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def ComputePythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.values()
+                  if m and hasattr(m, '__file__') and m.__file__)
+
+  src_paths = set()
+  for path in module_paths:
+    if path == __file__:
+      continue
+    path = os.path.abspath(path)
+    if not path.startswith(_SRC_ROOT):
+      continue
+
+    if (path.endswith('.pyc')
+        or (path.endswith('c') and not os.path.splitext(path)[1])):
+      path = path[:-1]
+    src_paths.add(path)
+
+  return src_paths
+
+
+def _NormalizeCommandLine(options):
+  """Returns a string that when run from SRC_ROOT replicates the command."""
+  args = ['build/print_python_deps.py']
+  root = os.path.relpath(options.root, _SRC_ROOT)
+  if root != '.':
+    args.extend(('--root', root))
+  if options.output:
+    args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT)))
+  if options.gn_paths:
+    args.extend(('--gn-paths',))
+  for allowlist in sorted(options.allowlists):
+    args.extend(('--allowlist', os.path.relpath(allowlist, _SRC_ROOT)))
+  args.append(os.path.relpath(options.module, _SRC_ROOT))
+  return ' '.join(pipes.quote(x) for x in args)
+
+
+def _FindPythonInDirectory(directory, allow_test):
+  """Returns an iterable of all non-test python files in the given directory."""
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    for filename in filenames:
+      if filename.endswith('.py') and (allow_test
+                                       or not filename.endswith('_test.py')):
+        yield os.path.join(root, filename)
+
+
+def _GetTargetPythonVersion(module):
+  """Heuristically determines the target module's Python version."""
+  with open(module) as f:
+    shebang = f.readline().strip()
+  default_version = 2
+  if shebang.startswith('#!'):
+    # Examples:
+    # '#!/usr/bin/python'
+    # '#!/usr/bin/python2.7'
+    # '#!/usr/bin/python3'
+    # '#!/usr/bin/env python3'
+    # '#!/usr/bin/env vpython'
+    # '#!/usr/bin/env vpython3'
+    exec_name = os.path.basename(shebang[2:].split(' ')[-1])
+    for python_prefix in ['python', 'vpython']:
+      if exec_name.startswith(python_prefix):
+        version_string = exec_name[len(python_prefix):]
+        break
+    else:
+      raise ValueError('Invalid shebang: ' + shebang)
+    if version_string:
+      return int(float(version_string))
+  return default_version
+
+
+def _ImportModuleByPath(module_path):
+  """Imports a module by its source file."""
+  # Replace the path entry for print_python_deps.py with the one for the given
+  # module.
+  sys.path[0] = os.path.dirname(module_path)
+  if sys.version_info[0] == 2:
+    import imp  # Python 2 only, since it's deprecated in Python 3.
+    imp.load_source('NAME', module_path)
+  else:
+    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
+    module_name = os.path.splitext(os.path.basename(module_path))[0]
+    import importlib.util  # Python 3 only, since it's unavailable in Python 2.
+    spec = importlib.util.spec_from_file_location(module_name, module_path)
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Prints all non-system dependencies for the given module.')
+  parser.add_argument('module',
+                      help='The python module to analyze.')
+  parser.add_argument('--root', default='.',
+                      help='Directory to make paths relative to.')
+  parser.add_argument('--output',
+                      help='Write output to a file rather than stdout.')
+  parser.add_argument('--inplace', action='store_true',
+                      help='Write output to a file with the same path as the '
+                      'module, but with a .pydeps extension. Also sets the '
+                      'root to the module\'s directory.')
+  parser.add_argument('--no-header', action='store_true',
+                      help='Do not write the "# Generated by" header.')
+  parser.add_argument('--gn-paths', action='store_true',
+                      help='Write paths as //foo/bar/baz.py')
+  parser.add_argument('--did-relaunch', action='store_true',
+                      help=argparse.SUPPRESS)
+  parser.add_argument('--allowlist',
+                      default=[],
+                      action='append',
+                      dest='allowlists',
+                      help='Recursively include all non-test python files '
+                      'within this directory. May be specified multiple times.')
+  options = parser.parse_args()
+
+  if options.inplace:
+    if options.output:
+      parser.error('Cannot use --inplace and --output at the same time!')
+    if not options.module.endswith('.py'):
+      parser.error('Input module path should end with .py suffix!')
+    options.output = options.module + 'deps'
+    options.root = os.path.dirname(options.module)
+
+  modules = [options.module]
+  if os.path.isdir(options.module):
+    modules = list(_FindPythonInDirectory(options.module, allow_test=True))
+  if not modules:
+    parser.error('Input directory does not contain any python files!')
+
+  target_versions = [_GetTargetPythonVersion(m) for m in modules]
+  target_version = target_versions[0]
+  assert target_version in [2, 3]
+  assert all(v == target_version for v in target_versions)
+
+  current_version = sys.version_info[0]
+
+  # Trybots run with vpython as default Python, but with a different config
+  # from //.vpython. To make the is_vpython test work, and to match the behavior
+  # of dev machines, the shebang line must be run with python2.7.
+  #
+  # E.g. $HOME/.vpython-root/dd50d3/bin/python
+  # E.g. /b/s/w/ir/cache/vpython/ab5c79/bin/python
+  is_vpython = 'vpython' in sys.executable
+  if not is_vpython or target_version != current_version:
+    # Prevent infinite relaunch if something goes awry.
+    assert not options.did_relaunch
+    # Re-launch using vpython will cause us to pick up modules specified in
+    # //.vpython, but does not cause it to pick up modules defined inline via
+    # [VPYTHON:BEGIN] ... [VPYTHON:END] comments.
+    # TODO(agrieve): Add support for this if the need ever arises.
+    vpython_to_use = {2: 'vpython', 3: 'vpython3'}[target_version]
+    os.execvp(vpython_to_use, [vpython_to_use] + sys.argv + ['--did-relaunch'])
+
+  if current_version == 3:
+    # Work-around for protobuf library not being loadable via importlib
+    # This is needed due to compile_resources.py.
+    import importlib._bootstrap_external
+    importlib._bootstrap_external._NamespacePath.sort = lambda self, **_: 0
+
+  paths_set = set()
+  try:
+    for module in modules:
+      _ImportModuleByPath(module)
+      paths_set.update(ComputePythonDependencies())
+  except Exception:
+    # Output extra diagnostics when loading the script fails.
+    sys.stderr.write('Error running print_python_deps.py.\n')
+    sys.stderr.write('is_vpython={}\n'.format(is_vpython))
+    sys.stderr.write('did_relaunch={}\n'.format(options.did_relaunch))
+    sys.stderr.write('python={}\n'.format(sys.executable))
+    raise
+
+  for path in options.allowlists:
+    paths_set.update(
+        os.path.abspath(p)
+        for p in _FindPythonInDirectory(path, allow_test=False))
+
+  paths = [os.path.relpath(p, options.root) for p in paths_set]
+
+  normalized_cmdline = _NormalizeCommandLine(options)
+  out = open(options.output, 'w') if options.output else sys.stdout
+  with out:
+    if not options.no_header:
+      out.write('# Generated by running:\n')
+      out.write('#   %s\n' % normalized_cmdline)
+    prefix = '//' if options.gn_paths else ''
+    for path in sorted(paths):
+      out.write(prefix + path + '\n')
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/protoc_java.py b/src/build/protoc_java.py
new file mode 100755
index 0000000..fe602a9
--- /dev/null
+++ b/src/build/protoc_java.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is the action script for the proto_java_library template.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'android', 'gyp'))
+from util import build_utils
+
+
+def _HasJavaPackage(proto_lines):
+  return any(line.strip().startswith('option java_package')
+             for line in proto_lines)
+
+
+def _EnforceJavaPackage(proto_srcs):
+  for proto_path in proto_srcs:
+    with open(proto_path) as in_proto:
+      if not _HasJavaPackage(in_proto.readlines()):
+        raise Exception('Proto files for java must contain a "java_package" '
+                        'line: {}'.format(proto_path))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--protoc', required=True, help='Path to protoc binary.')
+  parser.add_argument('--proto-path',
+                      required=True,
+                      help='Path to proto directory.')
+  parser.add_argument('--java-out-dir',
+                      help='Path to output directory for java files.')
+  parser.add_argument('--srcjar', help='Path to output srcjar.')
+  parser.add_argument('--stamp', help='File to touch on success.')
+  parser.add_argument(
+      '--import-dir',
+      action='append',
+      default=[],
+      help='Extra import directory for protos, can be repeated.')
+  parser.add_argument('protos', nargs='+', help='proto source files')
+  options = parser.parse_args(argv)
+
+  if not options.java_out_dir and not options.srcjar:
+    raise Exception('One of --java-out-dir or --srcjar must be specified.')
+
+  _EnforceJavaPackage(options.protos)
+
+  with build_utils.TempDir() as temp_dir:
+    out_arg = '--java_out=lite:' + temp_dir
+
+    proto_path_args = ['--proto_path', options.proto_path]
+    for path in options.import_dir:
+      proto_path_args += ["--proto_path", path]
+
+    # Generate Java files using protoc.
+    build_utils.CheckOutput(
+        [options.protoc] + proto_path_args + [out_arg] + options.protos,
+        # protoc generates superfluous warnings about LITE_RUNTIME deprecation
+        # even though we are using the new non-deprecated method.
+        stderr_filter=lambda output: build_utils.FilterLines(
+            output, '|'.join([r'optimize_for = LITE_RUNTIME', r'java/lite\.md'])
+        ))
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    assert options.srcjar
+    deps = options.protos + [options.protoc]
+    build_utils.WriteDepfile(options.depfile, options.srcjar, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/protoc_java.pydeps b/src/build/protoc_java.pydeps
new file mode 100644
index 0000000..c3ed2be
--- /dev/null
+++ b/src/build/protoc_java.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+#   build/print_python_deps.py --root build --output build/protoc_java.pydeps build/protoc_java.py
+android/gyp/util/__init__.py
+android/gyp/util/build_utils.py
+gn_helpers.py
+protoc_java.py
diff --git a/src/build/redirect_stdout.py b/src/build/redirect_stdout.py
new file mode 100644
index 0000000..166293c
--- /dev/null
+++ b/src/build/redirect_stdout.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import subprocess
+import sys
+
+# This script executes a command and redirects the stdout to a file. This is
+# equivalent to |command... > output_file|.
+#
+# Usage: python redirect_stdout.py output_file command...
+
+if __name__ == '__main__':
+  if len(sys.argv) < 2:
+    print("Usage: %s output_file command..." % sys.argv[0], file=sys.stderr)
+    sys.exit(1)
+
+  with open(sys.argv[1], 'w') as fp:
+    sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp))
diff --git a/src/build/rm.py b/src/build/rm.py
new file mode 100755
index 0000000..43a663d
--- /dev/null
+++ b/src/build/rm.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Delete a file.
+
+This module works much like the rm posix command.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+
+def Main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('files', nargs='+')
+  parser.add_argument('-f', '--force', action='store_true',
+                      help="don't err on missing")
+  parser.add_argument('--stamp', required=True, help='touch this file')
+  args = parser.parse_args()
+  for f in args.files:
+    try:
+      os.remove(f)
+    except OSError:
+      if not args.force:
+        print("'%s' does not exist" % f, file=sys.stderr)
+        return 1
+
+  with open(args.stamp, 'w'):
+    os.utime(args.stamp, None)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/src/build/sample_arg_file.gn b/src/build/sample_arg_file.gn
new file mode 100644
index 0000000..91e9045
--- /dev/null
+++ b/src/build/sample_arg_file.gn
@@ -0,0 +1,6 @@
+# Build arguments go here. Here are some of the most commonly set ones.
+# Run `gn args <out_dir> --list` for the full list.
+#   is_component_build = true
+#   is_debug = true
+#   symbol_level = 2
+#   use_goma = false
diff --git a/src/build/sanitize-mac-build-log.sed b/src/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000..b4111c7
--- /dev/null
+++ b/src/build/sanitize-mac-build-log.sed
@@ -0,0 +1,33 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding\.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
diff --git a/src/build/sanitize-mac-build-log.sh b/src/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..df5a7af2
--- /dev/null
+++ b/src/build/sanitize-mac-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# Run the companion .sed script that sits next to this script (same basename,
+# with the trailing "sh" replaced by "sed").
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/src/build/sanitize-win-build-log.sed b/src/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000..c18e664
--- /dev/null
+++ b/src/build/sanitize-win-build-log.sed
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully\./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]+>//
+
+# Shorten bindings generation lines
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
diff --git a/src/build/sanitize-win-build-log.sh b/src/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..df5a7af2
--- /dev/null
+++ b/src/build/sanitize-win-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# Run the companion .sed script that sits next to this script (same basename,
+# with the trailing "sh" replaced by "sed").
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/src/build/sanitizers/asan_suppressions.cc b/src/build/sanitizers/asan_suppressions.cc
new file mode 100644
index 0000000..f3b9459
--- /dev/null
+++ b/src/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines. Entries should look, for example, like:
+//
+//   // http://crbug.com/178677
+//   "interceptor_via_lib:libsqlite3.so\n"
+char kASanDefaultSuppressions[] =
+
+    // End of suppressions.
+    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+    "";  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
diff --git a/src/build/sanitizers/lsan_suppressions.cc b/src/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 0000000..9c0fec4
--- /dev/null
+++ b/src/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,77 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+    // Intentional leak used as sanity test for Valgrind/memcheck.
+    "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+    // ================ Leaks in third-party code ================
+
+    // False positives in libfontconfig. http://crbug.com/39050
+    "leak:libfontconfig\n"
+    // eglibc-2.19/string/strdup.c creates false-positive leak errors for the
+    // same reason as crbug.com/39050. The leak error stack trace, when
+    // unwound on malloc, includes a call to libfontconfig, but the default
+    // stack trace is too short on the LeakSanitizer bot for the libfontconfig
+    // suppression to work. http://crbug.com/605286
+    "leak:__strdup\n"
+
+    // Leaks in Nvidia's libGL.
+    "leak:libGL.so\n"
+    "leak:libGLX_nvidia.so\n"
+    "leak:libnvidia-glcore.so\n"
+
+    // XRandR has several one time leaks.
+    "leak:libxrandr\n"
+
+    // xrandr leak. http://crbug.com/119677
+    "leak:XRRFindDisplay\n"
+
+    // http://crbug.com/431213, http://crbug.com/416665
+    "leak:gin/object_template_builder.h\n"
+
+    // Leaks in swrast_dri.so. http://crbug.com/540042
+    "leak:swrast_dri.so\n"
+
+    // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+    "leak:__gconv_lookup_cache\n"
+
+    // ================ Leaks in Chromium code ================
+    // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+    // Instead, commits that introduce memory leaks should be reverted.
+    // Suppressing the leak is acceptable in some cases when reverting is
+    // impossible, i.e. when enabling leak detection for the first time for a
+    // test target with pre-existing leaks.
+
+    // https://crbug.com/755670
+    "leak:third_party/yasm/\n"
+
+    // v8 leaks caused by weak ref callbacks not being called.
+    "leak:blink::DOMWrapperWorld::Create\n"
+    "leak:blink::ScriptState::Create\n"
+
+    // Crash keys are intentionally leaked.
+    "leak:crash_reporter::(anonymous "
+    "namespace)::CrashKeyBaseSupport::Allocate\n"
+
+    // Suppress leaks in CreateCdmInstance. https://crbug.com/961062
+    "leak:media::CdmAdapter::CreateCdmInstance\n"
+
+    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+    // End of suppressions.
+    ;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
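As the header comment notes, suppressions beyond this built-in list can be supplied at run time through LSAN_OPTIONS. A minimal sketch, assuming a hypothetical instrumented test binary and suppressions file:

```python
# Launch an LSan-instrumented binary (hypothetical path) with an extra
# suppressions file, per the LSAN_OPTIONS mechanism described above.
import os
import subprocess

env = dict(os.environ)
env['LSAN_OPTIONS'] = 'suppressions=/path/to/suppressions'
subprocess.check_call(['./out/lsan/base_unittests'], env=env)
```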
diff --git a/src/build/sanitizers/sanitizer_options.cc b/src/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000..0df3e5a
--- /dev/null
+++ b/src/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,196 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) ||  \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+    defined(UNDEFINED_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE                                           \
+  extern "C"                                                               \
+  __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+  __attribute__((visibility("default")))                                   \
+  __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   check_printf=1 - check the memory accesses to printf (and other formatted
+//     output routines) arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+//   symbolize=1 - enable in-process symbolization.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+#if defined(OS_LINUX) || defined(OS_CHROMEOS)
+const char kAsanDefaultOptions[] =
+    "check_printf=1 use_sigaltstack=1 strip_path_prefix=/../../ "
+    "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 "
+    "symbolize=1 detect_leaks=0 allow_user_segv_handler=1 "
+    "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/"
+    "bin/llvm-symbolizer";
+
+#elif defined(OS_APPLE)
+const char* kAsanDefaultOptions =
+    "check_printf=1 use_sigaltstack=1 strip_path_prefix=/../../ "
+    "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 ";
+
+#elif defined(OS_WIN)
+const char* kAsanDefaultOptions =
+    "check_printf=1 use_sigaltstack=1 strip_path_prefix=\\..\\..\\ "
+    "fast_unwind_on_fatal=1 detect_stack_use_after_return=1 "
+    "symbolize=1 external_symbolizer_path=%d/../../third_party/"
+    "llvm-build/Release+Asserts/bin/llvm-symbolizer.exe";
+#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS)
+
+#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_APPLE) || \
+    defined(OS_WIN)
+// Allow NaCl to override the default asan options.
+extern const char* kAsanDefaultOptionsNaCl;
+__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+  if (kAsanDefaultOptionsNaCl)
+    return kAsanDefaultOptionsNaCl;
+  return kAsanDefaultOptions;
+}
+
+extern char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+  return kASanDefaultSuppressions;
+}
+#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_APPLE) ||
+        // defined(OS_WIN)
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && (defined(OS_LINUX) || defined(OS_CHROMEOS))
+// Default options for ThreadSanitizer in various configurations:
+//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+const char kTsanDefaultOptions[] =
+    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strip_path_prefix=/../../ external_symbolizer_path=%d/../../third_party/"
+    "llvm-build/Release+Asserts/bin/llvm-symbolizer";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // defined(THREAD_SANITIZER) && (defined(OS_LINUX) ||
+        // defined(OS_CHROMEOS))
+
+#if defined(MEMORY_SANITIZER)
+// Default options for MemorySanitizer:
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+const char kMsanDefaultOptions[] =
+    "strip_path_prefix=/../../ "
+
+#if !defined(OS_APPLE)
+    "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/"
+    "bin/llvm-symbolizer"
+#endif
+;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() {
+  return kMsanDefaultOptions;
+}
+
+#endif  // MEMORY_SANITIZER
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+//   print_suppressions=1 - print the list of matched suppressions.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+const char kLsanDefaultOptions[] =
+    "print_suppressions=1 strip_path_prefix=/../../ "
+
+#if !defined(OS_APPLE)
+    "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/"
+    "bin/llvm-symbolizer "
+#endif
+
+#if defined(ARCH_CPU_64_BITS)
+    // When pointer compression is enabled in V8, the external pointers in the
+    // heap are only guaranteed to be 4-byte aligned, so this option is needed
+    // to ensure that LSan finds all the external pointers.
+    // TODO(crbug.com/328552): see updates from 2019.
+    "use_unaligned=1 "
+#endif  // ARCH_CPU_64_BITS
+    ;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+  return kLsanDefaultOptions;
+}
+
+extern char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+  return kLSanDefaultSuppressions;
+}
+
+#endif  // LEAK_SANITIZER
+
+#if defined(UNDEFINED_SANITIZER)
+// Default options for UndefinedBehaviorSanitizer:
+//   print_stacktrace=1 - print the stacktrace when UBSan reports an error.
+const char kUbsanDefaultOptions[] =
+    "print_stacktrace=1 strip_path_prefix=/../../ "
+
+#if !defined(OS_APPLE)
+    "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/"
+    "bin/llvm-symbolizer"
+#endif
+    ;
+
+SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() {
+  return kUbsanDefaultOptions;
+}
+
+#endif  // UNDEFINED_SANITIZER
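Each of the default-option strings above is a single space-delimited run of key=value pairs that the corresponding sanitizer runtime parses. A small sketch that parses one such string for inspection; the input is copied from kTsanDefaultOptions, minus the external_symbolizer_path entry:

```python
# Parse a sanitizer option string of space-delimited key=value pairs into a
# dict. Input copied from kTsanDefaultOptions above (symbolizer path omitted).
def parse_sanitizer_options(options):
  return dict(pair.split('=', 1) for pair in options.split())

tsan = parse_sanitizer_options(
    'detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 '
    'report_thread_leaks=0 print_suppressions=1 history_size=7 '
    'strip_path_prefix=/../../')
assert tsan['history_size'] == '7'
assert tsan['strip_path_prefix'] == '/../../'
```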
diff --git a/src/build/sanitizers/tsan_suppressions.cc b/src/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000..6704a34
--- /dev/null
+++ b/src/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,141 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions which contains TSan suppressions delimited by
+// newlines. See
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+    // False positives in libdbus.so, libdconfsettings.so, libflashplayer.so,
+    // libgio.so, libglib.so and libgobject.so.
+    // Since we don't instrument them, we cannot reason about the
+    // synchronization in them.
+    "race:libdbus*.so\n"
+    "race:libdconfsettings*.so\n"
+    "race:libflashplayer.so\n"
+    "race:libgio*.so\n"
+    "race:libglib*.so\n"
+    "race:libgobject*.so\n"
+
+    // Intentional race in ToolsSanityTest.DataRace in base_unittests.
+    "race:base/tools_sanity_unittest.cc\n"
+
+    // Data race on WatchdogCounter [test-only].
+    "race:base/threading/watchdog_unittest.cc\n"
+
+    // Data race caused by swapping out the network change notifier with a mock
+    // [test-only]. http://crbug.com/927330.
+    "race:content/browser/net_info_browsertest.cc\n"
+
+    // http://crbug.com/120808
+    "race:base/threading/watchdog.cc\n"
+
+    // http://crbug.com/157586
+    "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+    // http://crbug.com/244856
+    "race:libpulsecommon*.so\n"
+
+    // http://crbug.com/258479
+    "race:g_trace_state\n"
+
+    // http://crbug.com/268924
+    "race:base::g_power_monitor\n"
+    "race:base::PowerMonitor::PowerMonitor\n"
+    "race:base::PowerMonitor::AddObserver\n"
+    "race:base::PowerMonitor::RemoveObserver\n"
+    "race:base::PowerMonitor::IsOnBatteryPower\n"
+
+    // http://crbug.com/272095
+    "race:base::g_top_manager\n"
+
+    // http://crbug.com/308590
+    "race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+    // http://crbug.com/476529
+    "deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+    // http://crbug.com/328826
+    "race:skia::(anonymous namespace)::g_pixel_geometry\n"
+
+    // http://crbug.com/328868
+    "race:PR_Lock\n"
+
+    // http://crbug.com/348984
+    "race:sctp_express_handle_sack\n"
+    "race:system_base_info\n"
+
+    // False positive in libc's tzset_internal, http://crbug.com/379738.
+    "race:tzset_internal\n"
+
+    // http://crbug.com/380554
+    "deadlock:g_type_add_interface_static\n"
+
+    // http://crbug.com/386385
+    "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+    // http://crbug.com/397022
+    "deadlock:"
+    "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
+    "TestBody\n"
+
+    // http://crbug.com/415472
+    "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+    // Lock inversion in third party code, won't fix.
+    // https://crbug.com/455638
+    "deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+    // https://crbug.com/459429
+    "race:randomnessPid\n"
+
+    // http://crbug.com/582274
+    "race:usrsctp_close\n"
+
+    // http://crbug.com/633145
+    "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
+
+    // http://crbug.com/691029
+    "deadlock:libGLX.so*\n"
+
+    // http://crbug.com/973947
+    "deadlock:libnvidia-glsi.so*\n"
+
+    // http://crbug.com/695929
+    "race:base::i18n::IsRTL\n"
+    "race:base::i18n::SetICUDefaultLocale\n"
+
+    // https://crbug.com/794920
+    "race:base::debug::SetCrashKeyString\n"
+    "race:crash_reporter::internal::CrashKeyStringImpl::Set\n"
+
+    // http://crbug.com/927330
+    "race:net::(anonymous namespace)::g_network_change_notifier\n"
+
+    // https://crbug.com/965722
+    "race:content::(anonymous namespace)::CorruptDBRequestHandler\n"
+
+    // https://crbug.com/977085
+    "race:vp3_update_thread_context\n"
+
+    // Benign data race in libjpeg-turbo, won't fix
+    // (https://github.com/libjpeg-turbo/libjpeg-turbo/issues/87).
+    // https://crbug.com/1056011
+    "race:third_party/libjpeg_turbo/simd/x86_64/jsimd.c\n"
+
+    // https://crbug.com/1158622
+    "race:absl::synchronization_internal::Waiter::Post\n"
+
+    // End of suppressions.
+    ;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
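The suppression string above is a newline-delimited list of `type:pattern` entries; this particular file only uses the `race` and `deadlock` types. A quick sketch for sanity-checking hand edits to such a list:

```python
# Verify that every suppression entry uses one of the two types this file
# relies on (race/deadlock); TSan supports other types not checked here.
def check_tsan_suppressions(text):
  for line in filter(None, text.splitlines()):
    kind = line.split(':', 1)[0]
    assert kind in ('race', 'deadlock'), 'unexpected entry type: ' + kind

check_tsan_suppressions('race:PR_Lock\ndeadlock:libGLX.so*\n')
```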
diff --git a/src/build/shim_headers.gni b/src/build/shim_headers.gni
new file mode 100644
index 0000000..0900cba
--- /dev/null
+++ b/src/build/shim_headers.gni
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+template("shim_headers") {
+  action_name = "gen_${target_name}"
+  config_name = "${target_name}_config"
+  shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}"
+
+  config(config_name) {
+    include_dirs = [ shim_headers_path ]
+  }
+
+  action(action_name) {
+    script = "//tools/generate_shim_headers/generate_shim_headers.py"
+    args = [
+      "--generate",
+      "--headers-root",
+      rebase_path(invoker.root_path),
+      "--output-directory",
+      rebase_path(shim_headers_path),
+    ]
+    if (defined(invoker.prefix)) {
+      args += [
+        "--prefix",
+        invoker.prefix,
+      ]
+    }
+    args += invoker.headers
+
+    outputs = process_file_template(invoker.headers,
+                                    "${shim_headers_path}/{{source_file_part}}")
+  }
+
+  group(target_name) {
+    deps = [ ":${action_name}" ]
+    all_dependent_configs = [ ":${config_name}" ]
+  }
+}
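The template's action assembles a generate_shim_headers.py invocation from the invoker's root_path, optional prefix, and headers. A sketch of the resulting command line, with hypothetical invoker values (the zlib names are illustrative, not taken from this diff):

```python
# Reconstruct the command line the shim_headers action builds; all invoker
# values below (zlib paths, header list, prefix) are hypothetical.
script = 'tools/generate_shim_headers/generate_shim_headers.py'
root_path = 'third_party/zlib'           # invoker.root_path
out_dir = 'gen/shim_headers/zlib_shim'   # ${root_gen_dir}/shim_headers/...
headers = ['zlib.h']                     # invoker.headers

cmd = [script, '--generate',
       '--headers-root', root_path,
       '--output-directory', out_dir]
cmd += ['--prefix', 'zlib/']             # only when invoker.prefix is defined
cmd += headers
print(' '.join(cmd))
```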
diff --git a/src/build/skia_gold_common/.style.yapf b/src/build/skia_gold_common/.style.yapf
new file mode 100644
index 0000000..239e0a2
--- /dev/null
+++ b/src/build/skia_gold_common/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+
+column_limit = 80
+indent_width = 2
+
diff --git a/src/build/skia_gold_common/README.md b/src/build/skia_gold_common/README.md
new file mode 100644
index 0000000..ec72111
--- /dev/null
+++ b/src/build/skia_gold_common/README.md
@@ -0,0 +1,6 @@
+This directory contains Python code used for interacting with the Skia Gold
+image diff service. It is used by multiple test harnesses, e.g.
+`//build/android/test_runner.py` and
+`//content/test/gpu/run_gpu_integration_test.py`. A place such as
+`//testing/` would likely be a better location, but that causes issues with
+V8, since V8 imports `//build/` but not all of Chromium src.
diff --git a/src/build/skia_gold_common/__init__.py b/src/build/skia_gold_common/__init__.py
new file mode 100644
index 0000000..ae1922e
--- /dev/null
+++ b/src/build/skia_gold_common/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/src/build/skia_gold_common/output_managerless_skia_gold_session.py b/src/build/skia_gold_common/output_managerless_skia_gold_session.py
new file mode 100644
index 0000000..59e662a
--- /dev/null
+++ b/src/build/skia_gold_common/output_managerless_skia_gold_session.py
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implementation of skia_gold_session.py without output managers.
+
+Diff output is instead stored in a directory and pointed to with file:// URLs.
+"""
+
+import os
+import subprocess
+import tempfile
+
+from skia_gold_common import skia_gold_session
+
+
+class OutputManagerlessSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+  def RunComparison(  # pylint: disable=too-many-arguments
+      self,
+      name,
+      png_file,
+      output_manager=True,
+      inexact_matching_args=None,
+      use_luci=True,
+      optional_keys=None):
+    # Passing True for the output manager is a bit of a hack, as we don't
+    # actually need an output manager and just need to get past the truthy
+    # check.
+    return super(OutputManagerlessSkiaGoldSession, self).RunComparison(
+        name=name,
+        png_file=png_file,
+        output_manager=output_manager,
+        inexact_matching_args=inexact_matching_args,
+        use_luci=use_luci,
+        optional_keys=optional_keys)
+
+  def _CreateDiffOutputDir(self):
+    # We intentionally don't clean this up and don't put it in self._working_dir
+    # since we need it to stick around after the test completes so the user
+    # can look at its contents.
+    return tempfile.mkdtemp()
+
+  def _StoreDiffLinks(self, image_name, _, output_dir):
+    results = self._comparison_results.setdefault(image_name,
+                                                  self.ComparisonResults())
+    # The directory should contain "input-<hash>.png", "closest-<hash>.png",
+    # and "diff.png".
+    for f in os.listdir(output_dir):
+      file_url = 'file://%s' % os.path.join(output_dir, f)
+      if f.startswith('input-'):
+        results.local_diff_given_image = file_url
+      elif f.startswith('closest-'):
+        results.local_diff_closest_image = file_url
+      elif f == 'diff.png':
+        results.local_diff_diff_image = file_url
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd):
+    try:
+      output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+      return 0, output
+    except subprocess.CalledProcessError as e:
+      return e.returncode, e.output
diff --git a/src/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py b/src/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py
new file mode 100755
index 0000000..0c6acd4
--- /dev/null
+++ b/src/build/skia_gold_common/output_managerless_skia_gold_session_unittest.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#pylint: disable=protected-access
+
+import os
+import tempfile
+import unittest
+
+import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+from skia_gold_common import output_managerless_skia_gold_session as omsgs
+from skia_gold_common import skia_gold_properties
+from skia_gold_common import unittest_utils
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+def assertArgWith(test, arg_list, arg, value):
+  i = arg_list.index(arg)
+  test.assertEqual(arg_list[i + 1], value)
+
+
+class GpuSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession,
+                     '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir,
+                                                     sgp,
+                                                     self._json_keys,
+                                                     'corpus',
+                                                     instance='instance')
+    session.Diff('name', 'png_file', None)
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('diff', call_args)
+    assertArgWith(self, call_args, '--corpus', 'corpus')
+    # TODO(skbug.com/10610): Remove the -public once we go back to using the
+    # non-public instance, or add a second test for testing that the correct
+    # instance is chosen if we decide to support both depending on what the
+    # user is authenticated for.
+    assertArgWith(self, call_args, '--instance', 'instance-public')
+    assertArgWith(self, call_args, '--input', 'png_file')
+    assertArgWith(self, call_args, '--test', 'name')
+    # TODO(skbug.com/10611): Re-add this assert and remove the check for the
+    # absence of the directory once we switch back to using the proper working
+    # directory.
+    # assertArgWith(self, call_args, '--work-dir', self._working_dir)
+    self.assertNotIn(self._working_dir, call_args)
+    i = call_args.index('--out-dir')
+    # The output directory should not be a subdirectory of the working
+    # directory.
+    self.assertNotIn(self._working_dir, call_args[i + 1])
+
+
+class OutputManagerlessSkiaGoldSessionStoreDiffLinksTest(
+    fake_filesystem_unittest.TestCase):
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  def test_outputManagerNotNeeded(self):
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp,
+                                                     self._json_keys, None,
+                                                     None)
+    input_filepath = os.path.join(self._working_dir, 'input-inputhash.png')
+    with open(input_filepath, 'w') as f:
+      f.write('')
+    closest_filepath = os.path.join(self._working_dir,
+                                    'closest-closesthash.png')
+    with open(closest_filepath, 'w') as f:
+      f.write('')
+    diff_filepath = os.path.join(self._working_dir, 'diff.png')
+    with open(diff_filepath, 'w') as f:
+      f.write('')
+
+    session._StoreDiffLinks('foo', None, self._working_dir)
+    self.assertEqual(session.GetGivenImageLink('foo'),
+                     'file://' + input_filepath)
+    self.assertEqual(session.GetClosestImageLink('foo'),
+                     'file://' + closest_filepath)
+    self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + diff_filepath)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/skia_gold_common/skia_gold_properties.py b/src/build/skia_gold_common/skia_gold_properties.py
new file mode 100644
index 0000000..ebf7098
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_properties.py
@@ -0,0 +1,141 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Class for storing Skia Gold comparison properties.
+
+Examples:
+* git revision being tested
+* Whether the test is being run locally or on a bot
+* What the continuous integration system is
+"""
+
+import logging
+import os
+import subprocess
+import sys
+
+
+class SkiaGoldProperties(object):
+  def __init__(self, args):
+    """Abstract class to validate and store properties related to Skia Gold.
+
+    Args:
+      args: The parsed arguments from an argparse.ArgumentParser.
+    """
+    self._git_revision = None
+    self._issue = None
+    self._patchset = None
+    self._job_id = None
+    self._local_pixel_tests = None
+    self._no_luci_auth = None
+    self._bypass_skia_gold_functionality = None
+    self._code_review_system = None
+    # Could in theory be configurable, but hard-coded for now since there's
+    # no plan to support anything else.
+    self._continuous_integration_system = 'buildbucket'
+
+    self._InitializeProperties(args)
+
+  def IsTryjobRun(self):
+    return self.issue is not None
+
+  @property
+  def continuous_integration_system(self):
+    return self._continuous_integration_system
+
+  @property
+  def code_review_system(self):
+    return self._code_review_system or 'gerrit'
+
+  @property
+  def git_revision(self):
+    return self._GetGitRevision()
+
+  @property
+  def issue(self):
+    return self._issue
+
+  @property
+  def job_id(self):
+    return self._job_id
+
+  @property
+  def local_pixel_tests(self):
+    return self._IsLocalRun()
+
+  @property
+  def no_luci_auth(self):
+    return self._no_luci_auth
+
+  @property
+  def patchset(self):
+    return self._patchset
+
+  @property
+  def bypass_skia_gold_functionality(self):
+    return self._bypass_skia_gold_functionality
+
+  @staticmethod
+  def _GetGitOriginMasterHeadSha1():
+    raise NotImplementedError()
+
+  def _GetGitRevision(self):
+    if not self._git_revision:
+      # Automated tests should always pass the revision, so assume we're on
+      # a workstation and try to get the local origin/master HEAD.
+      if not self._IsLocalRun():
+        raise RuntimeError(
+            '--git-revision was not passed when running on a bot')
+      revision = self._GetGitOriginMasterHeadSha1()
+      if not revision or len(revision) != 40:
+        raise RuntimeError(
+            '--git-revision not passed and unable to determine from git')
+      self._git_revision = revision
+    return self._git_revision
+
+  def _IsLocalRun(self):
+    if self._local_pixel_tests is None:
+      # Look for the presence of the SWARMING_SERVER environment variable as a
+      # heuristic to determine whether we're running on a workstation or a bot.
+      # This should always be set on Swarming, but would be unusual on a
+      # workstation.
+      self._local_pixel_tests = 'SWARMING_SERVER' not in os.environ
+      if self._local_pixel_tests:
+        logging.warning(
+            'Automatically determined that test is running on a workstation')
+      else:
+        logging.warning(
+            'Automatically determined that test is running on a bot')
+    return self._local_pixel_tests
+
+  def _InitializeProperties(self, args):
+    if hasattr(args, 'local_pixel_tests'):
+      # If not set, will be automatically determined later if needed.
+      self._local_pixel_tests = args.local_pixel_tests
+
+    if hasattr(args, 'no_luci_auth'):
+      self._no_luci_auth = args.no_luci_auth
+
+    if hasattr(args, 'bypass_skia_gold_functionality'):
+      self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
+
+    if hasattr(args, 'code_review_system'):
+      self._code_review_system = args.code_review_system
+
+    # Will be automatically determined later if needed.
+    if not hasattr(args, 'git_revision') or not args.git_revision:
+      return
+    self._git_revision = args.git_revision
+
+    # Only expected on tryjob runs.
+    if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
+      return
+    self._issue = args.gerrit_issue
+    if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
+      raise RuntimeError(
+          '--gerrit-issue passed, but --gerrit-patchset not passed.')
+    self._patchset = args.gerrit_patchset
+    if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
+      raise RuntimeError(
+          '--gerrit-issue passed, but --buildbucket-id not passed.')
+    self._job_id = args.buildbucket_id
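SkiaGoldProperties only reads attributes off the parsed argument object, so any argparse parser producing the attribute names used in _InitializeProperties will do. A minimal sketch of such a parser; the flag spellings are an assumption, only the attribute names come from the class:

```python
# Hypothetical argparse setup producing the attributes SkiaGoldProperties
# reads (git_revision, gerrit_issue, gerrit_patchset, buildbucket_id, etc.).
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--git-revision')
parser.add_argument('--gerrit-issue', type=int)
parser.add_argument('--gerrit-patchset', type=int)
parser.add_argument('--buildbucket-id', type=int)
parser.add_argument('--local-pixel-tests', action='store_true', default=None)
parser.add_argument('--no-luci-auth', action='store_true')
parser.add_argument('--code-review-system')
parser.add_argument('--bypass-skia-gold-functionality', action='store_true')

args = parser.parse_args(['--git-revision', 'a' * 40])
# sgp = skia_gold_properties.SkiaGoldProperties(args)  # ready to consume
```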
diff --git a/src/build/skia_gold_common/skia_gold_properties_unittest.py b/src/build/skia_gold_common/skia_gold_properties_unittest.py
new file mode 100755
index 0000000..26444b7
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_properties_unittest.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#pylint: disable=protected-access
+
+import os
+import unittest
+
+import mock
+
+from skia_gold_common import skia_gold_properties
+from skia_gold_common import unittest_utils
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+class SkiaGoldPropertiesInitializationTest(unittest.TestCase):
+  """Tests that SkiaGoldProperties initializes (or doesn't) when expected."""
+
+  def verifySkiaGoldProperties(self, instance, expected):
+    self.assertEqual(instance._local_pixel_tests,
+                     expected.get('local_pixel_tests'))
+    self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth'))
+    self.assertEqual(instance._code_review_system,
+                     expected.get('code_review_system'))
+    self.assertEqual(instance._git_revision, expected.get('git_revision'))
+    self.assertEqual(instance._issue, expected.get('gerrit_issue'))
+    self.assertEqual(instance._patchset, expected.get('gerrit_patchset'))
+    self.assertEqual(instance._job_id, expected.get('buildbucket_id'))
+    self.assertEqual(instance._bypass_skia_gold_functionality,
+                     expected.get('bypass_skia_gold_functionality'))
+
+  def test_initializeSkiaGoldAttributes_unsetLocal(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {})
+
+  def test_initializeSkiaGoldAttributes_explicitLocal(self):
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True})
+
+  def test_initializeSkiaGoldAttributes_explicitNonLocal(self):
+    args = createSkiaGoldArgs(local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False})
+
+  def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self):
+    args = createSkiaGoldArgs(no_luci_auth=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True})
+
+  def test_initializeSkiaGoldAttributes_explicitCrs(self):
+    args = createSkiaGoldArgs(code_review_system='foo')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'code_review_system': 'foo'})
+
+  def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self):
+    args = createSkiaGoldArgs(bypass_skia_gold_functionality=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True})
+
+  def test_initializeSkiaGoldAttributes_explicitGitRevision(self):
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'})
+
+  def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision(self):
+    args = createSkiaGoldArgs(gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(sgp, {})
+
+  def test_initializeSkiaGoldAttributes_tryjobArgs(self):
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.verifySkiaGoldProperties(
+        sgp, {
+            'git_revision': 'a',
+            'gerrit_issue': 1,
+            'gerrit_patchset': 2,
+            'buildbucket_id': 3
+        })
+
+  def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self):
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              buildbucket_id=3)
+    with self.assertRaises(RuntimeError):
+      skia_gold_properties.SkiaGoldProperties(args)
+
+  def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self):
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2)
+    with self.assertRaises(RuntimeError):
+      skia_gold_properties.SkiaGoldProperties(args)
+
+
+class SkiaGoldPropertiesCalculationTest(unittest.TestCase):
+  """Tests that SkiaGoldProperties properly calculates certain properties."""
+
+  def testLocalPixelTests_determineTrue(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with mock.patch.dict(os.environ, {}, clear=True):
+      self.assertTrue(sgp.local_pixel_tests)
+
+  def testLocalPixelTests_determineFalse(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True):
+      self.assertFalse(sgp.local_pixel_tests)
+
+  def testIsTryjobRun_noIssue(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.assertFalse(sgp.IsTryjobRun())
+
+  def testIsTryjobRun_issue(self):
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.assertTrue(sgp.IsTryjobRun())
+
+  def testGetGitRevision_revisionSet(self):
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self.assertEqual(sgp.git_revision, 'a')
+
+  def testGetGitRevision_findValidRevision(self):
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with mock.patch.object(skia_gold_properties.SkiaGoldProperties,
+                           '_GetGitOriginMasterHeadSha1') as patched_head:
+      expected = 'a' * 40
+      patched_head.return_value = expected
+      self.assertEqual(sgp.git_revision, expected)
+      # Should be cached.
+      self.assertEqual(sgp._git_revision, expected)
+
+  def testGetGitRevision_noExplicitOnBot(self):
+    args = createSkiaGoldArgs(local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with self.assertRaises(RuntimeError):
+      _ = sgp.git_revision
+
+  def testGetGitRevision_findEmptyRevision(self):
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with mock.patch.object(skia_gold_properties.SkiaGoldProperties,
+                           '_GetGitOriginMasterHeadSha1') as patched_head:
+      patched_head.return_value = ''
+      with self.assertRaises(RuntimeError):
+        _ = sgp.git_revision
+
+  def testGetGitRevision_findMalformedRevision(self):
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    with mock.patch.object(skia_gold_properties.SkiaGoldProperties,
+                           '_GetGitOriginMasterHeadSha1') as patched_head:
+      patched_head.return_value = 'a' * 39
+      with self.assertRaises(RuntimeError):
+        _ = sgp.git_revision
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/skia_gold_common/skia_gold_session.py b/src/build/skia_gold_common/skia_gold_session.py
new file mode 100644
index 0000000..cb737ec
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_session.py
@@ -0,0 +1,540 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Class for interacting with the Skia Gold image diffing service."""
+
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+CHROMIUM_SRC = os.path.realpath(
+    os.path.join(os.path.dirname(__file__), '..', '..'))
+
+GOLDCTL_BINARY = os.path.join(CHROMIUM_SRC, 'tools', 'skia_goldctl')
+if sys.platform == 'win32':
+  GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'win', 'goldctl') + '.exe'
+elif sys.platform == 'darwin':
+  GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac', 'goldctl')
+else:
+  GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'linux', 'goldctl')
+
+
+class SkiaGoldSession(object):
+  class StatusCodes(object):
+    """Status codes for RunComparison."""
+    SUCCESS = 0
+    AUTH_FAILURE = 1
+    INIT_FAILURE = 2
+    COMPARISON_FAILURE_REMOTE = 3
+    COMPARISON_FAILURE_LOCAL = 4
+    LOCAL_DIFF_FAILURE = 5
+    NO_OUTPUT_MANAGER = 6
+
+  class ComparisonResults(object):
+    """Struct-like object for storing results of an image comparison."""
+
+    def __init__(self):
+      self.public_triage_link = None
+      self.internal_triage_link = None
+      self.triage_link_omission_reason = None
+      self.local_diff_given_image = None
+      self.local_diff_closest_image = None
+      self.local_diff_diff_image = None
+
+  def __init__(self,
+               working_dir,
+               gold_properties,
+               keys_file,
+               corpus,
+               instance,
+               bucket=None):
+    """Abstract class to handle all aspects of image comparison via Skia Gold.
+
+    A single SkiaGoldSession is valid for a single instance/corpus/keys_file
+    combination.
+
+    Args:
+      working_dir: The directory to store config files, etc.
+      gold_properties: A skia_gold_properties.SkiaGoldProperties instance for
+          the current test run.
+      keys_file: A path to a JSON file containing various comparison config data
+          such as corpus and debug information like the hardware/software
+          configuration the images will be produced on.
+      corpus: The corpus that the images being compared belong to.
+      instance: The name of the Skia Gold instance to interact with.
+      bucket: Overrides the formulaic Google Storage bucket name generated by
+          goldctl.
+    """
+    self._working_dir = working_dir
+    self._gold_properties = gold_properties
+    self._corpus = corpus
+    self._instance = instance
+    self._bucket = bucket
+    self._triage_link_file = tempfile.NamedTemporaryFile(suffix='.txt',
+                                                         dir=working_dir,
+                                                         delete=False).name
+    # A map of image name (string) to ComparisonResults for that image.
+    self._comparison_results = {}
+    self._authenticated = False
+    self._initialized = False
+
+    # Copy the given keys file to the working directory in case it ends up
+    # getting deleted before we try to use it.
+    self._keys_file = os.path.join(working_dir, 'gold_keys.json')
+    shutil.copy(keys_file, self._keys_file)
+
+  def RunComparison(self,
+                    name,
+                    png_file,
+                    output_manager,
+                    inexact_matching_args=None,
+                    use_luci=True,
+                    optional_keys=None):
+    """Helper method to run all steps to compare a produced image.
+
+    Handles authentication, initialization, comparison, and, if necessary,
+    local diffing.
+
+    Args:
+      name: The name of the image being compared.
+      png_file: A path to a PNG file containing the image to be compared.
+      output_manager: An output manager to use to store diff links. The
+          argument's type depends on what type a subclass's _StoreDiffLinks
+          implementation expects. Can be None even if _StoreDiffLinks expects
+          a valid input, but will fail if it ever actually needs to be used.
+      inexact_matching_args: A list of strings containing extra command line
+          arguments to pass to Gold for inexact matching. Can be omitted to use
+          exact matching.
+      use_luci: If true, authentication will use the service account provided by
+          the LUCI context. If false, will attempt to use whatever is set up in
+          gsutil, which is only supported for local runs.
+      optional_keys: A dict containing optional key/value pairs to pass to Gold
+          for this comparison. Optional keys are keys unrelated to the
+          configuration the image was produced on, e.g. a comment or whether
+          Gold should treat the image as ignored.
+
+    Returns:
+      A tuple (status, error). |status| is a value from
+      SkiaGoldSession.StatusCodes signifying the result of the comparison.
+      |error| is an error message describing the status if not successful.
+    """
+    auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci)
+    if auth_rc:
+      return self.StatusCodes.AUTH_FAILURE, auth_stdout
+
+    init_rc, init_stdout = self.Initialize()
+    if init_rc:
+      return self.StatusCodes.INIT_FAILURE, init_stdout
+
+    compare_rc, compare_stdout = self.Compare(
+        name=name,
+        png_file=png_file,
+        inexact_matching_args=inexact_matching_args,
+        optional_keys=optional_keys)
+    if not compare_rc:
+      return self.StatusCodes.SUCCESS, None
+
+    logging.error('Gold comparison failed: %s', compare_stdout)
+    if not self._gold_properties.local_pixel_tests:
+      return self.StatusCodes.COMPARISON_FAILURE_REMOTE, compare_stdout
+
+    if not output_manager:
+      return (self.StatusCodes.NO_OUTPUT_MANAGER,
+              'No output manager for local diff images')
+
+    diff_rc, diff_stdout = self.Diff(name=name,
+                                     png_file=png_file,
+                                     output_manager=output_manager)
+    if diff_rc:
+      return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout
+    return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout
+
+  def Authenticate(self, use_luci=True):
+    """Authenticates with Skia Gold for this session.
+
+    Args:
+      use_luci: If true, authentication will use the service account provided
+          by the LUCI context. If false, will attempt to use whatever is set up
+          in gsutil, which is only supported for local runs.
+
+    Returns:
+      A tuple (return_code, output). |return_code| is the return code of the
+      authentication process. |output| is the stdout + stderr of the
+      authentication process.
+    """
+    if self._authenticated:
+      return 0, None
+    if self._gold_properties.bypass_skia_gold_functionality:
+      logging.warning('Not actually authenticating with Gold due to '
+                      '--bypass-skia-gold-functionality being present.')
+      return 0, None
+
+    auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir]
+    if use_luci:
+      auth_cmd.append('--luci')
+    elif not self._gold_properties.local_pixel_tests:
+      raise RuntimeError(
+          'Cannot authenticate to Skia Gold with use_luci=False unless running '
+          'local pixel tests')
+
+    rc, stdout = self._RunCmdForRcAndOutput(auth_cmd)
+    if rc == 0:
+      self._authenticated = True
+    return rc, stdout
+
+  def Initialize(self):
+    """Initializes the working directory if necessary.
+
+    This can technically be skipped if the same information is passed to the
+    command used for image comparison, but that is less efficient under the
+    hood. Doing it that way effectively requires an initialization for every
+    comparison (~250 ms) instead of once at the beginning.
+
+    Returns:
+      A tuple (return_code, output). |return_code| is the return code of the
+      initialization process. |output| is the stdout + stderr of the
+      initialization process.
+    """
+    if self._initialized:
+      return 0, None
+    if self._gold_properties.bypass_skia_gold_functionality:
+      logging.warning('Not actually initializing Gold due to '
+                      '--bypass-skia-gold-functionality being present.')
+      return 0, None
+
+    init_cmd = [
+        GOLDCTL_BINARY,
+        'imgtest',
+        'init',
+        '--passfail',
+        '--instance',
+        self._instance,
+        '--corpus',
+        self._corpus,
+        '--keys-file',
+        self._keys_file,
+        '--work-dir',
+        self._working_dir,
+        '--failure-file',
+        self._triage_link_file,
+        '--commit',
+        self._gold_properties.git_revision,
+    ]
+    if self._bucket:
+      init_cmd.extend(['--bucket', self._bucket])
+    if self._gold_properties.IsTryjobRun():
+      init_cmd.extend([
+          '--issue',
+          str(self._gold_properties.issue),
+          '--patchset',
+          str(self._gold_properties.patchset),
+          '--jobid',
+          str(self._gold_properties.job_id),
+          '--crs',
+          str(self._gold_properties.code_review_system),
+          '--cis',
+          str(self._gold_properties.continuous_integration_system),
+      ])
+
+    rc, stdout = self._RunCmdForRcAndOutput(init_cmd)
+    if rc == 0:
+      self._initialized = True
+    return rc, stdout
+
+  def Compare(self,
+              name,
+              png_file,
+              inexact_matching_args=None,
+              optional_keys=None):
+    """Compares the given image to images known to Gold.
+
+    Triage links can later be retrieved using GetTriageLinks().
+
+    Args:
+      name: The name of the image being compared.
+      png_file: A path to a PNG file containing the image to be compared.
+      inexact_matching_args: A list of strings containing extra command line
+          arguments to pass to Gold for inexact matching. Can be omitted to use
+          exact matching.
+      optional_keys: A dict containing optional key/value pairs to pass to Gold
+          for this comparison. Optional keys are keys unrelated to the
+          configuration the image was produced on, e.g. a comment or whether
+          Gold should treat the image as ignored.
+
+    Returns:
+      A tuple (return_code, output). |return_code| is the return code of the
+      comparison process. |output| is the stdout + stderr of the comparison
+      process.
+    """
+    if self._gold_properties.bypass_skia_gold_functionality:
+      logging.warning('Not actually comparing with Gold due to '
+                      '--bypass-skia-gold-functionality being present.')
+      return 0, None
+
+    compare_cmd = [
+        GOLDCTL_BINARY,
+        'imgtest',
+        'add',
+        '--test-name',
+        name,
+        '--png-file',
+        png_file,
+        '--work-dir',
+        self._working_dir,
+    ]
+    if self._gold_properties.local_pixel_tests:
+      compare_cmd.append('--dryrun')
+    if inexact_matching_args:
+      logging.info('Using inexact matching arguments for image %s: %s', name,
+                   inexact_matching_args)
+      compare_cmd.extend(inexact_matching_args)
+
+    optional_keys = optional_keys or {}
+    for k, v in optional_keys.iteritems():
+      compare_cmd.extend([
+          '--add-test-optional-key',
+          '%s:%s' % (k, v),
+      ])
+
+    self._ClearTriageLinkFile()
+    rc, stdout = self._RunCmdForRcAndOutput(compare_cmd)
+
+    self._comparison_results[name] = self.ComparisonResults()
+    if rc == 0:
+      self._comparison_results[name].triage_link_omission_reason = (
+          'Comparison succeeded, no triage link')
+    elif self._gold_properties.IsTryjobRun():
+      cl_triage_link = ('https://{instance}-gold.skia.org/cl/{crs}/{issue}')
+      cl_triage_link = cl_triage_link.format(
+          instance=self._instance,
+          crs=self._gold_properties.code_review_system,
+          issue=self._gold_properties.issue)
+      self._comparison_results[name].internal_triage_link = cl_triage_link
+      self._comparison_results[name].public_triage_link =\
+          self._GeneratePublicTriageLink(cl_triage_link)
+    else:
+      try:
+        with open(self._triage_link_file) as tlf:
+          triage_link = tlf.read().strip()
+        if not triage_link:
+          self._comparison_results[name].triage_link_omission_reason = (
+              'Gold did not provide a triage link. This is likely a bug on '
+              "Gold's end.")
+          self._comparison_results[name].internal_triage_link = None
+          self._comparison_results[name].public_triage_link = None
+        else:
+          self._comparison_results[name].internal_triage_link = triage_link
+          self._comparison_results[name].public_triage_link =\
+              self._GeneratePublicTriageLink(triage_link)
+      except IOError:
+        self._comparison_results[name].triage_link_omission_reason = (
+            'Failed to read triage link from file')
+    return rc, stdout
+
+  def Diff(self, name, png_file, output_manager):
+    """Performs a local image diff against the closest known positive in Gold.
+
+    This is used for running tests on a workstation, where uploading data to
+    Gold for ingestion is not allowed, and thus the web UI is not available.
+
+    Image links can later be retrieved using Get*ImageLink().
+
+    Args:
+      name: The name of the image being compared.
+      png_file: The path to a PNG file containing the image to be diffed.
+      output_manager: An output manager to use to store diff links. The
+          argument's type depends on what type a subclass's _StoreDiffLinks
+          implementation expects.
+
+    Returns:
+      A tuple (return_code, output). |return_code| is the return code of the
+      diff process. |output| is the stdout + stderr of the diff process.
+    """
+    # Instead of returning that everything is okay and putting in dummy links,
+    # just fail since this should only be called when running locally and
+    # --bypass-skia-gold-functionality is only meant for use on the bots.
+    if self._gold_properties.bypass_skia_gold_functionality:
+      raise RuntimeError(
+          '--bypass-skia-gold-functionality is not supported when running '
+          'tests locally.')
+
+    output_dir = self._CreateDiffOutputDir()
+    # TODO(skbug.com/10611): Remove this temporary work dir and instead just use
+    # self._working_dir once `goldctl diff` stops clobbering the auth files in
+    # the provided work directory.
+    temp_work_dir = tempfile.mkdtemp()
+    # shutil.copytree() fails if the destination already exists, so use a
+    # subdirectory of the temporary directory.
+    temp_work_dir = os.path.join(temp_work_dir, 'diff_work_dir')
+    try:
+      shutil.copytree(self._working_dir, temp_work_dir)
+      diff_cmd = [
+          GOLDCTL_BINARY,
+          'diff',
+          '--corpus',
+          self._corpus,
+          '--instance',
+          self._GetDiffGoldInstance(),
+          '--input',
+          png_file,
+          '--test',
+          name,
+          '--work-dir',
+          temp_work_dir,
+          '--out-dir',
+          output_dir,
+      ]
+      rc, stdout = self._RunCmdForRcAndOutput(diff_cmd)
+      self._StoreDiffLinks(name, output_manager, output_dir)
+      return rc, stdout
+    finally:
+      shutil.rmtree(os.path.realpath(os.path.join(temp_work_dir, '..')))
+
+  def GetTriageLinks(self, name):
+    """Gets the triage links for the given image.
+
+    Args:
+      name: The name of the image to retrieve the triage link for.
+
+    Returns:
+      A tuple (public, internal). |public| is a string containing the triage
+      link for the public Gold instance if it is available, or None if it is not
+      available for some reason. |internal| is the same as |public|, but
+      containing a link to the internal Gold instance. The reason for links not
+      being available can be retrieved using GetTriageLinkOmissionReason.
+    """
+    comparison_results = self._comparison_results.get(name,
+                                                      self.ComparisonResults())
+    return (comparison_results.public_triage_link,
+            comparison_results.internal_triage_link)
+
+  def GetTriageLinkOmissionReason(self, name):
+    """Gets the reason why a triage link is not available for an image.
+
+    Args:
+      name: The name of the image whose triage link does not exist.
+
+    Returns:
+      A string containing the reason why a triage link is not available.
+    """
+    if name not in self._comparison_results:
+      return 'No image comparison performed for %s' % name
+    results = self._comparison_results[name]
+    # This method should not be called if there is a valid triage link.
+    assert results.public_triage_link is None
+    assert results.internal_triage_link is None
+    if results.triage_link_omission_reason:
+      return results.triage_link_omission_reason
+    if results.local_diff_given_image:
+      return 'Gold only used to do a local image diff'
+    raise RuntimeError(
+        'Somehow have a ComparisonResults instance for %s that should not '
+        'exist' % name)
+
+  def GetGivenImageLink(self, name):
+    """Gets the link to the given image used for local diffing.
+
+    Args:
+      name: The name of the image that was diffed.
+
+    Returns:
+      A string containing the link to where the image is saved, or None if it
+      does not exist.
+    """
+    assert name in self._comparison_results
+    return self._comparison_results[name].local_diff_given_image
+
+  def GetClosestImageLink(self, name):
+    """Gets the link to the closest known image used for local diffing.
+
+    Args:
+      name: The name of the image that was diffed.
+
+    Returns:
+      A string containing the link to where the image is saved, or None if it
+      does not exist.
+    """
+    assert name in self._comparison_results
+    return self._comparison_results[name].local_diff_closest_image
+
+  def GetDiffImageLink(self, name):
+    """Gets the link to the diff between the given and closest images.
+
+    Args:
+      name: The name of the image that was diffed.
+
+    Returns:
+      A string containing the link to where the image is saved, or None if it
+      does not exist.
+    """
+    assert name in self._comparison_results
+    return self._comparison_results[name].local_diff_diff_image
+
+  def _GeneratePublicTriageLink(self, internal_link):
+    """Generates a public triage link given an internal one.
+
+    Args:
+      internal_link: A string containing a triage link pointing to an internal
+          Gold instance.
+
+    Returns:
+      A string containing a triage link pointing to the public mirror of the
+      link pointed to by |internal_link|.
+    """
+    return internal_link.replace('%s-gold' % self._instance,
+                                 '%s-public-gold' % self._instance)
+
+  def _ClearTriageLinkFile(self):
+    """Clears the contents of the triage link file.
+
+    This should be done before every comparison since goldctl appends to the
+    file instead of overwriting its contents, which results in multiple triage
+    links getting concatenated together if there are multiple failures.
+    """
+    open(self._triage_link_file, 'w').close()
+
+  def _CreateDiffOutputDir(self):
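+    """Creates and returns a temporary directory for diff output images."""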
+    return tempfile.mkdtemp(dir=self._working_dir)
+
+  def _GetDiffGoldInstance(self):
+    """Gets the Skia Gold instance to use for the Diff step.
+
+    This can differ based on how a particular instance is set up, mainly
+    depending on whether it is set up for internal results or not.
+
+    Returns:
+      A string containing the instance name to pass to `goldctl diff`.
+    """
+    # TODO(skbug.com/10610): Decide whether to use the public or
+    # non-public instance once authentication is fixed for the non-public
+    # instance.
+    return str(self._instance) + '-public'
+
+  def _StoreDiffLinks(self, image_name, output_manager, output_dir):
+    """Stores the local diff files as links.
+
+    The ComparisonResults entry for |image_name| should have its *_image fields
+    filled after this unless corresponding images were not found on disk.
+
+    Args:
+      image_name: A string containing the name of the image that was diffed.
+      output_manager: An output manager used to surface links to users, if
+          necessary. The expected argument type depends on each subclass's
+          implementation of this method.
+      output_dir: A string containing the path to the directory where diff
+          output image files were saved.
+    """
+    raise NotImplementedError()
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd):
+    """Runs |cmd| and returns its returncode and output.
+
+    Args:
+      cmd: A list containing the command line to run.
+
+    Returns:
+      A tuple (rc, output), where |rc| is the return code of the command and
+      |output| is the stdout + stderr of the command.
+    """
+    raise NotImplementedError()
diff --git a/src/build/skia_gold_common/skia_gold_session_manager.py b/src/build/skia_gold_common/skia_gold_session_manager.py
new file mode 100644
index 0000000..d4166e1
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_session_manager.py
@@ -0,0 +1,121 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Class for managing multiple SkiaGoldSessions."""
+
+import json
+import tempfile
+
+
+class SkiaGoldSessionManager(object):
+  def __init__(self, working_dir, gold_properties):
+    """Abstract class to manage one or more skia_gold_session.SkiaGoldSessions.
+
+    A separate session is required for each instance/corpus/keys_file
+    combination, so this class will lazily create them as necessary.
+
+    Args:
+      working_dir: The working directory under which each individual
+          SkiaGoldSession's working directory will be created.
+      gold_properties: A SkiaGoldProperties instance that will be used to create
+          any SkiaGoldSessions.
+    """
+    self._working_dir = working_dir
+    self._gold_properties = gold_properties
+    self._sessions = {}
+
+  def GetSkiaGoldSession(self,
+                         keys_input,
+                         corpus=None,
+                         instance=None,
+                         bucket=None):
+    """Gets a SkiaGoldSession for the given arguments.
+
+    Lazily creates one if necessary.
+
+    Args:
+      keys_input: A way of retrieving various comparison config data such as
+          corpus and debug information like the hardware/software configuration
+          the image was produced on. Can be either a dict or a filepath to a
+          file containing JSON to read.
+      corpus: A string containing the corpus the session is for. If None, the
+          corpus will be determined using available information.
+      instance: The name of the Skia Gold instance to interact with. If None,
+          will use whatever default the subclass sets.
+      bucket: Overrides the formulaic Google Storage bucket name generated by
+          goldctl.
+
+    Returns:
+      A skia_gold_session.SkiaGoldSession instance for the given parameters,
+      created lazily if one did not already exist.
+    """
+    instance = instance or self._GetDefaultInstance()
+    keys_dict = _GetKeysAsDict(keys_input)
+    keys_string = json.dumps(keys_dict, sort_keys=True)
+    if corpus is None:
+      corpus = keys_dict.get('source_type', instance)
+    # Use the string representation of the keys JSON as a proxy for a hash since
+    # dicts themselves are not hashable.
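+    # The resulting structure is
+    # self._sessions[instance][corpus][keys_string] -> session.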
+    session = self._sessions.setdefault(instance,
+                                        {}).setdefault(corpus, {}).setdefault(
+                                            keys_string, None)
+    if not session:
+      working_dir = tempfile.mkdtemp(dir=self._working_dir)
+      keys_file = _GetKeysAsJson(keys_input, working_dir)
+      session = self.GetSessionClass()(working_dir, self._gold_properties,
+                                       keys_file, corpus, instance, bucket)
+      self._sessions[instance][corpus][keys_string] = session
+    return session
+
+  @staticmethod
+  def _GetDefaultInstance():
+    """Gets the default Skia Gold instance.
+
+    Returns:
+      A string containing the default instance.
+    """
+    return 'chrome'
+
+  @staticmethod
+  def GetSessionClass():
+    """Gets the SkiaGoldSession class to use for session creation.
+
+    Returns:
+      A reference to a SkiaGoldSession class.
+    """
+    raise NotImplementedError()
+
+
+def _GetKeysAsDict(keys_input):
+  """Converts |keys_input| into a dictionary.
+
+  Args:
+    keys_input: A dictionary or a string pointing to a JSON file. The contents
+        of either should be Skia Gold config data.
+
+  Returns:
+    A dictionary containing the Skia Gold config data.
+  """
+  if isinstance(keys_input, dict):
+    return keys_input
+  assert isinstance(keys_input, str)
+  with open(keys_input) as f:
+    return json.load(f)
+
+
+def _GetKeysAsJson(keys_input, session_work_dir):
+  """Converts |keys_input| into a JSON file on disk.
+
+  Args:
+    keys_input: A dictionary or a string pointing to a JSON file. The contents
+        of either should be Skia Gold config data.
+    session_work_dir: A string containing the directory in which to create the
+        JSON file if |keys_input| is a dictionary.
+
+  Returns:
+    A string containing a filepath to a JSON file containing |keys_input|'s
+    data.
+  """
+  if isinstance(keys_input, str):
+    return keys_input
+  assert isinstance(keys_input, dict)
+  keys_file = tempfile.NamedTemporaryFile(suffix='.json',
+                                          dir=session_work_dir,
+                                          delete=False).name
+  with open(keys_file, 'w') as f:
+    json.dump(keys_input, f)
+  return keys_file
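+
+
+# A minimal usage sketch, assuming a hypothetical subclass that supplies a
+# concrete session class; the names below are illustrative only:
+#
+#   class FooSessionManager(SkiaGoldSessionManager):
+#     @staticmethod
+#     def GetSessionClass():
+#       return skia_gold_session.SkiaGoldSession
+#
+#   sgsm = FooSessionManager(working_dir, gold_properties)
+#   session = sgsm.GetSkiaGoldSession({'source_type': 'foo_corpus'})
+#   status, error = session.RunComparison(name, png_file, output_manager)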
diff --git a/src/build/skia_gold_common/skia_gold_session_manager_unittest.py b/src/build/skia_gold_common/skia_gold_session_manager_unittest.py
new file mode 100755
index 0000000..73c21e2
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_session_manager_unittest.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#pylint: disable=protected-access
+
+import json
+import os
+import tempfile
+import unittest
+
+import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+from skia_gold_common import skia_gold_properties
+from skia_gold_common import skia_gold_session
+from skia_gold_common import skia_gold_session_manager
+from skia_gold_common import unittest_utils
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+class SkiaGoldSessionManagerGetSessionTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._patcher = mock.patch.object(
+        skia_gold_session_manager.SkiaGoldSessionManager, 'GetSessionClass')
+    self._session_class_mock = self._patcher.start()
+    self._session_class_mock.return_value = skia_gold_session.SkiaGoldSession
+    self.addCleanup(self._patcher.stop)
+
+  def test_ArgsForwardedToSession(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance')
+    self.assertTrue(session._keys_file.startswith(self._working_dir))
+    self.assertEqual(session._corpus, 'corpus')
+    self.assertEqual(session._instance, 'instance')
+    # Make sure the session's working directory is a subdirectory of the
+    # manager's working directory.
+    self.assertEqual(os.path.dirname(session._working_dir), self._working_dir)
+
+  def test_corpusFromJson(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session = sgsm.GetSkiaGoldSession({'source_type': 'foobar'}, None,
+                                      'instance')
+    self.assertTrue(session._keys_file.startswith(self._working_dir))
+    self.assertEqual(session._corpus, 'foobar')
+    self.assertEqual(session._instance, 'instance')
+
+  def test_corpusDefaultsToInstance(self):
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session = sgsm.GetSkiaGoldSession({}, None, 'instance')
+    self.assertTrue(session._keys_file.startswith(self._working_dir))
+    self.assertEqual(session._corpus, 'instance')
+    self.assertEqual(session._instance, 'instance')
+
+  @mock.patch.object(skia_gold_session_manager.SkiaGoldSessionManager,
+                     '_GetDefaultInstance')
+  def test_getDefaultInstance(self, default_instance_mock):
+    default_instance_mock.return_value = 'default'
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session = sgsm.GetSkiaGoldSession({}, None, None)
+    self.assertTrue(session._keys_file.startswith(self._working_dir))
+    self.assertEqual(session._corpus, 'default')
+    self.assertEqual(session._instance, 'default')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__')
+  def test_matchingSessionReused(self, session_mock):
+    session_mock.return_value = None
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance')
+    session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance')
+    self.assertEqual(session1, session2)
+    # session_mock.assert_called_once() can silently pass on mock versions
+    # where it does not exist (the name resolves to an auto-created mock
+    # attribute), so check the call count directly.
+    self.assertEqual(session_mock.call_count, 1)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__')
+  def test_separateSessionsFromKeys(self, session_mock):
+    session_mock.return_value = None
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance')
+    session2 = sgsm.GetSkiaGoldSession({'something_different': 1}, 'corpus',
+                                       'instance')
+    self.assertNotEqual(session1, session2)
+    self.assertEqual(session_mock.call_count, 2)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__')
+  def test_separateSessionsFromCorpus(self, session_mock):
+    session_mock.return_value = None
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session1 = sgsm.GetSkiaGoldSession({}, 'corpus1', 'instance')
+    session2 = sgsm.GetSkiaGoldSession({}, 'corpus2', 'instance')
+    self.assertNotEqual(session1, session2)
+    self.assertEqual(session_mock.call_count, 2)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__')
+  def test_separateSessionsFromInstance(self, session_mock):
+    session_mock.return_value = None
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    self._working_dir = tempfile.mkdtemp()
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance1')
+    session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance2')
+    self.assertNotEqual(session1, session2)
+    self.assertEqual(session_mock.call_count, 2)
+
+
+class SkiaGoldSessionManagerKeyConversionTest(
+    fake_filesystem_unittest.TestCase):
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+
+  def test_getKeysAsDict(self):
+    keys_dict = {'foo': 'bar'}
+    keys_file_contents = {'bar': 'baz'}
+    keys_file = tempfile.NamedTemporaryFile(delete=False).name
+    with open(keys_file, 'w') as f:
+      json.dump(keys_file_contents, f)
+
+    self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_dict),
+                     keys_dict)
+    self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_file),
+                     keys_file_contents)
+    with self.assertRaises(AssertionError):
+      skia_gold_session_manager._GetKeysAsDict(1)
+
+  def test_getKeysAsJson(self):
+    keys_dict = {'foo': 'bar'}
+    keys_file_contents = {'bar': 'baz'}
+    keys_file = tempfile.NamedTemporaryFile(delete=False).name
+    with open(keys_file, 'w') as f:
+      json.dump(keys_file_contents, f)
+
+    self.assertEqual(skia_gold_session_manager._GetKeysAsJson(keys_file, None),
+                     keys_file)
+    keys_dict_as_json = skia_gold_session_manager._GetKeysAsJson(
+        keys_dict, self._working_dir)
+    self.assertTrue(keys_dict_as_json.startswith(self._working_dir))
+    with open(keys_dict_as_json) as f:
+      self.assertEqual(json.load(f), keys_dict)
+    with self.assertRaises(AssertionError):
+      skia_gold_session_manager._GetKeysAsJson(1, None)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/skia_gold_common/skia_gold_session_unittest.py b/src/build/skia_gold_common/skia_gold_session_unittest.py
new file mode 100755
index 0000000..76c0799
--- /dev/null
+++ b/src/build/skia_gold_common/skia_gold_session_unittest.py
@@ -0,0 +1,787 @@
+#!/usr/bin/env vpython
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#pylint: disable=protected-access
+
+import json
+import os
+import tempfile
+import unittest
+
+import mock
+
+from pyfakefs import fake_filesystem_unittest
+
+from skia_gold_common import skia_gold_properties
+from skia_gold_common import skia_gold_session
+from skia_gold_common import unittest_utils
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+def assertArgWith(test, arg_list, arg, value):
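+  """Asserts that |arg| is in |arg_list| and is followed by |value|."""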
+  i = arg_list.index(arg)
+  test.assertEqual(arg_list[i + 1], value)
+
+
+class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.RunComparison."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+    with open(self._json_keys, 'w') as f:
+      json.dump({}, f)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_comparisonSuccess(self, auth_mock, init_mock, compare_mock,
+                             diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (0, None)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+                                                self._json_keys, None, None)
+    status, _ = session.RunComparison(None, None, None)
+    self.assertEqual(status,
+                     skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS)
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 0)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_authFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
+    auth_mock.return_value = (1, 'Auth failed')
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None, None)
+    self.assertEqual(status,
+                     skia_gold_session.SkiaGoldSession.StatusCodes.AUTH_FAILURE)
+    self.assertEqual(error, 'Auth failed')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 0)
+    self.assertEqual(compare_mock.call_count, 0)
+    self.assertEqual(diff_mock.call_count, 0)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_initFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (1, 'Init failed')
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None, None)
+    self.assertEqual(status,
+                     skia_gold_session.SkiaGoldSession.StatusCodes.INIT_FAILURE)
+    self.assertEqual(error, 'Init failed')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 0)
+    self.assertEqual(diff_mock.call_count, 0)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_compareFailureRemote(self, auth_mock, init_mock, compare_mock,
+                                diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (1, 'Compare failed')
+    args = createSkiaGoldArgs(local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None, None)
+    self.assertEqual(
+        status,
+        skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE)
+    self.assertEqual(error, 'Compare failed')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 0)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_compareFailureLocal(self, auth_mock, init_mock, compare_mock,
+                               diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (1, 'Compare failed')
+    diff_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None,
+                                          'Definitely an output manager')
+    self.assertEqual(
+        status,
+        skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL)
+    self.assertEqual(error, 'Compare failed')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 1)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_compareInexactMatching(self, auth_mock, init_mock, compare_mock,
+                                  diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (0, None)
+    diff_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, _ = session.RunComparison(None,
+                                      None,
+                                      None,
+                                      inexact_matching_args=['--inexact'])
+    self.assertEqual(status,
+                     skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS)
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 0)
+    compare_mock.assert_called_with(name=None,
+                                    png_file=mock.ANY,
+                                    inexact_matching_args=['--inexact'],
+                                    optional_keys=None)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_compareOptionalKeys(self, auth_mock, init_mock, compare_mock,
+                               diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (0, None)
+    diff_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, _ = session.RunComparison(None,
+                                      None,
+                                      None,
+                                      optional_keys={'foo': 'bar'})
+    self.assertEqual(status,
+                     skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS)
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 0)
+    compare_mock.assert_called_with(name=None,
+                                    png_file=mock.ANY,
+                                    inexact_matching_args=None,
+                                    optional_keys={'foo': 'bar'})
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_diffFailure(self, auth_mock, init_mock, compare_mock, diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (1, 'Compare failed')
+    diff_mock.return_value = (1, 'Diff failed')
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None,
+                                          'Definitely an output manager')
+    self.assertEqual(
+        status,
+        skia_gold_session.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE)
+    self.assertEqual(error, 'Diff failed')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(init_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 1)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Diff')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Compare')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Initialize')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, 'Authenticate')
+  def test_noOutputManagerLocal(self, auth_mock, init_mock, compare_mock,
+                                diff_mock):
+    auth_mock.return_value = (0, None)
+    init_mock.return_value = (0, None)
+    compare_mock.return_value = (1, 'Compare failed')
+    diff_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    status, error = session.RunComparison(None, None, None)
+    self.assertEqual(
+        status, skia_gold_session.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER)
+    self.assertEqual(error, 'No output manager for local diff images')
+    self.assertEqual(auth_mock.call_count, 1)
+    self.assertEqual(compare_mock.call_count, 1)
+    self.assertEqual(diff_mock.call_count, 0)
+
+
+class SkiaGoldSessionAuthenticateTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.Authenticate."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandOutputReturned(self, cmd_mock):
+    cmd_mock.return_value = (1, 'Something bad :(')
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, stdout = session.Authenticate()
+    self.assertEqual(cmd_mock.call_count, 1)
+    self.assertEqual(rc, 1)
+    self.assertEqual(stdout, 'Something bad :(')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_bypassSkiaGoldFunctionality(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              bypass_skia_gold_functionality=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, _ = session.Authenticate()
+    self.assertEqual(rc, 0)
+    cmd_mock.assert_not_called()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_shortCircuitAlreadyAuthenticated(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session._authenticated = True
+    rc, _ = session.Authenticate()
+    self.assertEqual(rc, 0)
+    cmd_mock.assert_not_called()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_successSetsShortCircuit(self, cmd_mock):
+    cmd_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    self.assertFalse(session._authenticated)
+    rc, _ = session.Authenticate()
+    self.assertEqual(rc, 0)
+    self.assertTrue(session._authenticated)
+    cmd_mock.assert_called_once()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_failureDoesNotSetShortCircuit(self, cmd_mock):
+    cmd_mock.return_value = (1, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    self.assertFalse(session._authenticated)
+    rc, _ = session.Authenticate()
+    self.assertEqual(rc, 1)
+    self.assertFalse(session._authenticated)
+    cmd_mock.assert_called_once()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithUseLuciTrue(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Authenticate(use_luci=True)
+    self.assertIn('--luci', cmd_mock.call_args[0][0])
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithUseLuciFalse(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Authenticate(use_luci=False)
+    self.assertNotIn('--luci', cmd_mock.call_args[0][0])
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithUseLuciFalseNotLocal(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    with self.assertRaises(RuntimeError):
+      session.Authenticate(use_luci=False)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Authenticate()
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('auth', call_args)
+    assertArgWith(self, call_args, '--work-dir', self._working_dir)
+
+
+class SkiaGoldSessionInitializeTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.Initialize."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_bypassSkiaGoldFunctionality(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              bypass_skia_gold_functionality=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, _ = session.Initialize()
+    self.assertEqual(rc, 0)
+    cmd_mock.assert_not_called()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_shortCircuitAlreadyInitialized(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session._initialized = True
+    rc, _ = session.Initialize()
+    self.assertEqual(rc, 0)
+    cmd_mock.assert_not_called()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_successSetsShortCircuit(self, cmd_mock):
+    cmd_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    self.assertFalse(session._initialized)
+    rc, _ = session.Initialize()
+    self.assertEqual(rc, 0)
+    self.assertTrue(session._initialized)
+    cmd_mock.assert_called_once()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_failureDoesNotSetShortCircuit(self, cmd_mock):
+    cmd_mock.return_value = (1, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    self.assertFalse(session._initialized)
+    rc, _ = session.Initialize()
+    self.assertEqual(rc, 1)
+    self.assertFalse(session._initialized)
+    cmd_mock.assert_called_once()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                'corpus',
+                                                instance='instance',
+                                                bucket='bucket')
+    session.Initialize()
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('imgtest', call_args)
+    self.assertIn('init', call_args)
+    self.assertIn('--passfail', call_args)
+    assertArgWith(self, call_args, '--instance', 'instance')
+    assertArgWith(self, call_args, '--bucket', 'bucket')
+    assertArgWith(self, call_args, '--corpus', 'corpus')
+    # The keys file should have been copied to the working directory.
+    assertArgWith(self, call_args, '--keys-file',
+                  os.path.join(self._working_dir, 'gold_keys.json'))
+    assertArgWith(self, call_args, '--work-dir', self._working_dir)
+    assertArgWith(self, call_args, '--failure-file', session._triage_link_file)
+    assertArgWith(self, call_args, '--commit', 'a')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandTryjobArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Initialize()
+    call_args = cmd_mock.call_args[0][0]
+    assertArgWith(self, call_args, '--issue', '1')
+    assertArgWith(self, call_args, '--patchset', '2')
+    assertArgWith(self, call_args, '--jobid', '3')
+    assertArgWith(self, call_args, '--crs', 'gerrit')
+    assertArgWith(self, call_args, '--cis', 'buildbucket')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandTryjobArgsNonDefaultCrs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(code_review_system='foo',
+                              git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Initialize()
+    call_args = cmd_mock.call_args[0][0]
+    assertArgWith(self, call_args, '--issue', '1')
+    assertArgWith(self, call_args, '--patchset', '2')
+    assertArgWith(self, call_args, '--jobid', '3')
+    assertArgWith(self, call_args, '--crs', 'foo')
+    assertArgWith(self, call_args, '--cis', 'buildbucket')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandTryjobArgsMissing(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Initialize()
+    call_args = cmd_mock.call_args[0][0]
+    self.assertNotIn('--issue', call_args)
+    self.assertNotIn('--patchset', call_args)
+    self.assertNotIn('--jobid', call_args)
+    self.assertNotIn('--crs', call_args)
+    self.assertNotIn('--cis', call_args)
+
+
+class SkiaGoldSessionCompareTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.Compare."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandOutputReturned(self, cmd_mock):
+    cmd_mock.return_value = (1, 'Something bad :(')
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, stdout = session.Compare(None, None)
+    self.assertEqual(cmd_mock.call_count, 1)
+    self.assertEqual(rc, 1)
+    self.assertEqual(stdout, 'Something bad :(')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_bypassSkiaGoldFunctionality(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              bypass_skia_gold_functionality=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, _ = session.Compare(None, None)
+    self.assertEqual(rc, 0)
+    cmd_mock.assert_not_called()
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithLocalPixelTestsTrue(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Compare(None, None)
+    self.assertIn('--dryrun', cmd_mock.call_args[0][0])
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithLocalPixelTestsFalse(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Compare(None, None)
+    self.assertNotIn('--dryrun', cmd_mock.call_args[0][0])
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandWithInexactArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Compare(None, None, inexact_matching_args=['--inexact', 'foobar'])
+    self.assertIn('--inexact', cmd_mock.call_args[0][0])
+    self.assertIn('foobar', cmd_mock.call_args[0][0])
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandCommonArgs(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                'corpus',
+                                                instance='instance')
+    session.Compare('name', 'png_file')
+    call_args = cmd_mock.call_args[0][0]
+    self.assertIn('imgtest', call_args)
+    self.assertIn('add', call_args)
+    assertArgWith(self, call_args, '--test-name', 'name')
+    assertArgWith(self, call_args, '--png-file', 'png_file')
+    assertArgWith(self, call_args, '--work-dir', self._working_dir)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_noLinkOnSuccess(self, cmd_mock):
+    cmd_mock.return_value = (0, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, _ = session.Compare('name', 'png_file')
+    self.assertEqual(rc, 0)
+    comparison_result = session._comparison_results['name']
+    self.assertEqual(comparison_result.public_triage_link, None)
+    self.assertEqual(comparison_result.internal_triage_link, None)
+    self.assertNotEqual(comparison_result.triage_link_omission_reason, None)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_clLinkOnTrybot(self, cmd_mock):
+    cmd_mock.return_value = (1, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              gerrit_issue=1,
+                              gerrit_patchset=2,
+                              buildbucket_id=3)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                None,
+                                                instance='instance')
+    rc, _ = session.Compare('name', 'png_file')
+    self.assertEqual(rc, 1)
+    comparison_result = session._comparison_results['name']
+    self.assertNotEqual(comparison_result.public_triage_link, None)
+    self.assertNotEqual(comparison_result.internal_triage_link, None)
+    internal_link = 'https://instance-gold.skia.org/cl/gerrit/1'
+    public_link = 'https://instance-public-gold.skia.org/cl/gerrit/1'
+    self.assertEqual(comparison_result.internal_triage_link, internal_link)
+    self.assertEqual(comparison_result.public_triage_link, public_link)
+    self.assertEqual(comparison_result.triage_link_omission_reason, None)
+    self.assertEqual(session.GetTriageLinks('name'),
+                     (public_link, internal_link))
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_individualLinkOnCi(self, cmd_mock):
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir,
+                                                sgp,
+                                                self._json_keys,
+                                                None,
+                                                instance='foobar')
+
+    internal_link = 'foobar-gold.skia.org'
+    public_link = 'foobar-public-gold.skia.org'
+
+    def WriteTriageLinkFile(_):
+      with open(session._triage_link_file, 'w') as f:
+        f.write(internal_link)
+      return (1, None)
+
+    cmd_mock.side_effect = WriteTriageLinkFile
+    rc, _ = session.Compare('name', 'png_file')
+    self.assertEqual(rc, 1)
+    comparison_result = session._comparison_results['name']
+    self.assertNotEqual(comparison_result.public_triage_link, None)
+    self.assertNotEqual(comparison_result.internal_triage_link, None)
+    self.assertEqual(comparison_result.internal_triage_link, internal_link)
+    self.assertEqual(comparison_result.public_triage_link, public_link)
+    self.assertEqual(comparison_result.triage_link_omission_reason, None)
+    self.assertEqual(session.GetTriageLinks('name'),
+                     (public_link, internal_link))
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_validOmissionOnMissingLink(self, cmd_mock):
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+
+    def WriteTriageLinkFile(_):
+      with open(session._triage_link_file, 'w'):
+        pass
+      return (1, None)
+
+    cmd_mock.side_effect = WriteTriageLinkFile
+    rc, _ = session.Compare('name', 'png_file')
+    self.assertEqual(rc, 1)
+    comparison_result = session._comparison_results['name']
+    self.assertEqual(comparison_result.public_triage_link, None)
+    self.assertEqual(comparison_result.internal_triage_link, None)
+    self.assertIn('Gold did not provide a triage link',
+                  comparison_result.triage_link_omission_reason)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_validOmissionOnIoError(self, cmd_mock):
+    cmd_mock.return_value = (1, None)
+    args = createSkiaGoldArgs(git_revision='a')
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+
+    def DeleteTriageLinkFile(_):
+      os.remove(session._triage_link_file)
+      return (1, None)
+
+    cmd_mock.side_effect = DeleteTriageLinkFile
+    rc, _ = session.Compare('name', 'png_file')
+    self.assertEqual(rc, 1)
+    comparison_result = session._comparison_results['name']
+    self.assertEqual(comparison_result.public_triage_link, None)
+    self.assertEqual(comparison_result.internal_triage_link, None)
+    self.assertNotEqual(comparison_result.triage_link_omission_reason, None)
+    self.assertIn('Failed to read',
+                  comparison_result.triage_link_omission_reason)
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_optionalKeysPassedToGoldctl(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    session.Compare(None, None, optional_keys={'foo': 'bar'})
+    assertArgWith(self, cmd_mock.call_args[0][0], '--add-test-optional-key',
+                  'foo:bar')
+
+
+class SkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.Diff."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_StoreDiffLinks')
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_commandOutputReturned(self, cmd_mock, _):
+    cmd_mock.return_value = (1, 'Something bad :(')
+    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    rc, stdout = session.Diff(None, None, None)
+    self.assertEqual(cmd_mock.call_count, 1)
+    self.assertEqual(rc, 1)
+    self.assertEqual(stdout, 'Something bad :(')
+
+  @mock.patch.object(skia_gold_session.SkiaGoldSession, '_RunCmdForRcAndOutput')
+  def test_bypassSkiaGoldFunctionality(self, cmd_mock):
+    cmd_mock.return_value = (None, None)
+    args = createSkiaGoldArgs(git_revision='a',
+                              bypass_skia_gold_functionality=True)
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp,
+                                                self._json_keys, None, None)
+    with self.assertRaises(RuntimeError):
+      session.Diff(None, None, None)
+
+
+class SkiaGoldSessionTriageLinkOmissionTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason."""
+
+  def setUp(self):
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+
+  def _CreateSession(self):
+    json_keys = tempfile.NamedTemporaryFile(delete=False).name
+    session = skia_gold_session.SkiaGoldSession(self._working_dir, None,
+                                                json_keys, None, None)
+    session._comparison_results = {
+        'foo': skia_gold_session.SkiaGoldSession.ComparisonResults(),
+    }
+    return session
+
+  def test_noComparison(self):
+    session = self._CreateSession()
+    session._comparison_results = {}
+    reason = session.GetTriageLinkOmissionReason('foo')
+    self.assertEqual(reason, 'No image comparison performed for foo')
+
+  def test_validReason(self):
+    session = self._CreateSession()
+    session._comparison_results['foo'].triage_link_omission_reason = 'bar'
+    reason = session.GetTriageLinkOmissionReason('foo')
+    self.assertEqual(reason, 'bar')
+
+  def test_onlyLocal(self):
+    session = self._CreateSession()
+    session._comparison_results['foo'].local_diff_given_image = 'bar'
+    reason = session.GetTriageLinkOmissionReason('foo')
+    self.assertEqual(reason, 'Gold only used to do a local image diff')
+
+  def test_onlyWithoutTriageLink(self):
+    session = self._CreateSession()
+    comparison_result = session._comparison_results['foo']
+    comparison_result.public_triage_link = 'bar'
+    with self.assertRaises(AssertionError):
+      session.GetTriageLinkOmissionReason('foo')
+    comparison_result.public_triage_link = None
+    comparison_result.internal_triage_link = 'bar'
+    with self.assertRaises(AssertionError):
+      session.GetTriageLinkOmissionReason('foo')
+
+  def test_resultsShouldNotExist(self):
+    session = self._CreateSession()
+    with self.assertRaises(RuntimeError):
+      session.GetTriageLinkOmissionReason('foo')
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/src/build/skia_gold_common/unittest_utils.py b/src/build/skia_gold_common/unittest_utils.py
new file mode 100644
index 0000000..fc4b546
--- /dev/null
+++ b/src/build/skia_gold_common/unittest_utils.py
@@ -0,0 +1,30 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utility methods for Skia Gold functionality unittests."""
+
+import collections
+
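+# A lightweight stand-in for the parsed argument object that
+# skia_gold_properties.SkiaGoldProperties consumes; tests construct these
+# directly instead of parsing a real command line.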
+_SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [
+    'local_pixel_tests',
+    'no_luci_auth',
+    'code_review_system',
+    'git_revision',
+    'gerrit_issue',
+    'gerrit_patchset',
+    'buildbucket_id',
+    'bypass_skia_gold_functionality',
+])
+
+
+def createSkiaGoldArgs(local_pixel_tests=None,
+                       no_luci_auth=None,
+                       code_review_system=None,
+                       git_revision=None,
+                       gerrit_issue=None,
+                       gerrit_patchset=None,
+                       buildbucket_id=None,
+                       bypass_skia_gold_functionality=None):
+  return _SkiaGoldArgs(local_pixel_tests, no_luci_auth, code_review_system,
+                       git_revision, gerrit_issue, gerrit_patchset,
+                       buildbucket_id, bypass_skia_gold_functionality)
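
For illustration, a test can use createSkiaGoldArgs to fabricate a parsed-args
object with only the fields it cares about (a minimal sketch, assuming the
module is importable as unittest_utils):

    from unittest_utils import createSkiaGoldArgs

    args = createSkiaGoldArgs(git_revision='a' * 40, local_pixel_tests=True)
    assert args.local_pixel_tests is True
    assert args.gerrit_issue is None  # unspecified fields default to None
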
diff --git a/src/build/symlink.gni b/src/build/symlink.gni
new file mode 100644
index 0000000..dcaa5e5
--- /dev/null
+++ b/src/build/symlink.gni
@@ -0,0 +1,79 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a symlink.
+# Args:
+#   source: Path to link to.
+#   output: Where to create the symlink.
+template("symlink") {
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    outputs = [ invoker.output ]
+    script = "//build/symlink.py"
+    args = [
+      "-f",
+      rebase_path(invoker.source, get_path_info(invoker.output, "dir")),
+      rebase_path(invoker.output, root_build_dir),
+    ]
+  }
+}
+
+# Creates a symlink from root_build_dir/target_name to |binary_label|. This
+# rule is meant to be used within if (current_toolchain == default_toolchain)
+# blocks, pointing at targets in the non-default toolchain.
+# Note that for executables, using a copy (as opposed to a symlink) does not
+# work when is_component_build=true, since dependent libraries are found via
+# relative location.
+#
+# Args:
+#   binary_label: Target that builds the file to symlink to. e.g.:
+#       ":$target_name($host_toolchain)".
+#   binary_output_name: The output_name set by the binary_label target
+#       (if applicable).
+#   output_name: Where to create the symlink
+#       (default="$root_out_dir/$binary_output_name").
+#
+# Example:
+#   if (current_toolchain == host_toolchain) {
+#     executable("foo") { ... }
+#   } else if (current_toolchain == default_toolchain) {
+#     binary_symlink("foo") {
+#       binary_label = ":foo($host_toolchain)"
+#     }
+#   }
+template("binary_symlink") {
+  symlink(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "output",
+                             "testonly",
+                             "visibility",
+                           ])
+    deps = [ invoker.binary_label ]
+    data_deps = [ invoker.binary_label ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    _out_dir = get_label_info(invoker.binary_label, "root_out_dir")
+    if (defined(invoker.binary_output_name)) {
+      _name = invoker.binary_output_name
+    } else {
+      _name = get_label_info(invoker.binary_label, "name")
+    }
+    source = "$_out_dir/$_name"
+
+    _output_name = _name
+    if (defined(invoker.output_name)) {
+      _output_name = invoker.output_name
+    }
+    output = "$root_out_dir/$_output_name"
+  }
+}
diff --git a/src/build/symlink.py b/src/build/symlink.py
new file mode 100755
index 0000000..0f90696
--- /dev/null
+++ b/src/build/symlink.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+description = """
+Make a symlink and optionally touch a file (to handle dependencies).
+"""
+usage = "%prog [options] source[ source ...] linkname"
+epilog = """\
+A symlink to source is created at linkname. If multiple sources are specified,
+then linkname is assumed to be a directory, and will contain all the links to
+the sources (basenames identical to their source).
+
+On Windows, this will use hard links (mklink /H) to avoid requiring elevation.
+This means that if the original is deleted and replaced, the link will still
+have the old contents.
+"""
+
+import errno
+import optparse
+import os.path
+import shutil
+import subprocess
+import sys
+
+
+def Main(argv):
+  parser = optparse.OptionParser(usage=usage, description=description,
+                                 epilog=epilog)
+  parser.add_option('-f', '--force', action='store_true')
+  parser.add_option('--touch')
+
+  options, args = parser.parse_args(argv[1:])
+  if len(args) < 2:
+    parser.error('at least two arguments required.')
+
+  target = args[-1]
+  sources = args[:-1]
+  for s in sources:
+    t = os.path.join(target, os.path.basename(s))
+    if len(sources) == 1 and not os.path.isdir(target):
+      t = target
+    t = os.path.expanduser(t)
+    if os.path.realpath(t) == os.path.realpath(s):
+      continue
+    try:
+      # N.B. Python 2.x does not have os.symlink for Windows.
+      #   Python 3 has os.symlink for Windows, but requires either the admin-
+      #   granted privilege SeCreateSymbolicLinkPrivilege or, as of Windows 10
+      #   1703, that Developer Mode be enabled. Hard links and junctions do not
+      #   require any extra privileges to create.
+      if os.name == 'nt':
+        # mklink does not tolerate /-delimited path names.
+        t = t.replace('/', '\\')
+        s = s.replace('/', '\\')
+        # N.B. This tool only handles file hardlinks, not directory junctions.
+        subprocess.check_output(['cmd.exe', '/c', 'mklink', '/H', t, s],
+                                stderr=subprocess.STDOUT)
+      else:
+        os.symlink(s, t)
+    except OSError as e:
+      if e.errno == errno.EEXIST and options.force:
+        if os.path.isdir(t):
+          shutil.rmtree(t, ignore_errors=True)
+        else:
+          os.remove(t)
+        os.symlink(s, t)
+      else:
+        raise
+    except subprocess.CalledProcessError as e:
+      # Since subprocess.check_output does not return an easily checked error
+      # number, in the 'force' case always assume it is 'file already exists'
+      # and retry.
+      if options.force:
+        if os.path.isdir(t):
+          shutil.rmtree(t, ignore_errors=True)
+        else:
+          os.remove(t)
+        subprocess.check_output(e.cmd, stderr=subprocess.STDOUT)
+      else:
+        raise
+
+
+  if options.touch:
+    with open(options.touch, 'w'):
+      pass
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
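
For reference, the multi-source fan-out described in the epilog reduces to
this minimal sketch (hypothetical paths; the real script above adds the
Windows hard-link path and the --force retry handling):

    import os
    import os.path

    sources = ['out/libfoo.so', 'out/libbar.so']  # hypothetical inputs
    target = 'out/links'                          # assumed existing directory

    for s in sources:
      t = os.path.join(target, os.path.basename(s))
      # Skip links that already resolve to their source, as symlink.py does.
      if os.path.realpath(t) != os.path.realpath(s):
        os.symlink(s, t)
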
diff --git a/src/build/timestamp.gni b/src/build/timestamp.gni
new file mode 100644
index 0000000..4d805c0
--- /dev/null
+++ b/src/build/timestamp.gni
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Defines the build_timestamp variable.
+
+import("//build/util/lastchange.gni")
+
+declare_args() {
+  # This should be the filename of a script that prints a single line
+  # containing an integer unix timestamp in UTC.
+  # This timestamp is used as the build time and will be compiled into
+  # other code.
+  #
+  # This argument may look unused. Before removing it, please check with the
+  # chromecast team to see if they still use it internally.
+  compute_build_timestamp = "compute_build_timestamp.py"
+}
+
+if (is_official_build) {
+  official_name = "official"
+} else {
+  official_name = "default"
+}
+
+# This will return a timestamp that's different each day (official builds)
+# or each month (regular builds).  Just rely on gn rerunning due to other
+# changes to keep this up to date.  (Bots run gn on each build, and for devs
+# the timestamp being 100% accurate doesn't matter.)
+# See compute_build_timestamp.py for tradeoffs for picking the timestamp.
+build_timestamp = exec_script(compute_build_timestamp,
+                              [ official_name ],
+                              "trim string",
+                              [ lastchange_file ])
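
compute_build_timestamp.py itself is not part of this change; a plausible
sketch of the contract described above (a single integer unix timestamp on
stdout, truncated to the day for "official" builds and to the month otherwise;
an illustrative assumption, not the actual implementation):

    import calendar
    import datetime
    import sys

    def main(build_type):
      # Truncate "now" so the printed value changes once per day for official
      # builds and once per month for default builds.
      now = datetime.datetime.utcnow()
      if build_type == 'official':
        truncated = now.replace(hour=0, minute=0, second=0, microsecond=0)
      else:
        truncated = now.replace(day=1, hour=0, minute=0, second=0,
                                microsecond=0)
      print(calendar.timegm(truncated.timetuple()))
      return 0

    if __name__ == '__main__':
      sys.exit(main(sys.argv[1] if len(sys.argv) > 1 else 'default'))
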
diff --git a/src/build/toolchain/BUILD.gn b/src/build/toolchain/BUILD.gn
new file mode 100644
index 0000000..6cf8f1b
--- /dev/null
+++ b/src/build/toolchain/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+
+declare_args() {
+  # Pool for non-goma tasks.
+  action_pool_depth = -1
+}
+
+if (current_toolchain == default_toolchain) {
+  if (action_pool_depth == -1 || (use_goma || use_rbe)) {
+    action_pool_depth = exec_script("get_cpu_count.py", [], "value")
+  }
+
+  pool("link_pool") {
+    depth = concurrent_links
+  }
+
+  pool("action_pool") {
+    depth = action_pool_depth
+  }
+}
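
get_cpu_count.py is referenced above but not shown in this change. GN's
"value" output conversion only needs a bare integer on stdout, so a minimal
sketch (an assumption, not the actual script) could be:

    import multiprocessing
    import sys

    def main():
      try:
        print(multiprocessing.cpu_count())
      except NotImplementedError:
        print(1)  # fall back to a single slot if the count is unavailable
      return 0

    if __name__ == '__main__':
      sys.exit(main())
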
diff --git a/src/build/toolchain/aix/BUILD.gn b/src/build/toolchain/aix/BUILD.gn
new file mode 100644
index 0000000..523b54e
--- /dev/null
+++ b/src/build/toolchain/aix/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+gcc_toolchain("ppc64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "ppc64"
+    current_os = "aix"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
diff --git a/src/build/toolchain/android/BUILD.gn b/src/build/toolchain/android/BUILD.gn
new file mode 100644
index 0000000..3299dce
--- /dev/null
+++ b/src/build/toolchain/android/BUILD.gn
@@ -0,0 +1,152 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/ozone.gni")
+import("//build/config/sysroot.gni")  # Imports android/config.gni.
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  # Whether unstripped binaries, i.e. compiled with debug symbols, should be
+  # considered runtime_deps rather than stripped ones.
+  android_unstripped_runtime_outputs = true
+}
+
+# The Android clang toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - binary_prefix
+#      Prefix of compiler executables.
+template("android_clang_toolchain") {
+  gcc_toolchain(target_name) {
+    assert(defined(invoker.toolchain_args),
+           "toolchain_args must be defined for android_clang_toolchain()")
+
+    # Android toolchains need to declare .dwp files as outputs, so need to know
+    # the value of "use_debug_fission" when defining them.
+    # The derived value of "use_debug_fission" varies based on current_os, but
+    # toolchain definitions are evaluated under the default toolchain.
+    # Rather than computing the value under current_os="android", just disable
+    # it if target_os != "android".
+    _use_debug_fission = use_debug_fission && target_os == "android"
+
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+      current_os = "android"
+      use_debug_fission = _use_debug_fission
+    }
+
+    # Output linker map files for binary size analysis.
+    enable_linker_map = true
+
+    _android_tool_prefix =
+        "$android_toolchain_root/bin/${invoker.binary_prefix}-"
+
+    # The tools should be run relative to the build dir.
+    _tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir)
+
+    _prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cc = "$_prefix/clang"
+    cxx = "$_prefix/clang++"
+    ar = "$_prefix/llvm-ar"
+    ld = cxx
+    readelf = _tool_prefix + "readelf"
+    nm = "$_prefix/llvm-nm"
+    strip = "$_prefix/llvm-strip"
+    if (_use_debug_fission) {
+      dwp = _tool_prefix + "dwp"
+    }
+    use_unstripped_as_runtime_outputs = android_unstripped_runtime_outputs
+
+    # Don't use .cr.so for loadable_modules since they are always loaded via
+    # absolute path.
+    loadable_module_extension = ".so"
+  }
+}
+
+android_clang_toolchain("android_clang_x86") {
+  binary_prefix = "i686-linux-android"
+  toolchain_args = {
+    current_cpu = "x86"
+
+    # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot
+    # link any binaries that are generated with coverage instrumentation.
+    # Therefore we need to turn off 'use_clang_coverage' for this toolchain.
+    # TODO(crbug.com/865376)
+    use_clang_coverage = false
+
+    # This turns off all of the LaCrOS-specific flags. A LaCrOS build may use
+    # the |ash_clang_x64| toolchain, which is a chromeos toolchain, to build
+    # Ash-Chrome in a subdirectory; because the chromeos toolchain uses the
+    # android toolchain, android toolchains would otherwise end up being used
+    # inside a LaCrOS build.
+    also_build_ash_chrome = false
+    chromeos_is_browser_only = false
+    ozone_platform = ""
+    ozone_platform_wayland = false
+  }
+}
+
+android_clang_toolchain("android_clang_arm") {
+  binary_prefix = "arm-linux-androideabi"
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+android_clang_toolchain("android_clang_mipsel") {
+  binary_prefix = "mipsel-linux-android"
+  toolchain_args = {
+    current_cpu = "mipsel"
+  }
+}
+
+android_clang_toolchain("android_clang_x64") {
+  binary_prefix = "x86_64-linux-android"
+  toolchain_args = {
+    current_cpu = "x64"
+
+    # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot
+    # link any binaries that are generated with coverage instrumentation.
+    # Therefore we need to turn off 'use_clang_coverage' for this toolchain.
+    # TODO(crbug.com/865376)
+    use_clang_coverage = false
+
+    # This turns off all of the LaCrOS-specific flags. A LaCrOS build may use
+    # the |ash_clang_x64| toolchain, which is a chromeos toolchain, to build
+    # Ash-Chrome in a subdirectory; because the chromeos toolchain uses the
+    # android toolchain, android toolchains would otherwise end up being used
+    # inside a LaCrOS build.
+    also_build_ash_chrome = false
+    chromeos_is_browser_only = false
+    ozone_platform = ""
+    ozone_platform_wayland = false
+  }
+}
+
+android_clang_toolchain("android_clang_arm64") {
+  binary_prefix = "aarch64-linux-android"
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+android_clang_toolchain("android_clang_arm64_hwasan") {
+  binary_prefix = "aarch64-linux-android"
+  toolchain_args = {
+    current_cpu = "arm64"
+    is_hwasan = true
+    android64_ndk_api_level = 29
+  }
+}
+
+android_clang_toolchain("android_clang_mips64el") {
+  binary_prefix = "mips64el-linux-android"
+  toolchain_args = {
+    current_cpu = "mips64el"
+  }
+}
diff --git a/src/build/toolchain/apple/BUILD.gn b/src/build/toolchain/apple/BUILD.gn
new file mode 100644
index 0000000..6f074fd
--- /dev/null
+++ b/src/build/toolchain/apple/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+declare_args() {
+  # Reduce the number of concurrent tasks using the copy_bundle_data and
+  # compile_xcassets tools, as they can cause lots of I/O contention when
+  # ninja is invoked with a large number of parallel jobs (e.g. when using a
+  # distributed build service like goma).
+}
+
+if (current_toolchain == default_toolchain) {
+  pool("bundle_pool") {
+    if (bundle_pool_depth == -1) {
+      depth = concurrent_links
+    } else {
+      depth = bundle_pool_depth
+    }
+  }
+}
diff --git a/src/build/toolchain/apple/filter_libtool.py b/src/build/toolchain/apple/filter_libtool.py
new file mode 100644
index 0000000..44c1c44
--- /dev/null
+++ b/src/build/toolchain/apple/filter_libtool.py
@@ -0,0 +1,52 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import re
+import subprocess
+import sys
+
+# This script executes libtool and filters out logspam lines like:
+#    '/path/to/libtool: file: foo.o has no symbols'
+
+SUPPRESSED_PATTERNS = [
+    re.compile(v) for v in [
+        r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$',
+        # Xcode 11 spelling of the "empty archive" warning.
+        # TODO(thakis): Remove once we require Xcode 12.
+        r'^.*libtool: warning for library: .* the table of contents is empty ' \
+            r'\(no object file members in the library define global symbols\)$',
+        # Xcode 12 spelling of the "empty archive" warning.
+        r'^warning: .*libtool: archive library: .* ' \
+            r'the table of contents is empty ' \
+            r'\(no object file members in the library define global symbols\)$',
+        r'^.*libtool: warning same member name \(\S*\) in output file used ' \
+            r'for input files: \S* and: \S* \(due to use of basename, ' \
+            r'truncation, blank padding or duplicate input files\)$',
+    ]
+]
+
+
+def ShouldSuppressLine(line):
+  """Returns whether the line should be filtered out."""
+  for pattern in SUPPRESSED_PATTERNS:
+    if pattern.match(line):
+      return True
+  return False
+
+
+def Main(cmd_list):
+  env = os.environ.copy()
+  libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+  _, err = libtoolout.communicate()
+  for line in err.decode('UTF-8').splitlines():
+    if not ShouldSuppressLine(line):
+      print(line, file=sys.stderr)
+  return libtoolout.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
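
For illustration, the suppression predicate keeps real diagnostics while
dropping the known logspam (a minimal sketch, assuming the module is
importable as filter_libtool; the input lines are hypothetical):

    import filter_libtool

    spam = '/usr/bin/libtool: file: foo.o has no symbols'
    real = 'error: /usr/bin/libtool: can not create output file'
    assert filter_libtool.ShouldSuppressLine(spam)
    assert not filter_libtool.ShouldSuppressLine(real)
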
diff --git a/src/build/toolchain/apple/get_tool_mtime.py b/src/build/toolchain/apple/get_tool_mtime.py
new file mode 100644
index 0000000..ff0254c
--- /dev/null
+++ b/src/build/toolchain/apple/get_tool_mtime.py
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import sys
+
+# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py
+#
+# Prints a GN scope with the variable name being the basename sans-extension
+# and the value being the file modification time. A variable is emitted for
+# each file argument on the command line.
+
+if __name__ == '__main__':
+  for f in sys.argv[1:]:
+    variable = os.path.splitext(os.path.basename(f))[0]
+    print('%s = %d' % (variable, os.path.getmtime(f)))
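
For example, running the script over linker_driver.py and filter_libtool.py
would print a GN scope along the lines of "linker_driver = 1613499000" and
"filter_libtool = 1613499001" (timestamps illustrative); the "trim scope"
exec_script call in apple/toolchain.gni below consumes exactly this shape.
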
diff --git a/src/build/toolchain/apple/linker_driver.py b/src/build/toolchain/apple/linker_driver.py
new file mode 100755
index 0000000..c21e18a
--- /dev/null
+++ b/src/build/toolchain/apple/linker_driver.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+
+# On Mac, the values of these globals are modified when parsing -Wcrl, flags.
+# On iOS, the script uses the defaults.
+DSYMUTIL_INVOKE = ['xcrun', 'dsymutil']
+STRIP_INVOKE = ['xcrun', 'strip']
+
+# Setting this flag will emit a deterministic binary by stripping dates from the
+# N_OSO field.
+DETERMINISTIC_FLAG = '--deterministic'
+
+# The linker_driver.py is responsible for forwarding a linker invocation to
+# the compiler driver, while processing special arguments itself.
+#
+# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out
+#
+# On Mac, the logical step of linking is handled by three discrete tools to
+# perform the image link, debug info link, and strip. The linker_driver.py
+# combines these three steps into a single tool.
+#
+# The command passed to the linker_driver.py should be the compiler driver
+# invocation for the linker. It is first invoked unaltered (except for the
+# removal of the special driver arguments, described below). Then the driver
+# performs additional actions, based on these arguments:
+#
+#   -Wcrl,dsym,<dsym_path_prefix>
+#       After invoking the linker, this will run `dsymutil` on the linker's
+#       output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+#       example, if the linker driver were invoked with:
+#         "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+#       The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+#
+#   -Wcrl,dsymutilpath,<dsymutil_path>
+#       Sets the path to the dsymutil to run with -Wcrl,dsym, in which case
+#       `xcrun` is not used to invoke it.
+#
+#   -Wcrl,unstripped,<unstripped_path_prefix>
+#       After invoking the linker, and before strip, this will save a copy of
+#       the unstripped linker output in the directory unstripped_path_prefix.
+#
+#   -Wcrl,strip,<strip_arguments>
+#       After invoking the linker, and optionally dsymutil, this will run
+#       the strip command on the linker's output. strip_arguments are
+#       comma-separated arguments to be passed to the strip command.
+#
+#   -Wcrl,strippath,<strip_path>
+#       Sets the path to the strip to run with -Wcrl,strip, in which case
+#       `xcrun` is not used to invoke it.
+
+
+def Main(args):
+  """Main function for the linker driver. Separates out the arguments for
+  the main compiler driver and the linker driver, then invokes all the
+  required tools.
+
+  Args:
+    args: list of string, Arguments to the script.
+  """
+
+  if len(args) < 2:
+    raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+
+  # Collect arguments to the linker driver (this script) and remove them from
+  # the arguments being passed to the compiler driver.
+  linker_driver_actions = {}
+  compiler_driver_args = []
+  deterministic = False
+  for arg in args[1:]:
+    if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+      # Convert driver actions into a map of name => lambda to invoke.
+      driver_action = ProcessLinkerDriverArg(arg)
+      assert driver_action[0] not in linker_driver_actions
+      linker_driver_actions[driver_action[0]] = driver_action[1]
+    elif arg == DETERMINISTIC_FLAG:
+      deterministic = True
+    else:
+      compiler_driver_args.append(arg)
+
+  linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)]
+
+  try:
+    # Zero the mtime in OSO fields for deterministic builds.
+    # https://crbug.com/330262.
+    env = os.environ.copy()
+    if deterministic:
+      env['ZERO_AR_DATE'] = '1'
+    # Run the linker by invoking the compiler driver.
+    subprocess.check_call(compiler_driver_args, env=env)
+
+    # Run the linker driver actions, in the order specified by the actions list.
+    for action in _LINKER_DRIVER_ACTIONS:
+      name = action[0]
+      if name in linker_driver_actions:
+        linker_driver_outputs += linker_driver_actions[name](args)
+  except:
+    # If a linker driver action failed, remove all the outputs to make the
+    # build step atomic.
+    for output in linker_driver_outputs:
+      _RemovePath(output)
+
+    # Re-report the original failure.
+    raise
+
+
+def ProcessLinkerDriverArg(arg):
+  """Processes a linker driver argument and returns a tuple containing the
+  name and unary lambda to invoke for that linker driver action.
+
+  Args:
+    arg: string, The linker driver argument.
+
+  Returns:
+    A 2-tuple:
+      0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
+      1: A unary lambda that takes the full list of arguments passed to
+         Main(). The lambda should call the linker driver action that
+         corresponds to the argument and return a list of outputs from the
+         action.
+  """
+  if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+    raise ValueError('%s is not a linker driver argument' % (arg, ))
+
+  sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]
+
+  for driver_action in _LINKER_DRIVER_ACTIONS:
+    (name, action) = driver_action
+    if sub_arg.startswith(name):
+      return (name, lambda full_args: action(sub_arg[len(name):], full_args))
+
+  raise ValueError('Unknown linker driver argument: %s' % (arg, ))
+
+
+def RunDsymUtil(dsym_path_prefix, full_args):
+  """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes dsymutil
+  on the linker's output and produces a dsym file at |dsym_file| path.
+
+  Args:
+    dsym_path_prefix: string, The path at which the dsymutil output should be
+        located.
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+      list of string, Build step outputs.
+  """
+  if not len(dsym_path_prefix):
+    raise ValueError('Unspecified dSYM output file')
+
+  linker_out = _FindLinkerOutput(full_args)
+  base = os.path.basename(linker_out)
+  dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
+
+  # Remove old dSYMs before invoking dsymutil.
+  _RemovePath(dsym_out)
+
+  tools_paths = _FindToolsPaths(full_args)
+  if os.environ.get('PATH'):
+    tools_paths.append(os.environ['PATH'])
+  dsymutil_env = os.environ.copy()
+  dsymutil_env['PATH'] = ':'.join(tools_paths)
+  subprocess.check_call(DSYMUTIL_INVOKE + ['-o', dsym_out, linker_out],
+                        env=dsymutil_env)
+  return [dsym_out]
+
+
+def SetDsymutilPath(dsymutil_path, full_args):
+  """Linker driver action for -Wcrl,dsymutilpath,<dsymutil_path>.
+
+  Sets the invocation command for dsymutil, which allows the caller to specify
+  an alternate dsymutil. This action is always processed before the RunDsymUtil
+  action.
+
+  Args:
+    dsymutil_path: string, The path to the dsymutil binary to run
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+    No output - this step is run purely for its side-effect.
+  """
+  global DSYMUTIL_INVOKE
+  DSYMUTIL_INVOKE = [dsymutil_path]
+  return []
+
+
+def RunSaveUnstripped(unstripped_path_prefix, full_args):
+  """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies
+  the linker output to |unstripped_path_prefix| before stripping.
+
+  Args:
+    unstripped_path_prefix: string, The path at which the unstripped output
+        should be located.
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+    list of string, Build step outputs.
+  """
+  if not len(unstripped_path_prefix):
+    raise ValueError('Unspecified unstripped output file')
+
+  linker_out = _FindLinkerOutput(full_args)
+  base = os.path.basename(linker_out)
+  unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')
+
+  shutil.copyfile(linker_out, unstripped_out)
+  return [unstripped_out]
+
+
+def RunStrip(strip_args_string, full_args):
+  """Linker driver action for -Wcrl,strip,<strip_arguments>.
+
+  Args:
+      strip_args_string: string, Comma-separated arguments for `strip`.
+      full_args: list of string, Full arguments for the linker driver.
+
+  Returns:
+      list of string, Build step outputs.
+  """
+  strip_command = list(STRIP_INVOKE)
+  if len(strip_args_string) > 0:
+    strip_command += strip_args_string.split(',')
+  strip_command.append(_FindLinkerOutput(full_args))
+  subprocess.check_call(strip_command)
+  return []
+
+
+def SetStripPath(strip_path, full_args):
+  """Linker driver action for -Wcrl,strippath,<strip_path>.
+
+  Sets the invocation command for strip, which allows the caller to specify
+  an alternate strip. This action is always processed before the RunStrip
+  action.
+
+  Args:
+    strip_path: string, The path to the strip binary to run
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+    No output - this step is run purely for its side-effect.
+  """
+  global STRIP_INVOKE
+  STRIP_INVOKE = [strip_path]
+  return []
+
+
+def _FindLinkerOutput(full_args):
+  """Finds the output of the linker by looking for the output flag in its
+  argument list. As this is a required linker argument, raises an error if it
+  cannot be found.
+  """
+  # The linker_driver.py script may be used to wrap either the compiler linker
+  # (which uses -o to configure the output) or lipo (which uses -output).
+  # Since wrapping the compiler linker is the most likely possibility, try -o
+  # first and fall back to checking for -output if -o is not found.
+  try:
+    output_flag_index = full_args.index('-o')
+  except ValueError:
+    output_flag_index = full_args.index('-output')
+  return full_args[output_flag_index + 1]
+
+
+def _FindToolsPaths(full_args):
+  """Finds all paths where the script should look for additional tools."""
+  paths = []
+  for idx, arg in enumerate(full_args):
+    if arg in ['-B', '--prefix']:
+      paths.append(full_args[idx + 1])
+    elif arg.startswith('-B'):
+      paths.append(arg[2:])
+    elif arg.startswith('--prefix='):
+      paths.append(arg[9:])
+  return paths
+
+
+def _RemovePath(path):
+  """Removes the file or directory at |path| if it exists."""
+  if os.path.exists(path):
+    if os.path.isdir(path):
+      shutil.rmtree(path)
+    else:
+      os.unlink(path)
+
+
+_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
+"""List of linker driver actions. The sort order of this list affects the
+order in which the actions are invoked. The first item in the tuple is the
+argument's -Wcrl,<sub_argument> and the second is the function to invoke.
+"""
+_LINKER_DRIVER_ACTIONS = [
+    ('dsymutilpath,', SetDsymutilPath),
+    ('dsym,', RunDsymUtil),
+    ('unstripped,', RunSaveUnstripped),
+    ('strippath,', SetStripPath),
+    ('strip,', RunStrip),
+]
+
+if __name__ == '__main__':
+  Main(sys.argv)
+  sys.exit(0)
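
As a worked example of the dispatch above (a sketch assuming this file is
importable as linker_driver; the path payload is hypothetical):

    import linker_driver

    # '-Wcrl,dsym,out/gn' selects the 'dsym,' action with payload 'out/gn'.
    name, action = linker_driver.ProcessLinkerDriverArg('-Wcrl,dsym,out/gn')
    assert name == 'dsym,'
    # Calling action(full_args) would locate the linker output via the -o flag
    # in full_args, run dsymutil on it, and return the produced .dSYM path.
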
diff --git a/src/build/toolchain/apple/toolchain.gni b/src/build/toolchain/apple/toolchain.gni
new file mode 100644
index 0000000..3474a31
--- /dev/null
+++ b/src/build/toolchain/apple/toolchain.gni
@@ -0,0 +1,533 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different than on
+# Linux.
+
+import("//build/config/apple/symbols.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert((target_os == "ios" && host_os == "mac") || host_os != "win")
+
+declare_args() {
+  # This makes the linker set timestamps in Mach-O files to 0. This isn't
+  # enabled by default because it breaks Xcode's lldb. This was fixed in
+  # https://reviews.llvm.org/rL368199, but the fix has not yet made it into a
+  # public lldb release.
+  mac_deterministic_build = false
+
+  # This controls whether whole module optimization is enabled when building
+  # Swift modules. If enabled, the compiler will compile the module as one
+  # unit, generating just one single object file. Otherwise, it will generate
+  # one object file per .swift file. If unspecified, will default to "true"
+  # for official builds, and "false" for all other builds.
+  swift_whole_module_optimization = -1
+}
+
+if (swift_whole_module_optimization == -1) {
+  swift_whole_module_optimization = is_official_build
+}
+
+# When implementing tools using Python scripts, a TOOL_VERSION=N env
+# variable is placed in front of the command. The N should be incremented
+# whenever the script is changed, so that the build system rebuilds all
+# edges that utilize the script. Ideally this should be changed to use
+# proper input-dirty checking, but that could be expensive. Instead, use a
+# script to get the tool scripts' modification time to use as the version.
+# This won't cause a re-generation of GN files when the tool script changes
+# but it will cause edges to be marked as dirty if the ninja files are
+# regenerated. See https://crbug.com/619083 for details. A proper fix
+# would be to have inputs to tools (https://crbug.com/621119).
+tool_versions =
+    exec_script("get_tool_mtime.py",
+                rebase_path([
+                              "//build/toolchain/apple/filter_libtool.py",
+                              "//build/toolchain/apple/linker_driver.py",
+                              "//build/toolchain/ios/compile_xcassets.py",
+                              "//build/toolchain/ios/swiftc.py",
+                            ],
+                            root_build_dir),
+                "trim scope")
+
+# Shared toolchain definition. Invocations should set current_os to set the
+# build args in this definition.
+template("apple_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must declare toolchain_args")
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may be
+      # set on those toolchains.
+      host_toolchain = host_toolchain
+    }
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+    if (defined(toolchain_args.use_xcode_clang)) {
+      toolchain_uses_xcode_clang = toolchain_args.use_xcode_clang
+    } else {
+      toolchain_uses_xcode_clang = use_xcode_clang
+    }
+
+    # Supports building with the version of clang shipped with Xcode when
+    # targeting iOS by not respecting clang_base_path.
+    if (toolchain_uses_xcode_clang) {
+      prefix = invoker.bin_path
+    } else {
+      prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
+    }
+
+    _cc = "${prefix}clang"
+    _cxx = "${prefix}clang++"
+
+    swiftmodule_switch = "-Wl,-add_ast_path,"
+
+    # Compute the compiler prefix.
+    if (toolchain_uses_goma) {
+      assert(toolchain_cc_wrapper == "",
+             "Goma and cc_wrapper can't be used together.")
+      compiler_prefix = "$goma_dir/gomacc "
+    } else if (toolchain_cc_wrapper != "") {
+      compiler_prefix = toolchain_cc_wrapper + " "
+    } else {
+      compiler_prefix = ""
+    }
+
+    cc = compiler_prefix + _cc
+    cxx = compiler_prefix + _cxx
+    ld = _cxx
+
+    # Set the explicit search path for clang++ so it uses the right linker
+    # binary.
+    if (!use_lld) {
+      ld += " -B " + invoker.bin_path
+    }
+
+    if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+      toolchain_coverage_instrumentation_input_file =
+          toolchain_args.coverage_instrumentation_input_file
+    } else {
+      toolchain_coverage_instrumentation_input_file =
+          coverage_instrumentation_input_file
+    }
+    _use_clang_coverage_wrapper =
+        toolchain_coverage_instrumentation_input_file != ""
+    if (_use_clang_coverage_wrapper) {
+      _coverage_wrapper =
+          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+                      root_build_dir) + " --files-to-instrument=" +
+          rebase_path(toolchain_coverage_instrumentation_input_file,
+                      root_build_dir) + " --target-os=" + target_os
+      cc = "$python_path $_coverage_wrapper ${cc}"
+      cxx = "$python_path $_coverage_wrapper ${cxx}"
+    }
+
+    linker_driver =
+        "TOOL_VERSION=${tool_versions.linker_driver} " +
+        rebase_path("//build/toolchain/apple/linker_driver.py", root_build_dir)
+
+    # Specify an explicit path for the strip binary.
+    _strippath = invoker.bin_path + "strip"
+    linker_driver += " -Wcrl,strippath," + _strippath
+
+    if (mac_deterministic_build) {
+      linker_driver += " --deterministic"
+    }
+
+    # On iOS, the final applications are assembled using lipo (to support fat
+    # builds). The correct flags are passed to the linker_driver.py script
+    # directly during the lipo call. The test is against the target_os because
+    # there is no need to create .dSYMs for targets compiled for the host.
+    if (defined(invoker.strip_with_lipo) && invoker.strip_with_lipo) {
+      _enable_dsyms = false
+      _save_unstripped_output = false
+    } else {
+      _enable_dsyms = enable_dsyms
+      _save_unstripped_output = save_unstripped_output
+    }
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    # Object files go in this directory. Use label_name instead of
+    # target_output_name since labels will generally have no spaces and will be
+    # unique in the directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    # If dSYMs are enabled, this flag will be added to the link tools.
+    if (_enable_dsyms) {
+      dsym_switch = " -Wcrl,dsym,{{root_out_dir}} "
+      dsym_switch += "-Wcrl,dsymutilpath," +
+                     rebase_path("//tools/clang/dsymutil/bin/dsymutil",
+                                 root_build_dir) + " "
+
+      dsym_output_dir =
+          "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM"
+      dsym_output = [
+        "$dsym_output_dir/Contents/Info.plist",
+        "$dsym_output_dir/Contents/Resources/DWARF/" +
+            "{{target_output_name}}{{output_extension}}",
+      ]
+    } else {
+      dsym_switch = ""
+    }
+
+    if (_save_unstripped_output) {
+      _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
+    }
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("objc") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("objcxx") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJCXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+
+      if (!use_lld) {
+        # Note about -filelist: Apple's linker reads the file list file and
+        # interprets each newline-separated chunk of text as a file name. It
+        # doesn't do the things one would expect from the shell like unescaping
+        # or handling quotes. In contrast, when Ninja finds a file name with
+        # spaces, it single-quotes them in $inputs_newline as it would normally
+        # do for command-line arguments. Thus any source names with spaces, or
+        # label names with spaces (which GN bases the output paths on) will be
+        # corrupted by this process. Don't use spaces for source files or
+        # labels.
+        rspfile_content = "{{inputs_newline}}"
+
+        script = rebase_path("//build/toolchain/apple/filter_libtool.py",
+                             root_build_dir)
+
+        # Specify explicit path for libtool.
+        libtool = invoker.bin_path + "libtool"
+        command = "rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} $python_path $script $libtool -static -D {{arflags}} -o {{output}} -filelist $rspfile"
+        description = "LIBTOOL-STATIC {{output}}"
+      } else {
+        rspfile_content = "{{inputs}}"
+        ar = "${prefix}llvm-ar"
+        command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} \"@$rspfile\""
+
+        # Remove the output file first so that ar doesn't try to modify the
+        # existing file.
+        command = "rm -f {{output}} && $command"
+        description = "AR {{output}}"
+      }
+      outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]
+      default_output_dir = "{{target_out_dir}}"
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      # E.g. "./libfoo.dylib":
+      dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = dylib + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .dylib, (2) extracting the symbols from that
+      # file to a temporary file, and (3) replacing the existing .TOC file
+      # with the temporary one only if their contents differ; otherwise the
+      # .TOC is left untouched.
+      #
+      # As a special case, if the library reexports symbols from other dynamic
+      # libraries, we always update the .TOC and skip the temporary file and
+      # diffing steps, since that library always needs to be re-linked.
+      tocname = dylib + ".TOC"
+      temporary_tocname = dylib + ".tmp"
+
+      # Use explicit paths to binaries. The binaries present on the default
+      # search path in /usr/bin are thin wrappers around xcrun, which requires a
+      # full CommandLineTools or Xcode install, and still may not choose the
+      # appropriate binary if there are multiple installs.
+      if (host_os == "mac") {
+        nm = invoker.bin_path + "nm"
+        otool = invoker.bin_path + "otool"
+      } else {
+        nm = "${prefix}llvm-nm"
+        otool = "${prefix}llvm-otool"
+      }
+
+      does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || $otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB"
+
+      link_command = "$linker_driver $ld -shared "
+      if (is_component_build) {
+        link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
+      }
+      link_command += dsym_switch
+      link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+
+      replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
+      extract_toc_command = "{ $otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; $nm -gPp \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+      command = "if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi"
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_dir = "{{root_out_dir}}"
+      default_output_extension = ".dylib"
+
+      output_prefix = "lib"
+
+      # Since the above commands only updates the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the dylib but use the
+      # tocname for dependency management.
+      outputs = [
+        dylib,
+        tocname,
+      ]
+      link_output = dylib
+      depend_output = tocname
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+    }
+
+    tool("solink_module") {
+      # E.g. "./libfoo.so":
+      sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      link_command = "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
+      if (is_component_build) {
+        link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}"
+      }
+      link_command += dsym_switch
+      link_command += " {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+      command = link_command
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK_MODULE {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_dir = "{{root_out_dir}}"
+      default_output_extension = ".so"
+
+      outputs = [ sofile ]
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+    }
+
+    tool("link") {
+      outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # Note about -filelist: Apple's linker reads the file list file and
+      # interprets each newline-separated chunk of text as a file name. It
+      # doesn't do the things one would expect from the shell like unescaping
+      # or handling quotes. In contrast, when Ninja finds a file name with
+      # spaces, it single-quotes them in $inputs_newline as it would normally
+      # do for command-line arguments. Thus any source names with spaces, or
+      # label names with spaces (which GN bases the output paths on) will be
+      # corrupted by this process. Don't use spaces for source files or labels.
+      command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}}"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs_newline}}"
+      outputs = [ outfile ]
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+
+      default_output_dir = "{{root_out_dir}}"
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+    }
+
+    tool("copy_bundle_data") {
+      # copy_command uses hard links if possible, but this does not work with
+      # directories. Also, when running EG2 tests from Xcode, Xcode tries to
+      # copy some files into the application bundle, which fails if source
+      # and destination are hardlinked together.
+      #
+      # Instead, use clonefile to copy the files; it is as efficient as a
+      # hardlink but ensures the files have distinct metadata (thus avoiding
+      # the error with ditto, see https://crbug.com/1042182).
+      if (host_os == "mac") {
+        command = "rm -rf {{output}} && cp -Rc {{source}} {{output}}"
+      } else {
+        command = "rm -rf {{output}} && cp -Rl {{source}} {{output}}"
+      }
+      description = "COPY_BUNDLE_DATA {{source}} {{output}}"
+      pool = "//build/toolchain/apple:bundle_pool($default_toolchain)"
+    }
+
+    # Swift is only used on iOS, not macOS. We want to minimize the number
+    # of Xcode-based tools used by the macOS toolchain, so we intentionally
+    # disallow future uses of Swift on macOS. https://crbug.com/965663.
+    if (toolchain_args.current_os == "ios") {
+      tool("swift") {
+        _tool = rebase_path("//build/toolchain/ios/swiftc.py", root_build_dir)
+
+        depfile = "{{target_out_dir}}/{{module_name}}.d"
+        depsformat = "gcc"
+
+        outputs = [
+          # The module needs to be the first output listed. The blank line after
+          # the module is required to prevent `gn format` from changing the file
+          # order.
+          "{{target_gen_dir}}/{{module_name}}.swiftmodule",
+
+          "{{target_gen_dir}}/{{module_name}}.h",
+          "{{target_gen_dir}}/{{module_name}}.swiftdoc",
+          "{{target_gen_dir}}/{{module_name}}.swiftsourceinfo",
+        ]
+
+        if (swift_whole_module_optimization) {
+          _extra_flags = "-whole-module-optimization"
+          _objects_dir = "{{target_out_dir}}"
+
+          outputs += [ "$_objects_dir/{{module_name}}.o" ]
+        } else {
+          _extra_flags = ""
+          _objects_dir = "{{target_out_dir}}/{{label_name}}"
+
+          partial_outputs = [ "$_objects_dir/{{source_name_part}}.o" ]
+        }
+
+        _env_vars = "TOOL_VERSION=${tool_versions.swiftc}"
+        if (invoker.sdk_developer_dir != "") {
+          _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}"
+        }
+
+        command =
+            "$_env_vars $_tool -module-name {{module_name}} " +
+            "-object-dir $_objects_dir " +
+            "-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " +
+            "-header-path {{target_gen_dir}}/{{module_name}}.h " +
+            "-depfile {{target_out_dir}}/{{module_name}}.d " +
+            "-depfile-filter {{target_gen_dir}}/{{module_name}}.swiftmodule " +
+            "-bridge-header {{bridge_header}} $_extra_flags " +
+            "{{swiftflags}} {{include_dirs}} {{module_dirs}} {{inputs}}"
+      }
+    }
+
+    # xcassets are only used on iOS, not macOS. We want to minimize the number
+    # of Xcode-based tools used by the macOS toolchain, so we intentionally
+    # disallow future uses of xcassets on macOS. https://crbug.com/965663.
+    if (toolchain_args.current_os == "ios") {
+      tool("compile_xcassets") {
+        _tool = rebase_path("//build/toolchain/ios/compile_xcassets.py",
+                            root_build_dir)
+
+        command =
+            "rm -f \"{{output}}\" && " +
+            "TOOL_VERSION=${tool_versions.compile_xcassets} " +
+            "$python_path $_tool -p \"${invoker.sdk_name}\" " +
+            "-t \"${invoker.deployment_target}\" " +
+            "-T \"{{bundle_product_type}}\" " +
+            "-P \"{{bundle_partial_info_plist}}\" " + "-o {{output}} {{inputs}}"
+
+        description = "COMPILE_XCASSETS {{output}}"
+        pool = "//build/toolchain/apple:bundle_pool($default_toolchain)"
+      }
+    }
+
+    tool("action") {
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+  }
+}
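
The solink .TOC handling above is the classic "only update the dependency
file when the interface changes" trick; restated as a minimal Python sketch
(hypothetical file names, not part of the toolchain):

    import filecmp
    import os
    import shutil

    def update_toc(temporary_tocname, tocname):
      # Overwrite the .TOC only when its contents changed; combined with
      # restat = true, an untouched timestamp lets ninja skip re-linking
      # dependent targets.
      if not os.path.exists(tocname) or not filecmp.cmp(
          temporary_tocname, tocname, shallow=False):
        shutil.move(temporary_tocname, tocname)
      else:
        os.remove(temporary_tocname)
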
diff --git a/src/build/toolchain/cc_wrapper.gni b/src/build/toolchain/cc_wrapper.gni
new file mode 100644
index 0000000..4c6d751
--- /dev/null
+++ b/src/build/toolchain/cc_wrapper.gni
@@ -0,0 +1,43 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+
+# Defines the configuration of cc wrapper
+# ccache: a c/c++ compiler cache which can greatly reduce recompilation times.
+# icecc, distcc: they take compile jobs from a build and distribute them among
+#                remote machines, allowing a parallel build.
+#
+# TIPS
+#
+# 1) ccache
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang.  (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
+#
+# 2) icecc
+# Set clang_use_chrome_plugins=false because icecc cannot distribute custom
+# clang libraries.
+#
+# To use icecc and ccache together, set cc_wrapper = "ccache" with
+# export CCACHE_PREFIX=icecc
+
+declare_args() {
+  # Set to "ccache", "icecc" or "distcc".  Probably doesn't work on windows.
+  cc_wrapper = ""
+}
+
+assert(!use_goma || cc_wrapper == "",
+       "use_goma and cc_wrapper can not be used together.")
+assert(!use_rbe || cc_wrapper == "",
+       "use_rbe and cc_wrapper can not be used together.")
diff --git a/src/build/toolchain/clang_code_coverage_wrapper.py b/src/build/toolchain/clang_code_coverage_wrapper.py
new file mode 100755
index 0000000..7bd9222
--- /dev/null
+++ b/src/build/toolchain/clang_code_coverage_wrapper.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Removes code coverage flags from invocations of the Clang C/C++ compiler.
+
+If the GN arg `use_clang_coverage=true`, this script will be invoked by default.
+GN will add coverage instrumentation flags to almost all source files.
+
+This script is used to remove instrumentation flags from a subset of the source
+files. By default, it will not remove flags from any files. If the option
+--files-to-instrument is passed, this script will remove flags from all files
+except the ones listed in --files-to-instrument.
+
+This script also contains hard-coded exclusion lists of files to never
+instrument, indexed by target operating system. Files in these lists have their
+flags removed in both modes. The OS can be selected with --target-os.
+
+This script also contains hard-coded force lists of files to always instrument,
+indexed by target operating system. Files in these lists never have their flags
+removed in either mode. The OS can be selected with --target-os.
+
+The order of precedence is: force list, exclusion list, --files-to-instrument.
+
+The path to the coverage instrumentation input file should be relative to the
+root build directory. The file consists of multiple lines, each of which is a
+path to a source file, also given relative to the root build directory, e.g.
+../../base/task/post_task.cc for build directory 'out/Release'. The paths
+should be written using OS-native path separators for the current platform.
+
+One caveat with this compiler wrapper is that it may introduce unexpected
+behaviors in incremental builds when the file path to the coverage
+instrumentation input file changes between consecutive runs, so callers of this
+script are strongly advised to always use the same path such as
+"${root_build_dir}/coverage_instrumentation_input.txt".
+
+It's worth noting that on try job builders, if the contents of the
+instrumentation file change so that a file no longer needs to be instrumented,
+the file will be recompiled automatically: if try job B runs after try job A,
+the files that were instrumented in A will be updated (i.e., reverted to the
+checked-in version) in B, and so they'll be considered out of date by ninja
+and recompiled.
+
+Example usage:
+  clang_code_coverage_wrapper.py \\
+      --files-to-instrument=coverage_instrumentation_input.txt
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+
+# Flags used to enable coverage instrumentation.
+# Flags should be listed in the same order that they are added in
+# build/config/coverage/BUILD.gn
+_COVERAGE_FLAGS = [
+    '-fprofile-instr-generate',
+    '-fcoverage-mapping',
+    # The following experimental flags remove unused header functions from
+    # the coverage mapping data embedded in the test binaries. The resulting
+    # reduction in binary size makes it possible to build Chrome's large unit
+    # test targets on macOS. Please refer to crbug.com/796290 for more details.
+    '-mllvm',
+    '-limited-coverage-experimental=true',
+]
+
+# Files that should not be built with coverage flags by default.
+_DEFAULT_COVERAGE_EXCLUSION_LIST = [
+    # TODO(crbug.com/1051561): angle_unittests affected by coverage.
+    '../../base/message_loop/message_pump_default.cc',
+    '../../base/message_loop/message_pump_libevent.cc',
+    '../../base/message_loop/message_pump_win.cc',
+    '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc',  #pylint: disable=line-too-long
+]
+
+# Map of exclusion lists indexed by target OS.
+# If no target OS is defined, or one is defined that doesn't have a specific
+# entry, use _DEFAULT_COVERAGE_EXCLUSION_LIST.
+_COVERAGE_EXCLUSION_LIST_MAP = {
+    'android': [
+        # This file caused the webview native library to fail on arm64.
+        '../../device/gamepad/dualshock4_controller.cc',
+    ],
+    'fuchsia': [
+        # TODO(crbug.com/1174725): These files caused clang to crash while
+        # compiling them.
+        '../../base/allocator/partition_allocator/pcscan.cc',
+        '../../third_party/skia/src/core/SkOpts.cpp',
+        '../../third_party/skia/src/opts/SkOpts_hsw.cpp',
+        '../../third_party/skia/third_party/skcms/skcms.cc',
+    ],
+    'linux': [
+        # These files caused a static initializer to be generated, which
+        # shouldn't happen.
+        # TODO(crbug.com/990948): Remove when the bug is fixed.
+        '../../chrome/browser/media/router/providers/cast/cast_internal_message_util.cc',  #pylint: disable=line-too-long
+        '../../components/cast_channel/cast_channel_enum.cc',
+        '../../components/cast_channel/cast_message_util.cc',
+        '../../components/media_router/common/providers/cast/cast_media_source.cc',  #pylint: disable=line-too-long
+        '../../ui/events/keycodes/dom/keycode_converter.cc',
+        # TODO(crbug.com/1051561): angle_unittests affected by coverage.
+        '../../base/message_loop/message_pump_default.cc',
+        '../../base/message_loop/message_pump_libevent.cc',
+        '../../base/message_loop/message_pump_win.cc',
+        '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc',  #pylint: disable=line-too-long
+    ],
+    'chromeos': [
+        # These files caused clang to crash while compiling them. They are
+        # excluded pending an investigation into the underlying compiler bug.
+        '../../third_party/webrtc/p2p/base/p2p_transport_channel.cc',
+        '../../third_party/icu/source/common/uts46.cpp',
+        '../../third_party/icu/source/common/ucnvmbcs.cpp',
+        '../../base/android/android_image_reader_compat.cc',
+        # TODO(crbug.com/1051561): angle_unittests affected by coverage.
+        '../../base/message_loop/message_pump_default.cc',
+        '../../base/message_loop/message_pump_libevent.cc',
+        '../../base/message_loop/message_pump_win.cc',
+        '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc',  #pylint: disable=line-too-long
+    ],
+    'win': [
+        # TODO(crbug.com/1051561): angle_unittests affected by coverage.
+        '../../base/message_loop/message_pump_default.cc',
+        '../../base/message_loop/message_pump_libevent.cc',
+        '../../base/message_loop/message_pump_win.cc',
+        '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc',  #pylint: disable=line-too-long
+    ],
+}
+
+# Map of force lists indexed by target OS.
+_COVERAGE_FORCE_LIST_MAP = {
+    # clang_profiling.cc refers to the symbol `__llvm_profile_dump` from the
+    # profiling runtime. In a partial coverage build, it is possible for a
+    # binary to include clang_profiling.cc but have no instrumented files, thus
+    # causing an unresolved symbol error because the profiling runtime will not
+    # be linked in. Therefore we force coverage for this file to ensure that
+    # any target that includes it will also get the profiling runtime.
+    'win': [r'..\..\base\test\clang_profiling.cc'],
+    # TODO(crbug.com/1141727) We're seeing runtime LLVM errors in mac-rel when
+    # no files are changed, so we suspect that this is similar to the other
+    # problem with clang_profiling.cc on Windows. The TODO here is to force
+    # coverage for this specific file on ALL platforms, if it turns out to fix
+    # this issue on Mac as well. It's the only file that directly calls
+    # `__llvm_profile_dump` so it warrants some special treatment.
+    'mac': ['../../base/test/clang_profiling.cc'],
+}
+
+
+def _remove_flags_from_command(command):
+  # We need to remove the coverage flags for this file, but we only want to
+  # remove them if we see the exact sequence defined in _COVERAGE_FLAGS.
+  # That ensures that we only remove the flags added by GN when
+  # "use_clang_coverage" is true. Otherwise, we would remove flags set by
+  # other parts of the build system.
+  start_flag = _COVERAGE_FLAGS[0]
+  num_flags = len(_COVERAGE_FLAGS)
+  start_idx = 0
+  try:
+    while True:
+      idx = command.index(start_flag, start_idx)
+      if command[idx:idx + num_flags] == _COVERAGE_FLAGS:
+        del command[idx:idx + num_flags]
+        # There can be multiple sets of _COVERAGE_FLAGS. All of these need to be
+        # removed.
+        start_idx = idx
+      else:
+        start_idx = idx + 1
+  except ValueError:
+    pass
+
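+
+# A minimal sketch of the removal behavior (hypothetical command, not part of
+# the real build flow):
+#   cmd = ['clang++', '-fprofile-instr-generate', '-fcoverage-mapping',
+#          '-mllvm', '-limited-coverage-experimental=true', '-c', 'a.cc']
+#   _remove_flags_from_command(cmd)
+#   # cmd is now ['clang++', '-c', 'a.cc']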
+
+def main():
+  arg_parser = argparse.ArgumentParser()
+  arg_parser.usage = __doc__
+  arg_parser.add_argument(
+      '--files-to-instrument',
+      type=str,
+      help='Path to a file that contains a list of file names to instrument.')
+  arg_parser.add_argument(
+      '--target-os', required=False, help='The OS to compile for.')
+  arg_parser.add_argument('args', nargs=argparse.REMAINDER)
+  parsed_args = arg_parser.parse_args()
+
+  if (parsed_args.files_to_instrument and
+      not os.path.isfile(parsed_args.files_to_instrument)):
+    raise Exception('Path to the coverage instrumentation file: "%s" doesn\'t '
+                    'exist.' % parsed_args.files_to_instrument)
+
+  compile_command = parsed_args.args
+  if not any('clang' in s for s in compile_command):
+    return subprocess.call(compile_command)
+
+  target_os = parsed_args.target_os
+
+  try:
+    # The command is assumed to use Clang as the compiler, the path to the
+    # source file follows the -c argument, and the source path is relative to
+    # the root build directory. For example:
+    # clang++ -fvisibility=hidden -c ../../base/files/file_path.cc -o \
+    #   obj/base/base/file_path.o
+    # On Windows, clang-cl.exe uses /c instead of -c.
+    source_flag = '/c' if target_os == 'win' else '-c'
+    source_flag_index = compile_command.index(source_flag)
+  except ValueError:
+    print('%s argument was not found in the compile command.' % source_flag)
+    raise
+
+  if source_flag_index + 1 >= len(compile_command):
+    raise Exception('Source file to be compiled is missing from the command.')
+
+  # On Windows, filesystem paths should use '\', but GN creates build commands
+  # that use '/'. We invoke os.path.normpath to ensure that the path uses the
+  # correct separator for the current platform (i.e. '\' on Windows and '/'
+  # otherwise).
+  compile_source_file = os.path.normpath(compile_command[source_flag_index + 1])
+  exclusion_list = _COVERAGE_EXCLUSION_LIST_MAP.get(
+      target_os, _DEFAULT_COVERAGE_EXCLUSION_LIST)
+  force_list = _COVERAGE_FORCE_LIST_MAP.get(target_os, [])
+
+  should_remove_flags = False
+  if compile_source_file not in force_list:
+    if compile_source_file in exclusion_list:
+      should_remove_flags = True
+    elif parsed_args.files_to_instrument:
+      with open(parsed_args.files_to_instrument) as f:
+        if compile_source_file not in f.read():
+          should_remove_flags = True
+
+  if should_remove_flags:
+    _remove_flags_from_command(compile_command)
+
+  return subprocess.call(compile_command)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/concurrent_links.gni b/src/build/toolchain/concurrent_links.gni
new file mode 100644
index 0000000..f1a42c9
--- /dev/null
+++ b/src/build/toolchain/concurrent_links.gni
@@ -0,0 +1,106 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file should only be imported from files that define toolchains.
+# There's no way to enforce this exactly, but all toolchains are processed
+# in the context of the default_toolchain, so we can at least check for that.
+assert(current_toolchain == default_toolchain)
+
+import("//build/config/android/config.gni")
+import("//build/config/apple/symbols.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Limit the number of concurrent links; we often want to run fewer
+  # links at once than we do compiles, because linking is memory-intensive.
+  # The default to use varies by platform and by the amount of memory
+  # available, so we call out to a script to get the right value.
+  concurrent_links = -1
+}
+
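+# For example, a hypothetical args.gn entry "concurrent_links = 4" caps local
+# links at four jobs regardless of the detected memory; -1 (the default)
+# defers to get_concurrent_links.py below.
+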
+if (concurrent_links == -1) {
+  if (use_thin_lto) {
+    _args = [ "--reserve_mem_gb=10" ]
+    if (use_goma_thin_lto) {
+      _args += [ "--thin-lto=goma" ]
+    } else {
+      _args += [ "--thin-lto=local" ]
+    }
+    if (is_win) {
+      # Based on measurements of linking chrome.dll and chrome_child.dll, plus
+      # a little padding to account for future growth.
+      _args += [ "--mem_per_link_gb=45" ]
+    } else {
+      _args += [ "--mem_per_link_gb=10" ]
+    }
+  } else if ((use_clang_coverage &&
+              # When coverage_instrumentation_input_file is not empty it means
+              # we're only instrumenting changed files and not using a lot of
+              # memory. Likewise, when it's empty we're building everything with
+              # coverage, which requires more memory.
+              coverage_instrumentation_input_file == "") ||
+             use_sanitizer_coverage || use_fuzzing_engine) {
+    # Full sanitizer coverage instrumentation increases linker memory consumption
+    # significantly.
+    _args = [ "--mem_per_link_gb=16" ]
+  } else if (is_win && symbol_level == 1 && !is_debug && is_component_build) {
+    _args = [ "--mem_per_link_gb=3" ]
+  } else if (is_win) {
+    _args = [ "--mem_per_link_gb=6" ]
+  } else if (is_mac) {
+    if (enable_dsyms) {
+      _args = [ "--mem_per_link_gb=12" ]
+    } else {
+      _args = [ "--mem_per_link_gb=4" ]
+    }
+  } else if (is_android && !is_component_build && symbol_level == 2) {
+    # Full debug symbols require large memory for link.
+    _args = [ "--mem_per_link_gb=25" ]
+  } else if (is_android && !is_debug && !using_sanitizer && is_java_debug &&
+             disable_android_lint && symbol_level < 2) {
+    if (symbol_level == 1) {
+      _args = [ "--mem_per_link_gb=6" ]
+    } else {
+      _args = [ "--mem_per_link_gb=4" ]
+    }
+  } else if ((is_linux || is_chromeos_lacros) && symbol_level == 0) {
+    # Memory consumption when linking without debug symbols is low on Linux.
+    _args = [ "--mem_per_link_gb=3" ]
+  } else {
+    _args = []
+  }
+
+  # For Android builds, we also need to be wary of:
+  # * ProGuard / R8
+  # * Android Lint
+  # These both have a peak usage of < 2GB, but that is still large enough for
+  # them to need to use a pool since they both typically happen at the
+  # same time as linking.
+  if (is_android) {
+    _args += [ "--secondary_mem_per_link=2" ]
+  }
+
+  # TODO(crbug.com/617429) Pass more build configuration info to the script
+  # so that we can compute better values.
+  _command_dict = exec_script("get_concurrent_links.py", _args, "scope")
+
+  concurrent_links = _command_dict.primary_pool_size
+  concurrent_links_logs = _command_dict.explanation
+
+  if (_command_dict.secondary_pool_size >= concurrent_links) {
+    # Have R8 / Lint share the link pool unless we would safely get more
+    # concurrency out of using a separate one.
+    # On low-RAM machines, this allows an apk's native library to link at the
+    # same time as its java is optimized with R8.
+    java_cmd_pool_size = _command_dict.secondary_pool_size
+  }
+} else {
+  assert(!use_thin_lto, "can't explicitly set concurrent_links with ThinLTO")
+  concurrent_links_logs =
+      [ "concurrent_links set by GN arg (value=$concurrent_links)" ]
+}
diff --git a/src/build/toolchain/cros/BUILD.gn b/src/build/toolchain/cros/BUILD.gn
new file mode 100644
index 0000000..6d13678
--- /dev/null
+++ b/src/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/cros_toolchain.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# This is mostly identical to gcc_toolchain, but handles relativizing toolchain
+# paths. This is needed for CrOS since these paths often change based on the
+# environment. For example, cxx is a relative path picked up on $PATH in the
+# chroot. But in Simple Chrome, cxx is a system-absolute path.
+template("cros_toolchain") {
+  gcc_toolchain(target_name) {
+    forward_variables_from(invoker, "*")
+
+    # CrOS's target toolchain wrapper prefers to invoke gomacc itself, so pass
+    # it the gomacc path via cmd-line arg. Otherwise, for both CrOS's host
+    # wrapper (used in the ebuild) and Chrome's clang (used in Simple Chrome),
+    # prepend gomacc like normal.
+    if (use_goma && toolchain_args.needs_gomacc_path_arg) {
+      extra_cppflags += " --gomacc-path $goma_dir/gomacc"
+    }
+
+    # Relativize the path if the compiler is specified as a path (so it is not
+    # looked up on $PATH) and cc/cxx does not contain additional flags.
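+    # For example (hypothetical values): cc = "/opt/cros/clang" is rebased to
+    # a root_build_dir-relative path, while cc = "clang" (a bare $PATH lookup)
+    # and cc = "clang -B/x" (contains a flag) are left unchanged.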
+    if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) {
+      cc = rebase_path(cc, root_build_dir)
+    }
+    if (cxx != get_path_info(cxx, "file") &&
+        string_replace(cxx, " ", "") == cxx) {
+      cxx = rebase_path(cxx, root_build_dir)
+    }
+    if (ar != get_path_info(ar, "file") && string_replace(ar, " ", "") == ar) {
+      ar = rebase_path(ar, root_build_dir)
+    }
+    if (ld != get_path_info(ld, "file") && string_replace(ld, " ", "") == ld) {
+      ld = rebase_path(ld, root_build_dir)
+    }
+  }
+}
+
+# This is the normal toolchain for most targets.
+cros_toolchain("target") {
+  ar = cros_target_ar
+  cc = cros_target_cc
+  cxx = cros_target_cxx
+  ld = cros_target_ld
+
+  if (cros_target_nm != "") {
+    nm = cros_target_nm
+  }
+  if (cros_target_readelf != "") {
+    readelf = cros_target_readelf
+  }
+  extra_cflags = cros_target_extra_cflags
+  extra_cppflags = cros_target_extra_cppflags
+  extra_cxxflags = cros_target_extra_cxxflags
+  extra_ldflags = cros_target_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    needs_gomacc_path_arg = true
+    clang_use_chrome_plugins = false
+    current_cpu = target_cpu
+    current_os = "chromeos"
+    is_clang = is_clang
+    use_debug_fission = use_debug_fission
+    use_gold = use_gold
+    use_sysroot = use_sysroot
+    sysroot = target_sysroot
+  }
+}
+
+# This is a special toolchain needed just for the nacl_bootstrap target in
+# //native_client/src/trusted/service_runtime/linux. It is identical
+# to ":target" except that it forces use_debug_fission, use_gold, and
+# use_sysroot off, and allows the user to set different sets of extra flags.
+cros_toolchain("nacl_bootstrap") {
+  ar = cros_target_ar
+  cc = cros_target_cc
+  cxx = cros_target_cxx
+  ld = cros_target_ld
+
+  if (cros_target_nm != "") {
+    nm = cros_target_nm
+  }
+  if (cros_target_readelf != "") {
+    readelf = cros_target_readelf
+  }
+  extra_cflags = cros_nacl_bootstrap_extra_cflags
+  extra_cppflags = cros_nacl_bootstrap_extra_cppflags
+  extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags
+  extra_ldflags = cros_nacl_bootstrap_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    needs_gomacc_path_arg = true
+    clang_use_chrome_plugins = false
+    current_cpu = target_cpu
+    current_os = "chromeos"
+    is_clang = is_clang
+    use_debug_fission = false
+    use_gold = false
+    use_sysroot = false
+  }
+}
+
+cros_toolchain("host") {
+  # These are args for the template.
+  ar = cros_host_ar
+  cc = cros_host_cc
+  cxx = cros_host_cxx
+  ld = cros_host_ld
+
+  if (cros_host_nm != "") {
+    nm = cros_host_nm
+  }
+  if (cros_host_readelf != "") {
+    readelf = cros_host_readelf
+  }
+  extra_cflags = cros_host_extra_cflags
+  extra_cppflags = cros_host_extra_cppflags
+  extra_cxxflags = cros_host_extra_cxxflags
+  extra_ldflags = cros_host_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    needs_gomacc_path_arg = false
+    clang_use_chrome_plugins = false
+    is_clang = cros_host_is_clang
+    current_cpu = host_cpu
+    current_os = "linux"
+    use_sysroot = use_sysroot
+    sysroot = cros_host_sysroot
+  }
+}
+
+cros_toolchain("v8_snapshot") {
+  # These are args for the template.
+  ar = cros_v8_snapshot_ar
+  cc = cros_v8_snapshot_cc
+  cxx = cros_v8_snapshot_cxx
+  ld = cros_v8_snapshot_ld
+
+  if (cros_v8_snapshot_nm != "") {
+    nm = cros_v8_snapshot_nm
+  }
+  if (cros_v8_snapshot_readelf != "") {
+    readelf = cros_v8_snapshot_readelf
+  }
+  extra_cflags = cros_v8_snapshot_extra_cflags
+  extra_cppflags = cros_v8_snapshot_extra_cppflags
+  extra_cxxflags = cros_v8_snapshot_extra_cxxflags
+  extra_ldflags = cros_v8_snapshot_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    needs_gomacc_path_arg = false
+    clang_use_chrome_plugins = false
+    is_clang = cros_v8_snapshot_is_clang
+    if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") {
+      current_cpu = "x86"
+    } else {
+      current_cpu = "x64"
+    }
+    v8_current_cpu = v8_target_cpu
+    current_os = "linux"
+    use_sysroot = use_sysroot
+    sysroot = cros_v8_snapshot_sysroot
+  }
+}
diff --git a/src/build/toolchain/cros_toolchain.gni b/src/build/toolchain/cros_toolchain.gni
new file mode 100644
index 0000000..ccc4db2
--- /dev/null
+++ b/src/build/toolchain/cros_toolchain.gni
@@ -0,0 +1,88 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# CrOS builds must cross-compile on a Linux host for the actual CrOS
+# device target. There are many different CrOS devices so the build
+# system provides configuration variables that permit a CrOS build to
+# control the cross-compilation toolchain. However, requiring such
+# fine-grained specification is tedious for build-bots and developers.
+# Consequently, the CrOS build system defaults to a convenience
+# compilation mode where the compilation host is also the build target.
+#
+# Chrome can be compiled in this way with the gn variable:
+#
+# target_os = "chromeos"
+#
+# To perform a board-specific build, first obtain the correct system
+# root (http://goo.gl/aFB4XH) for the board. Then configure GN to use it
+# by setting appropriate cross-compilation variables.
+#
+# For example, to compile a Chrome source tree in /g/src for an
+# auron_paine CrOS device with the system root cached in /g/.cros_cache,
+# the following GN arguments must be provided to configure
+# cross-compilation with Goma acceleration. (NB: additional variables
+# will be necessary to successfully compile a working CrOS Chrome. See
+# the definition of GYP_DEFINES inside a sysroot shell.)
+#
+# goma_dir = "/g/.cros_cache/common/goma+2"
+# target_sysroot = "/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7644.0.0+sysroot_chromeos-base_chromeos-chrome.tar.xz"
+# cros_target_cc = "x86_64-cros-linux-gnu-gcc -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_cxx = "x86_64-cros-linux-gnu-g++ -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_ar = "x86_64-cros-linux-gnu-gcc-ar"
+# target_cpu = "x64"
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+
+declare_args() {
+  # These must be specified for a board-specific build.
+  cros_target_ar = "ar"
+  cros_target_cc = "gcc"
+  cros_target_cxx = "g++"
+  cros_target_nm = ""
+  cros_target_readelf = ""
+
+  # These can be optionally set. The "_cppflags" will be applied to *both*
+  # C and C++ files; use "_cxxflags" for C++-only flags.
+  cros_target_extra_cflags = ""
+  cros_target_extra_cppflags = ""
+  cros_target_extra_cxxflags = ""
+  cros_target_extra_ldflags = ""
+
+  # is_clang is used instead of cros_target_is_clang
+  cros_host_ar = "${clang_base_path}/bin/llvm-ar"
+  cros_host_cc = "${clang_base_path}/bin/clang"
+  cros_host_cxx = "${clang_base_path}/bin/clang++"
+  cros_host_is_clang = true
+  cros_host_nm = ""
+  cros_host_readelf = ""
+  cros_host_extra_cflags = ""
+  cros_host_extra_cppflags = ""
+  cros_host_extra_cxxflags = ""
+  cros_host_extra_ldflags = ""
+  cros_host_sysroot = ""
+
+  cros_v8_snapshot_ar = "${clang_base_path}/bin/llvm-ar"
+  cros_v8_snapshot_cc = "${clang_base_path}/bin/clang"
+  cros_v8_snapshot_cxx = "${clang_base_path}/bin/clang++"
+  cros_v8_snapshot_is_clang = true
+  cros_v8_snapshot_nm = ""
+  cros_v8_snapshot_readelf = ""
+  cros_v8_snapshot_extra_cflags = ""
+  cros_v8_snapshot_extra_cppflags = ""
+  cros_v8_snapshot_extra_cxxflags = ""
+  cros_v8_snapshot_extra_ldflags = ""
+  cros_v8_snapshot_sysroot = ""
+
+  cros_nacl_bootstrap_extra_cflags = ""
+  cros_nacl_bootstrap_extra_cppflags = ""
+  cros_nacl_bootstrap_extra_cxxflags = ""
+  cros_nacl_bootstrap_extra_ldflags = ""
+}
+
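+# This is a separate declare_args() block so that the linker defaults below
+# can be derived from the compiler values declared above.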
+declare_args() {
+  cros_target_ld = cros_target_cxx
+  cros_host_ld = cros_host_cxx
+  cros_v8_snapshot_ld = cros_v8_snapshot_cxx
+}
diff --git a/src/build/toolchain/fuchsia/BUILD.gn b/src/build/toolchain/fuchsia/BUILD.gn
new file mode 100644
index 0000000..d77640b
--- /dev/null
+++ b/src/build/toolchain/fuchsia/BUILD.gn
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+# Fuchsia builds using the Clang toolchain, with most parameters common across
+# the different target architectures.
+template("fuchsia_clang_toolchain") {
+  clang_toolchain(target_name) {
+    assert(host_os == "linux" || host_os == "mac")
+    assert(defined(invoker.toolchain_args),
+           "toolchain_args must be defined for fuchsia_clang_toolchain()")
+
+    # We want to build and strip binaries, but retain the unstripped binaries
+    # in runtime_deps to make them available for isolates.
+    strip = rebase_path("${clang_base_path}/bin/llvm-strip", root_build_dir)
+    use_unstripped_as_runtime_outputs = true
+
+    default_shlib_subdir = "/lib"
+
+    toolchain_args = invoker.toolchain_args
+    toolchain_args.current_os = "fuchsia"
+  }
+}
+
+fuchsia_clang_toolchain("x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+fuchsia_clang_toolchain("arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
diff --git a/src/build/toolchain/fuchsia/DIR_METADATA b/src/build/toolchain/fuchsia/DIR_METADATA
new file mode 100644
index 0000000..6d8f079
--- /dev/null
+++ b/src/build/toolchain/fuchsia/DIR_METADATA
@@ -0,0 +1,7 @@
+monorail {
+  component: "Fuchsia"
+}
+
+team_email: "cr-fuchsia@chromium.org"
+
+os: FUCHSIA
diff --git a/src/build/toolchain/gcc_link_wrapper.py b/src/build/toolchain/gcc_link_wrapper.py
new file mode 100755
index 0000000..b70de8b
--- /dev/null
+++ b/src/build/toolchain/gcc_link_wrapper.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a linking command and optionally a strip command.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("link"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+# When running on a Windows host and using a toolchain whose tools are
+# actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+# executables, the "command" to run has to be prefixed with this magic.
+# The GN toolchain definitions take care of that for when GN/Ninja is
+# running the tool directly.  When that command is passed in to this
+# script, it appears as a unitary string but needs to be split up so that
+# just 'cmd' is the actual command given to Python's subprocess module.
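+# For example (hypothetical path), ['cmd /c call C:\tc\ld.bat', '-o', 'a.exe']
+# is split by CommandToRun() below into
+# ['cmd', '/c', 'call', 'C:\tc\ld.bat', '-o', 'a.exe'].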
+BAT_PREFIX = 'cmd /c call '
+
+def CommandToRun(command):
+  if command[0].startswith(BAT_PREFIX):
+    command = command[0].split(None, 3) + command[1:]
+  return command
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--strip',
+                      help='The strip binary to run',
+                      metavar='PATH')
+  parser.add_argument('--unstripped-file',
+                      help='Executable file produced by linking command',
+                      metavar='FILE')
+  parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+                            'gzipped if extension ends with .gz'),
+                      metavar='FILE')
+  parser.add_argument('--dwp', help=('The dwp binary to run'), metavar='FILE')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Final output executable file',
+                      metavar='FILE')
+  parser.add_argument('command', nargs='+',
+                      help='Linking command')
+  args = parser.parse_args()
+
+  # Work-around for gold being slow-by-default. http://crbug.com/632230
+  fast_env = dict(os.environ)
+  fast_env['LC_ALL'] = 'C'
+  result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env,
+                                                    map_file=args.map_file)
+  if result != 0:
+    return result
+
+  # If dwp is set, then package debug info for this exe.
+  dwp_proc = None
+  if args.dwp:
+    exe_file = args.output
+    if args.unstripped_file:
+      exe_file = args.unstripped_file
+    # Suppress output here because it doesn't seem to be useful. The most
+    # common error is a segfault, which will happen if files are missing.
+    with open(os.devnull, "w") as devnull:
+      dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun(
+          [args.dwp, '-e', exe_file, '-o', exe_file + '.dwp']),
+                                  stdout=devnull,
+                                  stderr=subprocess.STDOUT)
+
+  # Finally, strip the linked executable (if desired).
+  if args.strip:
+    result = subprocess.call(
+        CommandToRun([args.strip, '-o', args.output, args.unstripped_file]))
+
+  if dwp_proc:
+    dwp_result = dwp_proc.wait()
+    if dwp_result != 0:
+      sys.stderr.write('dwp failed with error code {}\n'.format(dwp_result))
+      return dwp_result
+
+  return result
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/toolchain/gcc_solink_wrapper.py b/src/build/toolchain/gcc_solink_wrapper.py
new file mode 100755
index 0000000..39aef4d
--- /dev/null
+++ b/src/build/toolchain/gcc_solink_wrapper.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("solink"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import shlex
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+def CollectSONAME(args):
+  """Replaces: readelf -d $sofile | grep SONAME"""
+  toc = ''
+  readelf = subprocess.Popen(wrapper_utils.CommandToRun(
+      [args.readelf, '-d', args.sofile]),
+                             stdout=subprocess.PIPE,
+                             bufsize=-1,
+                             universal_newlines=True)
+  for line in readelf.stdout:
+    if 'SONAME' in line:
+      toc += line
+  return readelf.wait(), toc
+
+
+def CollectDynSym(args):
+  """Replaces: nm --format=posix -g -D -p $sofile | cut -f1-2 -d' '"""
+  toc = ''
+  nm = subprocess.Popen(wrapper_utils.CommandToRun(
+      [args.nm, '--format=posix', '-g', '-D', '-p', args.sofile]),
+                        stdout=subprocess.PIPE,
+                        bufsize=-1,
+                        universal_newlines=True)
+  for line in nm.stdout:
+    toc += ' '.join(line.split(' ', 2)[:2]) + '\n'
+  return nm.wait(), toc
+
+
+def CollectTOC(args):
+  result, toc = CollectSONAME(args)
+  if result == 0:
+    result, dynsym = CollectDynSym(args)
+    toc += dynsym
+  return result, toc
+
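+# A sketch of the resulting TOC text (hypothetical library and symbols):
+#   0x000000000000000e (SONAME)             Library soname: [libfoo.so]
+#   foo_init T
+#   foo_run T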
+
+def UpdateTOC(tocfile, toc):
+  if os.path.exists(tocfile):
+    old_toc = open(tocfile, 'r').read()
+  else:
+    old_toc = None
+  if toc != old_toc:
+    open(tocfile, 'w').write(toc)
+
+
+def CollectInputs(out, args):
+  for x in args:
+    if x.startswith('@'):
+      with open(x[1:]) as rsp:
+        CollectInputs(out, shlex.split(rsp.read()))
+    elif not x.startswith('-') and (x.endswith('.o') or x.endswith('.a')):
+      out.write(x)
+      out.write('\n')
+
+
+def InterceptFlag(flag, command):
+  ret = flag in command
+  if ret:
+    command.remove(flag)
+  return ret
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--readelf',
+                      required=True,
+                      help='The readelf binary to run',
+                      metavar='PATH')
+  parser.add_argument('--nm',
+                      required=True,
+                      help='The nm binary to run',
+                      metavar='PATH')
+  parser.add_argument('--strip',
+                      help='The strip binary to run',
+                      metavar='PATH')
+  parser.add_argument('--dwp', help='The dwp binary to run', metavar='PATH')
+  parser.add_argument('--sofile',
+                      required=True,
+                      help='Shared object file produced by linking command',
+                      metavar='FILE')
+  parser.add_argument('--tocfile',
+                      required=True,
+                      help='Output table-of-contents file',
+                      metavar='FILE')
+  parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+                            'gzipped if extension ends with .gz'),
+                      metavar='FILE')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Final output shared object file',
+                      metavar='FILE')
+  parser.add_argument('command', nargs='+',
+                      help='Linking command')
+  args = parser.parse_args()
+
+  # Work-around for gold being slow-by-default. http://crbug.com/632230
+  fast_env = dict(os.environ)
+  fast_env['LC_ALL'] = 'C'
+
+  # Extract flags passed through ldflags but meant for this script.
+  # https://crbug.com/954311 tracks finding a better way to plumb these.
+  link_only = InterceptFlag('--link-only', args.command)
+  collect_inputs_only = InterceptFlag('--collect-inputs-only', args.command)
+
+  # If only linking, we are likely generating a partitioned .so that will be
+  # split apart later. In that case:
+  #
+  # - The TOC file optimization isn't useful, because the partition libraries
+  #   must always be re-extracted if the combined library changes (and nothing
+  #   should be depending on the combined library's dynamic symbol table).
+  # - Stripping isn't necessary, because the combined library is not used in
+  #   production or published.
+  #
+  # Both of these operations could still be done, but they'd be needless work,
+  # and tools would need to be updated to handle and/or not complain about
+  # partitioned libraries. Instead, to keep Ninja happy, simply create dummy
+  # files for the TOC and stripped lib.
+  if link_only or collect_inputs_only:
+    open(args.output, 'w').close()
+    open(args.tocfile, 'w').close()
+    if args.dwp:
+      open(args.sofile + '.dwp', 'w').close()
+
+  # Instead of linking, record all inputs to a file. This is used by
+  # enable_resource_allowlist_generation in order to avoid needing to
+  # link (which is slow) to build the resources allowlist.
+  if collect_inputs_only:
+    with open(args.sofile, 'w') as f:
+      CollectInputs(f, args.command)
+    if args.map_file:
+      open(args.map_file, 'w').close()
+    return 0
+
+  # First, run the actual link.
+  command = wrapper_utils.CommandToRun(args.command)
+  result = wrapper_utils.RunLinkWithOptionalMapFile(command,
+                                                    env=fast_env,
+                                                    map_file=args.map_file)
+
+  if result != 0 or link_only:
+    return result
+
+  # If dwp is set, then package debug info for this SO.
+  dwp_proc = None
+  if args.dwp:
+    # Suppress output here because it doesn't seem to be useful. The most
+    # common error is a segfault, which will happen if files are missing.
+    with open(os.devnull, "w") as devnull:
+      dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun(
+          [args.dwp, '-e', args.sofile, '-o', args.sofile + '.dwp']),
+                                  stdout=devnull,
+                                  stderr=subprocess.STDOUT)
+
+  # Next, generate the contents of the TOC file.
+  result, toc = CollectTOC(args)
+  if result != 0:
+    return result
+
+  # If there is an existing TOC file with identical contents, leave it alone.
+  # Otherwise, write out the TOC file.
+  UpdateTOC(args.tocfile, toc)
+
+  # Finally, strip the linked shared object file (if desired).
+  if args.strip:
+    result = subprocess.call(wrapper_utils.CommandToRun(
+        [args.strip, '-o', args.output, args.sofile]))
+
+  if dwp_proc:
+    dwp_result = dwp_proc.wait()
+    if dwp_result != 0:
+      return dwp_result
+
+  return result
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/toolchain/gcc_toolchain.gni b/src/build/toolchain/gcc_toolchain.gni
new file mode 100644
index 0000000..9418f5c
--- /dev/null
+++ b/src/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,680 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/v8_target_cpu.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_nacl) {
+  # To keep NaCl variables out of builds that don't include NaCl, all
+  # variables defined in nacl/config.gni referenced here should be protected by
+  # is_nacl conditions.
+  import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+  # Enables allowlist generation for IDR_ grit defines seen by the compiler.
+  # It currently works only on some platforms and is enabled by default for
+  # official builds. Requires debug info.
+  enable_resource_allowlist_generation =
+      is_official_build &&
+      # Don't enable for Android-on-Chrome OS so that they can build with
+      # symbol_level=0 without this failing (crbug.com/891164).
+      (target_os == "android" || target_os == "win")
+}
+
+# When the arg is set via args.gn, it applies to all toolchains. In order to not
+# hit the assert in grit_rule.gni, explicitly disable for host toolchains.
+if ((is_linux || is_chromeos) && target_os == "android") {
+  enable_resource_allowlist_generation = false
+}
+
+# Ensure enable_resource_allowlist_generation is enabled only when it will work.
+if (enable_resource_allowlist_generation) {
+  assert(symbol_level >= 1,
+         "enable_resource_allowlist_generation=true requires symbol_level >= 1")
+  assert(
+      !strip_debug_info,
+      "enable_resource_allowlist_generation=true requires strip_debug_info=false")
+  assert(
+      !is_component_build,
+      "enable_resource_allowlist_generation=true requires is_component_build=false")
+  assert(
+      target_os == "android" || target_os == "win",
+      "enable_resource_allowlist_generation=true does not work for target_os=$target_os")
+}
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+#  - ar
+#  - cc
+#  - cxx
+#  - ld
+#
+# Optional parameters that control the tools:
+#
+#  - extra_cflags
+#      Extra flags to be appended when compiling C files (but not C++ files).
+#  - extra_cppflags
+#      Extra flags to be appended when compiling both C and C++ files. "CPP"
+#      stands for "C PreProcessor" in this context, although it can be
+#      used for non-preprocessor flags as well. Not to be confused with
+#      "CXX" (which follows).
+#  - extra_cxxflags
+#      Extra flags to be appended when compiling C++ files (but not C files).
+#  - extra_asmflags
+#      Extra flags to be appended when compiling assembly.
+#  - extra_ldflags
+#      Extra flags to be appended when linking
+#
+#  - link_outputs
+#      The content of this array, if specified, will be added to the list of
+#      outputs from the link command. This can be useful in conjunction with
+#      the post_link parameter.
+#  - use_unstripped_as_runtime_outputs
+#      When |strip| is set, mark unstripped executables as runtime deps rather
+#      than stripped ones.
+#  - post_link
+#      The content of this string, if specified, will be run as a separate
+#      command following the link command.
+#  - deps
+#      Just forwarded to the toolchain definition.
+#  - executable_extension
+#      If this string is specified it will be used for the file extension
+#      for an executable, rather than using no extension; targets will
+#      still be able to override the extension using the output_extension
+#      variable.
+#  - rebuild_define
+#      The contents of this string, if specified, will be passed as a #define
+#      to the toolchain. It can be used to force recompiles whenever a
+#      toolchain is updated.
+#  - shlib_extension
+#      If this string is specified it will be used for the file extension
+#      for a shared library, rather than default value specified in
+#      toolchain.gni
+#  - strip
+#      Location of the strip executable. When specified, strip will be run on
+#      all shared libraries and executables as they are built. The pre-stripped
+#      artifacts will be put in lib.unstripped/ and exe.unstripped/.
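+#
+# A minimal invocation sketch (hypothetical tool names and toolchain name):
+#   gcc_toolchain("hypothetical_arm") {
+#     ar = "arm-linux-gnueabihf-ar"
+#     cc = "arm-linux-gnueabihf-gcc"
+#     cxx = "arm-linux-gnueabihf-g++"
+#     ld = cxx
+#     toolchain_args = {
+#       current_cpu = "arm"
+#       current_os = "linux"
+#     }
+#   }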
+template("gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+
+    # This define changes when the toolchain changes, forcing a rebuild.
+    # Nothing should ever use this define.
+    if (defined(invoker.rebuild_define)) {
+      rebuild_string = "-D" + invoker.rebuild_define + " "
+    } else {
+      rebuild_string = ""
+    }
+
+    # GN's syntax can't handle more than one scope dereference at once, like
+    # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
+    # args so we can do "invoker_toolchain_args.foo".
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must specify toolchain_args")
+    invoker_toolchain_args = invoker.toolchain_args
+    assert(defined(invoker_toolchain_args.current_cpu),
+           "toolchain_args must specify a current_cpu")
+    assert(defined(invoker_toolchain_args.current_os),
+           "toolchain_args must specify a current_os")
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker_toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may be
+      # set on those toolchains.
+      host_toolchain = host_toolchain
+
+      if (!defined(invoker_toolchain_args.v8_current_cpu)) {
+        v8_current_cpu = invoker_toolchain_args.current_cpu
+      }
+    }
+
+    # When the invoker has explicitly overridden use_rbe, use_goma or
+    # cc_wrapper in the toolchain args, use those values, otherwise default
+    # to the global one.  This works because the only reasonable override
+    # that toolchains might supply for these values are to force-disable them.
+    if (defined(toolchain_args.use_rbe)) {
+      toolchain_uses_rbe = toolchain_args.use_rbe
+    } else {
+      toolchain_uses_rbe = use_rbe
+    }
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+
+    # x86_64-nacl-* is ELF-32 and Goma/RBE won't support ELF-32.
+    if (toolchain_uses_goma &&
+        get_path_info(invoker.cc, "name") == "x86_64-nacl-gcc") {
+      # This will also disable x86_64-nacl-g++, since the two are in the
+      # same toolchain.
+      toolchain_uses_goma = false
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+    assert(!(toolchain_uses_rbe && toolchain_uses_goma),
+           "Goma and RBE can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_rbe),
+           "RBE and cc_wrapper can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+           "Goma and cc_wrapper can't be used together.")
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    # But if needs_gomacc_path_arg is set in a Chrome OS build, the toolchain
+    # wrapper will have picked up gomacc via cmd-line arg, so there is no need
+    # to prepend gomacc in that case.
+    goma_path = "$goma_dir/gomacc"
+    if (toolchain_uses_rbe) {
+      if (defined(toolchain_args.rbe_cc_cfg_file)) {
+        toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file
+      } else {
+        toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file
+      }
+
+      # C/C++ (clang) rewrapper prefix to use when use_rbe is true.
+      compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+    } else if (toolchain_uses_goma &&
+               (!defined(invoker_toolchain_args.needs_gomacc_path_arg) ||
+                !invoker_toolchain_args.needs_gomacc_path_arg)) {
+      compiler_prefix = "${goma_path} "
+    } else {
+      compiler_prefix = "${toolchain_cc_wrapper} "
+    }
+    if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) {
+      # goma_ld.py uses autoninja in an attempt to set a reasonable
+      # number of jobs, but this results in too low a value on
+      # Chrome OS builders. So we pass in an explicit value.
+      goma_ld =
+          "$python_path " +
+          rebase_path("//tools/clang/scripts/goma_ld.py", root_build_dir) +
+          " --gomacc ${goma_path} --jobs 200 -- "
+    } else {
+      goma_ld = ""
+      not_needed([ "goma_path" ])
+    }
+
+    # Create a distinct variable for "asm", since coverage runs pass a bunch of
+    # flags to clang/clang++ that are nonsensical on assembler runs.
+    asm_prefix = compiler_prefix
+
+    # A specific toolchain may wish to avoid coverage instrumentation, so we
+    # allow the global "use_clang_coverage" arg to be overridden.
+    if (defined(toolchain_args.use_clang_coverage)) {
+      toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
+    } else {
+      toolchain_use_clang_coverage = use_clang_coverage
+    }
+
+    # For a coverage build, we use the wrapper script globally so that it can
+    # remove coverage cflags from files that should not have them.
+    if (toolchain_use_clang_coverage) {
+      # "coverage_instrumentation_input_file" is set in args.gn, but it can be
+      # overridden by a toolchain config.
+      if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+        toolchain_coverage_instrumentation_input_file =
+            toolchain_args.coverage_instrumentation_input_file
+      } else {
+        toolchain_coverage_instrumentation_input_file =
+            coverage_instrumentation_input_file
+      }
+
+      _coverage_wrapper =
+          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+                      root_build_dir)
+
+      # The wrapper needs to know what OS we target because it uses that to
+      # select a list of files that should not be instrumented.
+      _coverage_wrapper = _coverage_wrapper + " --target-os=" + target_os
+
+      # We want to instrument everything if there is no input file set.
+      # If there is a file we need to give it to the wrapper script so it can
+      # instrument only those files.
+      if (toolchain_coverage_instrumentation_input_file != "") {
+        _coverage_wrapper =
+            _coverage_wrapper + " --files-to-instrument=" +
+            rebase_path(toolchain_coverage_instrumentation_input_file,
+                        root_build_dir)
+      }
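+
+      # With the wrapper prepended (below), the full compiler invocation looks
+      # roughly like this (hypothetical values):
+      #   python clang_code_coverage_wrapper.py --target-os=linux \
+      #       --files-to-instrument=coverage_instrumentation_input.txt \
+      #       gomacc clang++ ... -c foo.cc -o foo.o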
+      compiler_prefix = "$python_path ${_coverage_wrapper} " + compiler_prefix
+    }
+
+    cc = compiler_prefix + invoker.cc
+    cxx = compiler_prefix + invoker.cxx
+    asm = asm_prefix + invoker.cc
+    ar = invoker.ar
+    ld = "$goma_ld${invoker.ld}"
+    if (defined(invoker.readelf)) {
+      readelf = invoker.readelf
+    } else {
+      readelf = "readelf"
+    }
+    if (defined(invoker.nm)) {
+      nm = invoker.nm
+    } else {
+      nm = "nm"
+    }
+    if (defined(invoker.dwp)) {
+      dwp_switch = " --dwp=\"${invoker.dwp}\""
+    } else {
+      dwp_switch = ""
+    }
+
+    if (defined(invoker.shlib_extension)) {
+      default_shlib_extension = invoker.shlib_extension
+    } else {
+      default_shlib_extension = shlib_extension
+    }
+
+    if (defined(invoker.default_shlib_subdir)) {
+      default_shlib_subdir = invoker.default_shlib_subdir
+    } else {
+      default_shlib_subdir = ""
+    }
+
+    if (defined(invoker.executable_extension)) {
+      default_executable_extension = invoker.executable_extension
+    } else {
+      default_executable_extension = ""
+    }
+
+    # Bring these into our scope for string interpolation with default values.
+    if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
+      extra_cflags = " " + invoker.extra_cflags
+    } else {
+      extra_cflags = ""
+    }
+
+    if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
+      extra_cppflags = " " + invoker.extra_cppflags
+    } else {
+      extra_cppflags = ""
+    }
+
+    if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
+      extra_cxxflags = " " + invoker.extra_cxxflags
+    } else {
+      extra_cxxflags = ""
+    }
+
+    if (defined(invoker.extra_asmflags) && invoker.extra_asmflags != "") {
+      extra_asmflags = " " + invoker.extra_asmflags
+    } else {
+      extra_asmflags = ""
+    }
+
+    if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
+      extra_ldflags = " " + invoker.extra_ldflags
+    } else {
+      extra_ldflags = ""
+    }
+
+    enable_linker_map = defined(invoker.enable_linker_map) &&
+                        invoker.enable_linker_map && generate_linker_map
+
+    # These library switches can apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    # Object files go in this directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+    }
+
+    tool("alink") {
+      if (current_os == "aix") {
+        # AIX does not support either -D (deterministic output) or response
+        # files.
+        command = "$ar -X64 {{arflags}} -r -c -s {{output}} {{inputs}}"
+      } else {
+        rspfile = "{{output}}.rsp"
+        rspfile_content = "{{inputs}}"
+        command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @\"$rspfile\""
+      }
+
+      # Remove the output file first so that ar doesn't try to modify the
+      # existing file.
+      if (host_os == "win") {
+        tool_wrapper_path =
+            rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+        command = "cmd /c $python_path $tool_wrapper_path delete-file {{output}} && $command"
+      } else {
+        command = "rm -f {{output}} && $command"
+      }
+
+      # Almost all targets build with //build/config/compiler:thin_archive which
+      # adds -T to arflags.
+      description = "AR {{output}}"
+      outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]
+
+      # Static libraries go in the target out directory by default so we can
+      # generate different targets with the same name and not have them collide.
+      default_output_dir = "{{target_out_dir}}"
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{output_dir}}/$soname"  # Possibly including toolchain dir.
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      if (defined(invoker.strip)) {
+        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+      } else {
+        unstripped_sofile = sofile
+      }
+
+      # These variables are not built into GN but are helpers that
+      # implement (1) linking to produce a .so, (2) extracting the symbols
+      # from that file, and (3) overwriting the existing .TOC file only if
+      # the extracted symbol list differs from it.
+      tocfile = sofile + ".TOC"
+
+      link_command = "$ld -shared -Wl,-soname=\"$soname\" {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\""
+
+      # Generate a map file to be used for binary size analysis.
+      # Map file adds ~10% to the link time on a z620.
+      # With target_os="android", libchrome.so.map.gz is ~20MB.
+      map_switch = ""
+      if (enable_linker_map) {
+        map_file = "$unstripped_sofile.map.gz"
+        map_switch = " --map-file \"$map_file\""
+      }
+
+      assert(defined(readelf), "to solink you must have a readelf")
+      assert(defined(nm), "to solink you must have an nm")
+      strip_switch = ""
+      if (defined(invoker.strip)) {
+        strip_switch = "--strip=${invoker.strip} "
+      }
+
+      # This needs a Python script to avoid using a complex shell command
+      # requiring sh control structures, pipelines, and POSIX utilities.
+      # The host might not have a POSIX shell and utilities (e.g. Windows).
+      solink_wrapper =
+          rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir)
+      command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command"
+
+      if (target_cpu == "mipsel" && is_component_build && is_android) {
+        rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group"
+      } else {
+        rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
+      }
+
+      description = "SOLINK $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = default_shlib_extension
+
+      default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+
+      output_prefix = "lib"
+
+      # Since the above command only updates the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the sofile but use the
+      # tocfile for dependency management.
+      outputs = [
+        sofile,
+        tocfile,
+      ]
+      if (sofile != unstripped_sofile) {
+        outputs += [ unstripped_sofile ]
+        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+            invoker.use_unstripped_as_runtime_outputs) {
+          runtime_outputs = [ unstripped_sofile ]
+        }
+      }
+
+      # Clank build will generate DWP files when Fission is used.
+      # Other builds generate DWP files outside of the gn link targets, if at
+      # all.
+      if (defined(invoker.dwp)) {
+        outputs += [ unstripped_sofile + ".dwp" ]
+        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+            invoker.use_unstripped_as_runtime_outputs) {
+          runtime_outputs += [ unstripped_sofile + ".dwp" ]
+        }
+      }
+      if (defined(map_file)) {
+        outputs += [ map_file ]
+      }
+      link_output = sofile
+      depend_output = tocfile
+    }
+
+    tool("solink_module") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{output_dir}}/$soname"
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      if (defined(invoker.strip)) {
+        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+      } else {
+        unstripped_sofile = sofile
+      }
+
+      command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+      if (defined(invoker.strip)) {
+        strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
+        command += " && " + strip_command
+      }
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
+
+      description = "SOLINK_MODULE $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      if (defined(invoker.loadable_module_extension)) {
+        default_output_extension = invoker.loadable_module_extension
+      } else {
+        default_output_extension = default_shlib_extension
+      }
+
+      default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+
+      output_prefix = "lib"
+
+      outputs = [ sofile ]
+      if (sofile != unstripped_sofile) {
+        outputs += [ unstripped_sofile ]
+        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+            invoker.use_unstripped_as_runtime_outputs) {
+          runtime_outputs = [ unstripped_sofile ]
+        }
+      }
+    }
+
+    tool("link") {
+      exename = "{{target_output_name}}{{output_extension}}"
+      outfile = "{{output_dir}}/$exename"
+      rspfile = "$outfile.rsp"
+      unstripped_outfile = outfile
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = default_executable_extension
+
+      default_output_dir = "{{root_out_dir}}"
+
+      if (defined(invoker.strip)) {
+        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
+      }
+
+      start_group_flag = ""
+      end_group_flag = ""
+      if (current_os != "aix") {
+        # the "--start-group .. --end-group" feature isn't available on the aix ld.
+        start_group_flag = "-Wl,--start-group"
+        end_group_flag = "-Wl,--end-group "
+      }
+      link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag {{libs}}"
+
+      # Generate a map file to be used for binary size analysis.
+      # Map file adds ~10% to the link time on a z620.
+      # With target_os="android", libchrome.so.map.gz is ~20MB.
+      map_switch = ""
+      if (enable_linker_map) {
+        map_file = "$unstripped_outfile.map.gz"
+        map_switch = " --map-file \"$map_file\""
+      }
+
+      strip_switch = ""
+      if (defined(invoker.strip)) {
+        strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
+      }
+
+      link_wrapper =
+          rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+      command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command"
+
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs}}"
+      outputs = [ outfile ]
+      if (outfile != unstripped_outfile) {
+        outputs += [ unstripped_outfile ]
+        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+            invoker.use_unstripped_as_runtime_outputs) {
+          runtime_outputs = [ unstripped_outfile ]
+        }
+      }
+
+      # The Clank build will generate DWP files when Fission is used.
+      # Other builds generate DWP files outside of the gn link targets, if at
+      # all.
+      if (defined(invoker.dwp)) {
+        outputs += [ unstripped_outfile + ".dwp" ]
+        if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+            invoker.use_unstripped_as_runtime_outputs) {
+          runtime_outputs += [ unstripped_outfile + ".dwp" ]
+        }
+      }
+      if (defined(invoker.link_outputs)) {
+        outputs += invoker.link_outputs
+      }
+      if (defined(map_file)) {
+        outputs += [ map_file ]
+      }
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+    }
+
+    tool("action") {
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+  }
+}
+
+# This is a shorthand for gcc_toolchain instances based on the Chromium-built
+# version of Clang. The invoker only needs to supply toolchain_args (setting
+# current_cpu and current_os), and optionally toolprefix if it's a
+# cross-compile case. Note that for a cross-compile case this toolchain
+# requires a config to pass the appropriate -target option, or else it will
+# actually just be doing a native compile. The invoker can also optionally
+# forward strip, default_shlib_subdir, enable_linker_map, and
+# use_unstripped_as_runtime_outputs.
+template("clang_toolchain") {
+  if (defined(invoker.toolprefix)) {
+    toolprefix = invoker.toolprefix
+  } else {
+    toolprefix = ""
+  }
+
+  gcc_toolchain(target_name) {
+    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cc = "$prefix/clang"
+    cxx = "$prefix/clang++"
+    ld = cxx
+    readelf = "${toolprefix}readelf"
+    ar = "${prefix}/llvm-ar"
+    nm = "${prefix}/llvm-nm"
+
+    forward_variables_from(invoker,
+                           [
+                             "strip",
+                             "default_shlib_subdir",
+                             "enable_linker_map",
+                             "use_unstripped_as_runtime_outputs",
+                           ])
+
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+      is_clang = true
+    }
+  }
+}
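+
+# Example use (mirrors the clang_arm64 toolchain declared in
+# //build/toolchain/linux/BUILD.gn):
+#
+#   clang_toolchain("clang_arm64") {
+#     toolprefix = "aarch64-linux-gnu-"
+#     toolchain_args = {
+#       current_cpu = "arm64"
+#       current_os = "linux"
+#     }
+#   }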
diff --git a/src/build/toolchain/get_concurrent_links.py b/src/build/toolchain/get_concurrent_links.py
new file mode 100755
index 0000000..d77bb43
--- /dev/null
+++ b/src/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the
+# build as a function of the machine spec. It's based on
+# GetDefaultConcurrentLinks in GYP.
+
+from __future__ import print_function
+
+import argparse
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+import gn_helpers
+
+
+def _GetTotalMemoryInBytes():
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+          ("dwLength", ctypes.c_ulong),
+          ("dwMemoryLoad", ctypes.c_ulong),
+          ("ullTotalPhys", ctypes.c_ulonglong),
+          ("ullAvailPhys", ctypes.c_ulonglong),
+          ("ullTotalPageFile", ctypes.c_ulonglong),
+          ("ullAvailPageFile", ctypes.c_ulonglong),
+          ("ullTotalVirtual", ctypes.c_ulonglong),
+          ("ullAvailVirtual", ctypes.c_ulonglong),
+          ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+    return stat.ullTotalPhys
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
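+        # e.g. the line "MemTotal:       65831424 kB" yields
+        # 65831424 * 2**10 bytes.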
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          return float(match.group(1)) * 2**10
+  elif sys.platform == 'darwin':
+    try:
+      return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+    except Exception:
+      return 0
+  # TODO(scottmg): Implement this for other platforms.
+  return 0
+
+
+def _GetDefaultConcurrentLinks(per_link_gb, reserve_gb, thin_lto_type,
+                               secondary_per_link_gb):
+  explanation = []
+  explanation.append(
+      'per_link_gb={} reserve_gb={} secondary_per_link_gb={}'.format(
+          per_link_gb, reserve_gb, secondary_per_link_gb))
+  mem_total_gb = float(_GetTotalMemoryInBytes()) / 2**30
+  mem_total_gb = max(0, mem_total_gb - reserve_gb)
+  mem_cap = int(max(1, mem_total_gb / per_link_gb))
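+  # Worked example (illustrative): 64 GiB of usable RAM with per_link_gb=8
+  # gives mem_cap = int(max(1, 64 / 8)) = 8 concurrent links.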
+
+  try:
+    cpu_count = multiprocessing.cpu_count()
+  except:
+    cpu_count = 1
+
+  # Local LTO links saturate all cores, but only for part of the link.
+  # Goma LTO runs LTO codegen on goma, so only run one of those tasks at once.
+  cpu_cap = cpu_count
+  if thin_lto_type is not None:
+    if thin_lto_type == 'goma':
+      cpu_cap = 1
+    else:
+      assert thin_lto_type == 'local'
+      cpu_cap = min(cpu_count, 6)
+
+  explanation.append('cpu_count={} cpu_cap={} mem_total_gb={:.1f}GiB'.format(
+      cpu_count, cpu_cap, mem_total_gb))
+
+  num_links = min(mem_cap, cpu_cap)
+  if num_links == cpu_cap:
+    if cpu_cap == cpu_count:
+      reason = 'cpu_count'
+    else:
+      reason = 'cpu_cap (thinlto)'
+  else:
+    reason = 'RAM'
+
+  explanation.append('concurrent_links={}  (reason: {})'.format(
+      num_links, reason))
+
+  # See if there is RAM leftover for a secondary pool.
+  if secondary_per_link_gb and num_links == mem_cap:
+    mem_remaining = mem_total_gb - mem_cap * per_link_gb
+    secondary_size = int(max(0, mem_remaining / secondary_per_link_gb))
+    explanation.append('secondary_size={} (mem_remaining={:.1f}GiB)'.format(
+        secondary_size, mem_remaining))
+  else:
+    secondary_size = 0
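+
+  # Illustrative numbers: with 70 GiB usable RAM, per_link_gb=8 and
+  # secondary_per_link_gb=2 (and num_links == mem_cap == 8), the primary pool
+  # uses 64 GiB, leaving 6 GiB for a secondary pool of size 3.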
+
+  return num_links, secondary_size, explanation
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--mem_per_link_gb', type=int, default=8)
+  parser.add_argument('--reserve_mem_gb', type=int, default=0)
+  parser.add_argument('--secondary_mem_per_link', type=int, default=0)
+  parser.add_argument('--thin-lto')
+  options = parser.parse_args()
+
+  primary_pool_size, secondary_pool_size, explanation = (
+      _GetDefaultConcurrentLinks(options.mem_per_link_gb,
+                                 options.reserve_mem_gb, options.thin_lto,
+                                 options.secondary_mem_per_link))
+  sys.stdout.write(
+      gn_helpers.ToGNString({
+          'primary_pool_size': primary_pool_size,
+          'secondary_pool_size': secondary_pool_size,
+          'explanation': explanation,
+      }))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/get_cpu_count.py b/src/build/toolchain/get_cpu_count.py
new file mode 100644
index 0000000..765c7c7
--- /dev/null
+++ b/src/build/toolchain/get_cpu_count.py
@@ -0,0 +1,23 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script prints the CPU count, used to set the action pool's capacity.
+
+from __future__ import print_function
+
+import multiprocessing
+import sys
+
+def main():
+  try:
+    cpu_count = multiprocessing.cpu_count()
+  except:
+    cpu_count = 1
+
+  print(cpu_count)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/get_goma_dir.py b/src/build/toolchain/get_goma_dir.py
new file mode 100644
index 0000000..114da6c
--- /dev/null
+++ b/src/build/toolchain/get_goma_dir.py
@@ -0,0 +1,42 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script prints the default goma_dir for depot_tools goma.
+
+import os
+import sys
+
+
+def main():
+  gomacc = 'gomacc'
+  candidates = []
+  if sys.platform in ['win32', 'cygwin']:
+    gomacc = 'gomacc.exe'
+
+  for path in os.environ.get('PATH', '').split(os.pathsep):
+    # normpath() required to strip trailing slash when present.
+    if os.path.basename(os.path.normpath(path)) == 'depot_tools':
+      candidates.append(os.path.join(path, '.cipd_bin'))
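+      # e.g. a PATH entry "/home/user/depot_tools" (hypothetical path) yields
+      # the candidate "/home/user/depot_tools/.cipd_bin".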
+
+  for d in candidates:
+    if os.path.isfile(os.path.join(d, gomacc)):
+      sys.stdout.write(d)
+      return 0
+  # The mb analyze step sets use_goma=true but goma_dir="", and the bot
+  # doesn't have goma in the default location above. To mitigate this,
+  # just use the initial depot_tools path, or the default path as before
+  # (if depot_tools doesn't exist in PATH).
+  # TODO(ukai): crbug.com/1073276: fix mb analyze step and make it hard error?
+  if sys.platform in ['win32', 'cygwin']:
+    sys.stdout.write('C:\\src\\goma\\goma-win64')
+  elif 'GOMA_DIR' in os.environ:
+    sys.stdout.write(os.environ.get('GOMA_DIR'))
+  else:
+    sys.stdout.write(os.path.join(os.environ.get('HOME', ''), 'goma'))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/goma.gni b/src/build/toolchain/goma.gni
new file mode 100644
index 0000000..56787f1
--- /dev/null
+++ b/src/build/toolchain/goma.gni
@@ -0,0 +1,35 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+
+declare_args() {
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # This flag is for the ChromeOS compiler wrapper.
+  # By passing the gomacc path via a command-line arg, the ChromeOS compiler
+  # wrapper invokes gomacc inside it.
+  needs_gomacc_path_arg = false
+
+  # Absolute directory containing the gomacc binary.
+  goma_dir = ""
+}
+
+if (use_goma && goma_dir == "") {
+  goma_dir = exec_script("get_goma_dir.py", [], "string")
+}
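+
+# Example args.gn (illustrative values):
+#
+#   use_goma = true
+#   goma_dir = "/home/user/goma"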
+
+declare_args() {
+  # TODO(crbug.com/726475): true if use_goma = true in the future.
+  use_java_goma = false
+
+  # Deprecated and ignored as Goma RBE is now the default. Still exists
+  # to avoid breaking the build on the bots. Will be removed when all
+  # bots have been configured to not set this variable.
+  ios_use_goma_rbe = -1
+}
+
+assert(!is_win || !use_goma || is_clang,
+       "cl.exe does not work on goma, use clang")
diff --git a/src/build/toolchain/ios/BUILD.gn b/src/build/toolchain/ios/BUILD.gn
new file mode 100644
index 0000000..bc73529
--- /dev/null
+++ b/src/build/toolchain/ios/BUILD.gn
@@ -0,0 +1,82 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/toolchain/apple/toolchain.gni")
+
+# Specialisation of the apple_toolchain template to declare the toolchain
+# and its tools to build target for iOS platform.
+template("ios_toolchain") {
+  assert(defined(invoker.toolchain_args),
+         "Toolchains must declare toolchain_args")
+
+  apple_toolchain(target_name) {
+    forward_variables_from(invoker, "*", [ "toolchain_args" ])
+
+    sdk_developer_dir = ios_sdk_developer_dir
+    deployment_target = ios_deployment_target
+    sdk_name = ios_sdk_name
+    bin_path = ios_bin_path
+    strip_with_lipo = true
+
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+      current_os = "ios"
+    }
+  }
+}
+
+ios_toolchain("ios_clang_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+ios_toolchain("ios_clang_arm64_14_0") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    ios_deployment_target = "14.0"
+  }
+}
+
+ios_toolchain("ios_clang_arm") {
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+ios_toolchain("ios_clang_arm_fat") {
+  toolchain_args = {
+    current_cpu = "arm"
+    is_fat_secondary_toolchain = true
+    primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_arm64"
+  }
+}
+
+ios_toolchain("ios_clang_x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+ios_toolchain("ios_clang_x64_14_0") {
+  toolchain_args = {
+    current_cpu = "x64"
+    ios_deployment_target = "14.0"
+  }
+}
+
+ios_toolchain("ios_clang_x86") {
+  toolchain_args = {
+    current_cpu = "x86"
+  }
+}
+
+ios_toolchain("ios_clang_x86_fat") {
+  toolchain_args = {
+    current_cpu = "x86"
+    is_fat_secondary_toolchain = true
+    primary_fat_toolchain_name = "//build/toolchain/ios:ios_clang_x64"
+  }
+}
diff --git a/src/build/toolchain/ios/compile_xcassets.py b/src/build/toolchain/ios/compile_xcassets.py
new file mode 100644
index 0000000..e160665
--- /dev/null
+++ b/src/build/toolchain/ios/compile_xcassets.py
@@ -0,0 +1,261 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+"""Wrapper around actool to compile assets catalog.
+
+The script compile_xcassets.py is a wrapper around actool to compile
+assets catalog to Assets.car that turns warning into errors. It also
+fixes some quirks of actool to make it work from ninja (mostly that
+actool seems to require absolute path but gn generates command-line
+with relative paths).
+
+The wrapper filter out any message that is not a section header and
+not a warning or error message, and fails if filtered output is not
+empty. This should to treat all warnings as error until actool has
+an option to fail with non-zero error code when there are warnings.
+"""
+
+# Pattern matching a section header in the output of actool.
+SECTION_HEADER = re.compile('^/\\* ([^ ]*) \\*/$')
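+# e.g. it matches the line "/* com.apple.actool.errors */", capturing
+# "com.apple.actool.errors" as group(1).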
+
+# Name of the section containing informational messages that can be ignored.
+NOTICE_SECTION = 'com.apple.actool.compilation-results'
+
+# Regular expressions matching spurious messages from actool that should be
+# ignored (as they are bogus). Generally a bug should be filed with Apple
+# when adding a pattern here.
+SPURIOUS_PATTERNS = [
+    re.compile(v) for v in [
+        # crbug.com/770634, likely a bug in Xcode 9.1 beta, remove once build
+        # requires a version of Xcode with a fix.
+        r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: \(null\)',
+
+        # crbug.com/770634, likely a bug in Xcode 9.2 beta, remove once build
+        # requires a version of Xcode with a fix.
+        r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: 76x76@1x app'
+        ' icons only apply to iPad apps targeting releases of iOS prior to'
+        ' 10.0.',
+    ]
+]
+
+# Map special types of asset catalogs to the corresponding command-line
+# parameter that needs to be passed to actool.
+ACTOOL_FLAG_FOR_ASSET_TYPE = {
+    '.appiconset': '--app-icon',
+    '.launchimage': '--launch-image',
+}
+
+
+def IsSpuriousMessage(line):
+  """Returns whether line contains a spurious message that should be ignored."""
+  for pattern in SPURIOUS_PATTERNS:
+    match = pattern.search(line)
+    if match is not None:
+      return True
+  return False
+
+
+def FilterCompilerOutput(compiler_output, relative_paths):
+  """Filers actool compilation output.
+
+  The compiler output is composed of multiple sections for each different
+  level of output (error, warning, notices, ...). Each section starts with
+  the section name on a single line, followed by all the messages from the
+  section.
+
+  The function filters out any lines that are not in the
+  com.apple.actool.errors or com.apple.actool.document.warnings sections
+  (as spurious messages come before any section of the output).
+
+  See crbug.com/730054, crbug.com/739163 and crbug.com/770634 for some example
+  messages that pollute the output of actool and cause flaky builds.
+
+  Args:
+    compiler_output: string containing the output generated by the
+      compiler (contains both stdout and stderr)
+    relative_paths: mapping from absolute to relative paths used to
+      convert paths in the warning and error messages (unknown paths
+      will be left unaltered)
+
+  Returns:
+    The filtered output of the compiler. If the compilation was a
+    success, then the output will be empty; otherwise it will use
+    relative paths and omit any irrelevant output.
+  """
+
+  filtered_output = []
+  current_section = None
+  data_in_section = False
+  for line in compiler_output.splitlines():
+    match = SECTION_HEADER.search(line)
+    if match is not None:
+      data_in_section = False
+      current_section = match.group(1)
+      continue
+    if current_section and current_section != NOTICE_SECTION:
+      if IsSpuriousMessage(line):
+        continue
+      absolute_path = line.split(':')[0]
+      relative_path = relative_paths.get(absolute_path, absolute_path)
+      if absolute_path != relative_path:
+        line = relative_path + line[len(absolute_path):]
+      if not data_in_section:
+        data_in_section = True
+        filtered_output.append('/* %s */\n' % current_section)
+      filtered_output.append(line + '\n')
+
+  return ''.join(filtered_output)
+
+
+def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
+                        inputs, compress_pngs, partial_info_plist):
+  """Compile the .xcassets bundles to an asset catalog using actool.
+
+  Args:
+    output: absolute path to the containing bundle
+    platform: the targeted platform
+    product_type: the bundle type
+    min_deployment_target: minimum deployment target
+    inputs: list of absolute paths to .xcassets bundles
+    compress_pngs: whether to enable compression of pngs
+    partial_info_plist: path to partial Info.plist to generate
+  """
+  command = [
+      'xcrun',
+      'actool',
+      '--output-format=human-readable-text',
+      '--notices',
+      '--warnings',
+      '--errors',
+      '--platform',
+      platform,
+      '--minimum-deployment-target',
+      min_deployment_target,
+  ]
+
+  if compress_pngs:
+    command.extend(['--compress-pngs'])
+
+  if product_type != '':
+    command.extend(['--product-type', product_type])
+
+  if platform == 'macosx':
+    command.extend(['--target-device', 'mac'])
+  else:
+    command.extend(['--target-device', 'iphone', '--target-device', 'ipad'])
+
+  # Scan the input directories for the presence of asset catalog types that
+  # require special treatment, and if so, add them to the actool command-line.
+  for relative_path in inputs:
+
+    if not os.path.isdir(relative_path):
+      continue
+
+    for file_or_dir_name in os.listdir(relative_path):
+      if not os.path.isdir(os.path.join(relative_path, file_or_dir_name)):
+        continue
+
+      asset_name, asset_type = os.path.splitext(file_or_dir_name)
+      if asset_type not in ACTOOL_FLAG_FOR_ASSET_TYPE:
+        continue
+
+      command.extend([ACTOOL_FLAG_FOR_ASSET_TYPE[asset_type], asset_name])
+
+  # Always ask actool to generate a partial Info.plist file. If no path
+  # has been given by the caller, use a temporary file name.
+  temporary_file = None
+  if not partial_info_plist:
+    temporary_file = tempfile.NamedTemporaryFile(suffix='.plist')
+    partial_info_plist = temporary_file.name
+
+  command.extend(['--output-partial-info-plist', partial_info_plist])
+
+  # Dictionary used to convert absolute paths back to their relative form
+  # in the output of actool.
+  relative_paths = {}
+
+  # actool crashes if paths are relative, so convert input and output paths
+  # to absolute paths, and record the relative paths to fix them back when
+  # filtering the output.
+  absolute_output = os.path.abspath(output)
+  relative_paths[absolute_output] = output
+  relative_paths[os.path.dirname(absolute_output)] = os.path.dirname(output)
+  command.extend(['--compile', os.path.dirname(os.path.abspath(output))])
+
+  for relative_path in inputs:
+    absolute_path = os.path.abspath(relative_path)
+    relative_paths[absolute_path] = relative_path
+    command.append(absolute_path)
+
+  try:
+    # Run actool and redirect stdout and stderr to the same pipe (as actool
+    # is confused about what should go to stderr/stdout).
+    process = subprocess.Popen(command,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.STDOUT)
+    stdout, _ = process.communicate()
+
+    # Filter the output to remove all garbage and to fix the paths.
+    stdout = FilterCompilerOutput(stdout.decode('UTF-8'), relative_paths)
+
+    if process.returncode or stdout:
+      sys.stderr.write(stdout)
+      sys.exit(1)
+
+  finally:
+    if temporary_file:
+      temporary_file.close()
+
+
+def Main():
+  parser = argparse.ArgumentParser(
+      description='compile assets catalog for a bundle')
+  parser.add_argument('--platform',
+                      '-p',
+                      required=True,
+                      choices=('macosx', 'iphoneos', 'iphonesimulator'),
+                      help='target platform for the compiled assets catalog')
+  parser.add_argument(
+      '--minimum-deployment-target',
+      '-t',
+      required=True,
+      help='minimum deployment target for the compiled assets catalog')
+  parser.add_argument('--output',
+                      '-o',
+                      required=True,
+                      help='path to the compiled assets catalog')
+  parser.add_argument('--compress-pngs',
+                      '-c',
+                      action='store_true',
+                      default=False,
+                      help='recompress PNGs while compiling assets catalog')
+  parser.add_argument('--product-type',
+                      '-T',
+                      help='type of the containing bundle')
+  parser.add_argument('--partial-info-plist',
+                      '-P',
+                      help='path to partial info plist to create')
+  parser.add_argument('inputs',
+                      nargs='+',
+                      help='path to input assets catalog sources')
+  args = parser.parse_args()
+
+  if os.path.basename(args.output) != 'Assets.car':
+    sys.stderr.write('output should be path to compiled asset catalog, not '
+                     'to the containing bundle: %s\n' % (args.output, ))
+    sys.exit(1)
+
+  CompileAssetCatalog(args.output, args.platform, args.product_type,
+                      args.minimum_deployment_target, args.inputs,
+                      args.compress_pngs, args.partial_info_plist)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/src/build/toolchain/ios/swiftc.py b/src/build/toolchain/ios/swiftc.py
new file mode 100755
index 0000000..2f4bb58
--- /dev/null
+++ b/src/build/toolchain/ios/swiftc.py
@@ -0,0 +1,176 @@
+#!/usr/bin/python3
+
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import json
+import os
+import subprocess
+import sys
+import tempfile
+
+
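+# A minimal insertion-ordered set built on OrderedDict; used below to
+# deduplicate depfile inputs while preserving their first-seen order.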
+class OrderedSet(collections.OrderedDict):
+  def add(self, value):
+    self[value] = True
+
+
+def compile_module(module, sources, settings, extras, tmpdir):
+  output_file_map = {}
+  if settings.whole_module_optimization:
+    output_file_map[''] = {
+        'object': os.path.join(settings.object_dir, module + '.o'),
+        'dependencies': os.path.join(tmpdir, module + '.d'),
+    }
+  else:
+    for source in sources:
+      name, _ = os.path.splitext(os.path.basename(source))
+      output_file_map[source] = {
+          'object': os.path.join(settings.object_dir, name + '.o'),
+          'dependencies': os.path.join(tmpdir, name + '.d'),
+      }
+
+  for key in ('module_path', 'header_path', 'depfile'):
+    path = getattr(settings, key)
+    if os.path.exists(path):
+      os.unlink(path)
+    if key == 'module_path':
+      for ext in '.swiftdoc', '.swiftsourceinfo':
+        path = os.path.splitext(getattr(settings, key))[0] + ext
+        if os.path.exists(path):
+          os.unlink(path)
+    directory = os.path.dirname(path)
+    if not os.path.exists(directory):
+      os.makedirs(directory)
+
+  if not os.path.exists(settings.object_dir):
+    os.makedirs(settings.object_dir)
+
+  for key in output_file_map:
+    path = output_file_map[key]['object']
+    if os.path.exists(path):
+      os.unlink(path)
+
+  output_file_map_path = os.path.join(tmpdir, module + '.json')
+  with open(output_file_map_path, 'w') as output_file_map_file:
+    output_file_map_file.write(json.dumps(output_file_map))
+    output_file_map_file.flush()
+
+  extra_args = []
+  if settings.bridge_header:
+    extra_args.extend([
+        '-import-objc-header',
+        os.path.abspath(settings.bridge_header),
+    ])
+
+  if settings.whole_module_optimization:
+    extra_args.append('-whole-module-optimization')
+
+  if settings.target:
+    extra_args.extend([
+        '-target',
+        settings.target,
+    ])
+
+  if settings.sdk:
+    extra_args.extend([
+        '-sdk',
+        os.path.abspath(settings.sdk),
+    ])
+
+  if settings.swift_version:
+    extra_args.extend([
+        '-swift-version',
+        settings.swift_version,
+    ])
+
+  if settings.include_dirs:
+    for include_dir in settings.include_dirs:
+      extra_args.append('-I' + include_dir)
+
+  process = subprocess.Popen([
+      'swiftc',
+      '-parse-as-library',
+      '-module-name',
+      module,
+      '-emit-object',
+      '-emit-dependencies',
+      '-emit-module',
+      '-emit-module-path',
+      settings.module_path,
+      '-emit-objc-header',
+      '-emit-objc-header-path',
+      settings.header_path,
+      '-output-file-map',
+      output_file_map_path,
+  ] + extra_args + extras + sources,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE,
+                             universal_newlines=True)
+
+  stdout, stderr = process.communicate()
+  if process.returncode:
+    sys.stdout.write(stdout)
+    sys.stderr.write(stderr)
+    sys.exit(process.returncode)
+
+  depfile_content = collections.OrderedDict()
+  for key in output_file_map:
+    for line in open(output_file_map[key]['dependencies']):
+      output, inputs = line.split(' : ', 2)
+      _, ext = os.path.splitext(output)
+      if ext == '.o':
+        key = output
+      else:
+        key = os.path.splitext(settings.module_path)[0] + ext
+      if key not in depfile_content:
+        depfile_content[key] = OrderedSet()
+      for path in inputs.split():
+        depfile_content[key].add(path)
+
+  with open(settings.depfile, 'w') as depfile:
+    for key in depfile_content:
+      if not settings.depfile_filter or key in settings.depfile_filter:
+        inputs = depfile_content[key]
+        depfile.write('%s : %s\n' % (key, ' '.join(inputs)))
+
+
+def main(args):
+  parser = argparse.ArgumentParser(add_help=False)
+  parser.add_argument('-module-name', help='name of the Swift module')
+  parser.add_argument('-include',
+                      '-I',
+                      action='append',
+                      dest='include_dirs',
+                      help='add directory to header search path')
+  parser.add_argument('sources', nargs='+', help='Swift source file to compile')
+  parser.add_argument('-whole-module-optimization',
+                      action='store_true',
+                      help='enable whole module optimization')
+  parser.add_argument('-object-dir',
+                      help='path to the generated object files directory')
+  parser.add_argument('-module-path', help='path to the generated module file')
+  parser.add_argument('-header-path', help='path to the generated header file')
+  parser.add_argument('-bridge-header',
+                      help='path to the Objective-C bridge header')
+  parser.add_argument('-depfile', help='path to the generated depfile')
+  parser.add_argument('-swift-version',
+                      help='version of Swift language to support')
+  parser.add_argument('-depfile-filter',
+                      action='append',
+                      help='limit depfile to those files')
+  parser.add_argument('-target',
+                      action='store',
+                      help='generate code for the given target <triple>')
+  parser.add_argument('-sdk', action='store', help='compile against sdk')
+
+  parsed, extras = parser.parse_known_args(args)
+  with tempfile.TemporaryDirectory() as tmpdir:
+    compile_module(parsed.module_name, parsed.sources, parsed, extras, tmpdir)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/toolchain/kythe.gni b/src/build/toolchain/kythe.gni
new file mode 100644
index 0000000..7486a4f
--- /dev/null
+++ b/src/build/toolchain/kythe.gni
@@ -0,0 +1,11 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines configuration for Kythe, an indexer and cross-referencer
+# that powers codesearch.
+
+declare_args() {
+  # Enables Kythe annotations necessary to build cross references.
+  enable_kythe_annotations = false
+}
diff --git a/src/build/toolchain/linux/BUILD.gn b/src/build/toolchain/linux/BUILD.gn
new file mode 100644
index 0000000..ee49454
--- /dev/null
+++ b/src/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,375 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ozone.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+clang_toolchain("clang_ppc64") {
+  enable_linker_map = true
+  toolchain_args = {
+    current_cpu = "ppc64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_arm") {
+  toolprefix = "arm-linux-gnueabihf-"
+  toolchain_args = {
+    current_cpu = "arm"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_arm64") {
+  toolprefix = "aarch64-linux-gnu-"
+  toolchain_args = {
+    current_cpu = "arm64"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("arm64") {
+  toolprefix = "aarch64-linux-gnu-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    current_cpu = "arm64"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+gcc_toolchain("arm") {
+  toolprefix = "arm-linux-gnueabihf-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    current_cpu = "arm"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_x86") {
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x86"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x86_v8_mips") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mips"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("x86") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x86"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_x64") {
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "arm64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_mips64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_riscv64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "riscv64"
+    current_os = "linux"
+  }
+}
+
+# In a LaCrOS build, this toolchain is intended to be used as an alternate
+# toolchain to build Ash-Chrome in a subdirectory.
+clang_toolchain("ash_clang_x64") {
+  toolchain_args = {
+    # This turns the toolchain into the "Linux ChromeOS" build
+    current_os = "chromeos"
+    target_os = "chromeos"
+    current_cpu = current_cpu
+
+    # This turns off all of the LaCrOS-specific flags.
+    also_build_ash_chrome = false
+    chromeos_is_browser_only = false
+    ozone_platform = "x11"
+    ozone_platform_wayland = false
+    use_clang_coverage = false
+  }
+}
+
+gcc_toolchain("x64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_mipsel") {
+  toolchain_args = {
+    current_cpu = "mipsel"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_mips64el") {
+  toolchain_args = {
+    current_cpu = "mips64el"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("mipsel") {
+  toolprefix = "mipsel-linux-gnu-"
+
+  cc = "${toolprefix}gcc"
+  cxx = " ${toolprefix}g++"
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    cc_wrapper = ""
+    current_cpu = "mipsel"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+    use_goma = false
+  }
+}
+
+gcc_toolchain("mips64el") {
+  toolprefix = "mips64el-linux-gnuabi64-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    cc_wrapper = ""
+    current_cpu = "mips64el"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+    use_goma = false
+  }
+}
+
+gcc_toolchain("riscv64") {
+  toolprefix = "riscv64-linux-gnu"
+
+  cc = "${toolprefix}-gcc"
+  cxx = "${toolprefix}-g++"
+
+  readelf = "${toolprefix}-readelf"
+  nm = "${toolprefix}-nm"
+  ar = "${toolprefix}-ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "riscv64"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_s390x") {
+  toolchain_args = {
+    current_cpu = "s390x"
+    current_os = "linux"
+    is_clang = true
+  }
+}
+
+gcc_toolchain("s390x") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "s390x"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+gcc_toolchain("ppc64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "ppc64"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+gcc_toolchain("mips") {
+  toolprefix = "mips-linux-gnu-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+  ar = "${toolprefix}ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "mips"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
+
+gcc_toolchain("mips64") {
+  toolprefix = "mips64-linux-gnuabi64-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+  ar = "${toolprefix}ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "mips64"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_rbe = false
+    is_clang = false
+  }
+}
diff --git a/src/build/toolchain/linux/unbundle/BUILD.gn b/src/build/toolchain/linux/unbundle/BUILD.gn
new file mode 100644
index 0000000..4719d54
--- /dev/null
+++ b/src/build/toolchain/linux/unbundle/BUILD.gn
@@ -0,0 +1,41 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+gcc_toolchain("default") {
+  cc = getenv("CC")
+  cxx = getenv("CXX")
+  ar = getenv("AR")
+  nm = getenv("NM")
+  ld = cxx
+
+  extra_cflags = getenv("CFLAGS")
+  extra_cppflags = getenv("CPPFLAGS")
+  extra_cxxflags = getenv("CXXFLAGS")
+  extra_ldflags = getenv("LDFLAGS")
+
+  toolchain_args = {
+    current_cpu = current_cpu
+    current_os = current_os
+  }
+}
+
+gcc_toolchain("host") {
+  cc = getenv("BUILD_CC")
+  cxx = getenv("BUILD_CXX")
+  ar = getenv("BUILD_AR")
+  nm = getenv("BUILD_NM")
+  ld = cxx
+
+  extra_cflags = getenv("BUILD_CFLAGS")
+  extra_cppflags = getenv("BUILD_CPPFLAGS")
+  extra_cxxflags = getenv("BUILD_CXXFLAGS")
+  extra_ldflags = getenv("BUILD_LDFLAGS")
+
+  toolchain_args = {
+    current_cpu = current_cpu
+    current_os = current_os
+  }
+}
diff --git a/src/build/toolchain/linux/unbundle/README.md b/src/build/toolchain/linux/unbundle/README.md
new file mode 100644
index 0000000..17b93c9
--- /dev/null
+++ b/src/build/toolchain/linux/unbundle/README.md
@@ -0,0 +1,41 @@
+# Overview
+
+This directory contains files that make it possible for Linux
+distributions to build Chromium using the system toolchain.
+
+For more info on the established way such builds are configured,
+please read the following:
+
+ - https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html
+
+Why do distros want CFLAGS, LDFLAGS, etc.? Please read the following
+for some examples. This is not an exhaustive list.
+
+ - https://wiki.debian.org/Hardening
+ - https://wiki.ubuntu.com/DistCompilerFlags
+ - https://fedoraproject.org/wiki/Changes/Harden_All_Packages
+ - https://fedoraproject.org/wiki/Changes/Modernise_GCC_Flags
+ - https://fedoraproject.org/wiki/Packaging:Guidelines#Compiler_flags
+ - https://blog.flameeyes.eu/2010/09/are-we-done-with-ldflags/
+ - https://blog.flameeyes.eu/2008/08/flags-and-flags/
+
+# Usage
+
+Add the following to GN args:
+
+```
+custom_toolchain="//build/toolchain/linux/unbundle:default"
+host_toolchain="//build/toolchain/linux/unbundle:default"
+```
+
+See [more docs on GN](https://gn.googlesource.com/gn/+/master/docs/quick_start.md).
+
+To cross-compile (not fully tested), add the following:
+
+```
+host_toolchain="//build/toolchain/linux/unbundle:host"
+v8_snapshot_toolchain="//build/toolchain/linux/unbundle:host"
+```
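+
+Since the toolchain reads the compiler and flags from the standard
+environment variables, a build can be driven like this (illustrative
+values; the variable names match what unbundle/BUILD.gn reads):
+
+```
+CC=gcc CXX=g++ AR=ar NM=nm \
+CFLAGS="-O2" CXXFLAGS="-O2" LDFLAGS="-Wl,-O1" \
+gn gen out/Release
+```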
+
+Note: when cross-compiling for a 32-bit target, a matching 32-bit toolchain
+may be needed.
diff --git a/src/build/toolchain/mac/BUILD.gn b/src/build/toolchain/mac/BUILD.gn
new file mode 100644
index 0000000..1d0f2a2
--- /dev/null
+++ b/src/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,99 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/mac/mac_sdk.gni")
+import("//build/toolchain/apple/toolchain.gni")
+
+# Specialisation of the apple_toolchain template to declare the toolchain
+# and its tools to build target for macOS platform.
+template("mac_toolchain") {
+  assert(defined(invoker.toolchain_args),
+         "Toolchains must declare toolchain_args")
+
+  apple_toolchain(target_name) {
+    forward_variables_from(invoker, "*", [ "toolchain_args" ])
+
+    bin_path = mac_bin_path
+
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+      current_os = "mac"
+
+      if (target_os == "ios") {
+        # TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+        # not supported by the Chromium mac_clang_x64 toolchain on iOS
+        # distribution.
+        use_sanitizer_coverage = false
+
+        # Do not use Xcode version of clang when building macOS tools for the
+        # host even if this is the version used to build for the iOS target.
+        use_xcode_clang = false
+      }
+    }
+  }
+}
+
+mac_toolchain("clang_arm") {
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+mac_toolchain("clang_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+mac_toolchain("clang_x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+mac_toolchain("clang_x86") {
+  toolchain_args = {
+    current_cpu = "x86"
+  }
+}
+
+mac_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+
+    if (defined(v8_current_cpu)) {
+      v8_current_cpu = "arm"
+    }
+  }
+}
+
+mac_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+
+    if (defined(v8_current_cpu)) {
+      v8_current_cpu = "mipsel"
+    }
+  }
+}
+
+mac_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+
+    if (defined(v8_current_cpu)) {
+      v8_current_cpu = "arm64"
+    }
+  }
+}
+
+mac_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+
+    if (defined(v8_current_cpu)) {
+      v8_current_cpu = "mips64el"
+    }
+  }
+}
diff --git a/src/build/toolchain/nacl/BUILD.gn b/src/build/toolchain/nacl/BUILD.gn
new file mode 100644
index 0000000..66a88be
--- /dev/null
+++ b/src/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,296 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/nacl_toolchain.gni")
+
+# Add the toolchain revision as a preprocessor define so that sources are
+# rebuilt when a toolchain is updated.
+# Ideally we could use the toolchain deps feature, but currently that feature
+# is bugged and does not trigger a rebuild.
+# https://code.google.com/p/chromium/issues/detail?id=431880
+# Calls to get the toolchain revision are relatively slow, so do them all in a
+# single batch to amortize python startup, etc.
+revisions = exec_script("//native_client/build/get_toolchain_revision.py",
+                        [
+                          "nacl_x86_glibc",
+                          "nacl_arm_glibc",
+                          "pnacl_newlib",
+                        ],
+                        "trim list lines")
+nacl_x86_glibc_rev = revisions[0]
+nacl_arm_glibc_rev = revisions[1]
+
+pnacl_newlib_rev = revisions[2]
+
+if (host_os == "win") {
+  toolsuffix = ".exe"
+} else {
+  toolsuffix = ""
+}
+
+# The PNaCl toolchain tools are all wrapper scripts rather than binary
+# executables.  On POSIX systems, nobody cares what kind of executable
+# file you are.  But on Windows, scripts (.bat files) cannot be run
+# directly and need the Windows shell (cmd.exe) specified explicitly.
+if (host_os == "win") {
+  # NOTE!  The //build/toolchain/gcc_*_wrapper.py scripts recognize
+  # this exact prefix string, so they must be updated if this string
+  # is changed in any way.
+  scriptprefix = "cmd /c call "
+  scriptsuffix = ".bat"
+} else {
+  scriptprefix = ""
+  scriptsuffix = ""
+}
+
+# When the compilers are run via goma, rbe or ccache rather than directly by
+# GN/Ninja, the rbe/goma/ccache wrapper handles .bat files but gets confused
+# by being given the scriptprefix.
+if (host_os == "win" && !use_goma && !use_rbe && cc_wrapper == "") {
+  compiler_scriptprefix = scriptprefix
+} else {
+  compiler_scriptprefix = ""
+}
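+
+# With the prefixes above, a compile on Windows without a wrapper is invoked
+# roughly as (illustrative): cmd /c call <toolchain_bin>\pnacl-clang.bat ...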
+
+template("pnacl_toolchain") {
+  assert(defined(invoker.executable_extension),
+         "Must define executable_extension")
+
+  nacl_toolchain(target_name) {
+    toolchain_package = "pnacl_newlib"
+    toolchain_revision = pnacl_newlib_rev
+    toolprefix =
+        rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-",
+                    root_build_dir)
+
+    if (host_os == "win") {
+      # Flip the slashes so that copy/paste of the commands works.
+      # This is also done throughout build\toolchain\win\BUILD.gn
+      toolprefix = string_replace(toolprefix, "/", "\\")
+    }
+
+    cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix
+    cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix
+    ar = toolprefix + "ar" + scriptsuffix
+    readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix
+    nm = scriptprefix + toolprefix + "nm" + scriptsuffix
+    if (defined(invoker.strip)) {
+      strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
+    }
+    forward_variables_from(invoker,
+                           [
+                             "executable_extension",
+                             "is_clang_analysis_supported",
+                             "extra_cppflags",
+                           ])
+
+    # Note this is not the usual "ld = cxx" because "ld" invocations are
+    # never run via goma, so this needs scriptprefix.
+    ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
+
+    toolchain_args = {
+      is_clang = true
+      current_cpu = "pnacl"
+      use_lld = false
+    }
+  }
+}
+
+pnacl_toolchain("newlib_pnacl") {
+  executable_extension = ".pexe"
+
+  # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file.
+  # It's very similar in purpose to the traditional "strip" utility: it
+  # turns what comes out of the linker into what you actually want to
+  # distribute and run.  PNaCl doesn't have a "strip"-like utility that
+  # you ever actually want to use other than pnacl-finalize, so just
+  # make pnacl-finalize the strip tool rather than adding an additional
+  # step like "postlink" to run pnacl-finalize.
+  strip = "finalize"
+}
+
+pnacl_toolchain("newlib_pnacl_nonsfi") {
+  executable_extension = ""
+  strip = "strip"
+
+  # This macro is embedded on nonsfi toolchains but reclient can't figure
+  # that out itself, so we make it explicit.
+  extra_cppflags = "-D__native_client_nonsfi__"
+}
+
+template("nacl_glibc_toolchain") {
+  toolchain_cpu = target_name
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+  assert(defined(invoker.toolchain_package), "Must define toolchain_package")
+  assert(defined(invoker.toolchain_revision), "Must define toolchain_revision")
+  forward_variables_from(invoker,
+                         [
+                           "toolchain_package",
+                           "toolchain_revision",
+                         ])
+
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  if (host_os == "win") {
+    # Flip the slashes so that copy/paste of the commands works.
+    # This is also done throughout build\toolchain\win\BUILD.gn
+    toolprefix = string_replace(toolprefix, "/", "\\")
+  }
+
+  nacl_toolchain("glibc_" + toolchain_cpu) {
+    cc = toolprefix + "gcc" + toolsuffix
+    cxx = toolprefix + "g++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+
+      # reclient does not support gcc.
+      use_rbe = false
+      is_clang = false
+      is_nacl_glibc = true
+      use_lld = false
+    }
+  }
+}
+
+nacl_glibc_toolchain("x86") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+
+  # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather
+  # than using the i686-nacl binary directly.  This is because i686-nacl-gcc
+  # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with
+  # compiler executables that are shell scripts (so the i686 'compiler' is not
+  # currently in goma).
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("x64") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("arm") {
+  toolchain_package = "nacl_arm_glibc"
+  toolchain_revision = nacl_arm_glibc_rev
+  toolchain_tuple = "arm-nacl"
+}
+
+template("nacl_clang_toolchain") {
+  toolchain_cpu = target_name
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "pnacl_newlib"
+  toolchain_revision = pnacl_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  if (host_os == "win") {
+    # Flip the slashes so that copy/paste of the commands works.
+    # This is also done throughout build\toolchain\win\BUILD.gn
+    toolprefix = string_replace(toolprefix, "/", "\\")
+  }
+
+  nacl_toolchain("clang_newlib_" + toolchain_cpu) {
+    cc = toolprefix + "clang" + toolsuffix
+    cxx = toolprefix + "clang++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = true
+      use_lld = false
+    }
+  }
+}
+
+template("nacl_irt_toolchain") {
+  toolchain_cpu = target_name
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "pnacl_newlib"
+  toolchain_revision = pnacl_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  if (host_os == "win") {
+    # Flip the slashes so that copy/paste of the commands works.
+    # This is also done throughout build\toolchain\win\BUILD.gn
+    toolprefix = string_replace(toolprefix, "/", "\\")
+  }
+
+  link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir)
+
+  tls_edit_label =
+      "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)"
+  host_toolchain_out_dir =
+      rebase_path(get_label_info(tls_edit_label, "root_out_dir"),
+                  root_build_dir)
+  tls_edit = "${host_toolchain_out_dir}/tls_edit"
+
+  nacl_toolchain("irt_" + toolchain_cpu) {
+    cc = toolprefix + "clang" + toolsuffix
+    cxx = toolprefix + "clang++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    # Some IRT implementations (notably, Chromium's) contain C++ code,
+    # so we need to link w/ the C++ linker.
+    ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}"
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = true
+      use_lld = false
+    }
+
+    # TODO(ncbray): depend on link script
+    deps = [ tls_edit_label ]
+  }
+}
+
+template("nacl_clang_toolchains") {
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+  nacl_clang_toolchain(target_name) {
+    toolchain_tuple = invoker.toolchain_tuple
+  }
+  nacl_irt_toolchain(target_name) {
+    toolchain_tuple = invoker.toolchain_tuple
+  }
+}
+
+nacl_clang_toolchains("x86") {
+  # Rely on :compiler_cpu_abi adding -m32.  See nacl_x86_glibc above.
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("x64") {
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("arm") {
+  toolchain_tuple = "arm-nacl"
+}
+
+nacl_clang_toolchains("mipsel") {
+  toolchain_tuple = "mipsel-nacl"
+}
diff --git a/src/build/toolchain/nacl_toolchain.gni b/src/build/toolchain/nacl_toolchain.gni
new file mode 100644
index 0000000..100c0b0
--- /dev/null
+++ b/src/build/toolchain/nacl_toolchain.gni
@@ -0,0 +1,71 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# This template defines a NaCl toolchain.
+#
+# It requires the following variables specifying the executables to run:
+#  - cc
+#  - cxx
+#  - ar
+#  - ld
+
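+# As a rough usage sketch (the target name, tool names, and revision value
+# below are illustrative assumptions, not definitions from this file):
+#
+#   nacl_toolchain("clang_newlib_x64") {
+#     cc = "x86_64-nacl-clang"
+#     cxx = "x86_64-nacl-clang++"
+#     ar = "x86_64-nacl-ar"
+#     ld = cxx
+#     toolchain_revision = "..."
+#     toolchain_args = {
+#       current_cpu = "x64"
+#     }
+#   }
+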
+template("nacl_toolchain") {
+  assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value")
+  assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value")
+  assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value")
+  assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value")
+  gcc_toolchain(target_name) {
+    if (defined(invoker.executable_extension)) {
+      executable_extension = invoker.executable_extension
+    } else {
+      executable_extension = ".nexe"
+    }
+    rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision
+
+    forward_variables_from(invoker,
+                           [
+                             "ar",
+                             "cc",
+                             "cxx",
+                             "deps",
+                             "ld",
+                             "link_outputs",
+                             "nm",
+                             "readelf",
+                             "strip",
+                             "extra_cppflags",
+                           ])
+
+    toolchain_args = {
+      # Use all values set on the invoker's toolchain_args.
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      current_os = "nacl"
+
+      # We do not support component builds with the NaCl toolchains.
+      is_component_build = false
+
+      # We do not support tcmalloc in the NaCl toolchains.
+      use_allocator = "none"
+      use_allocator_shim = false
+
+      # We do not support clang profiling in the NaCl toolchains.
+      use_clang_profiling = false
+      use_clang_coverage = false
+      coverage_instrumentation_input_file = ""
+
+      if (use_rbe) {
+        if (is_win) {
+          rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows_nacl.cfg"
+        } else {
+          # TODO(ukai): non linux?
+          rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux_nacl.cfg"
+        }
+      }
+    }
+  }
+}
diff --git a/src/build/toolchain/rbe.gni b/src/build/toolchain/rbe.gni
new file mode 100644
index 0000000..cf1848c
--- /dev/null
+++ b/src/build/toolchain/rbe.gni
@@ -0,0 +1,32 @@
+# Defines the configuration of Remote Build Execution (RBE).
+
+# The directory where the re-client tooling binaries are.
+rbe_bin_dir = rebase_path("//buildtools/reclient", root_build_dir)
+
+# The directory where the re-client configuration files are.
+rbe_cfg_dir = rebase_path("//buildtools/reclient_cfgs", root_build_dir)
+
+# RBE Execution root - this should be the root of the source tree.
+# This is defined here instead of in the config file because
+# this will vary depending on where the user has placed the
+# chromium source on their system.
+rbe_exec_root = rebase_path("//")
+
+declare_args() {
+  # Set to true to enable remote compilation using RBE.
+  use_rbe = false
+
+  # Set to the path of the RBE reclient configuration file.
+  rbe_cc_cfg_file = ""
+}
+
+# Configuration file selection based on operating system.
+if (is_linux || is_android) {
+  rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_linux.cfg"
+}
+if (is_win) {
+  rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_windows.cfg"
+}
+if (is_mac || is_ios) {
+  rbe_cc_cfg_file = "${rbe_cfg_dir}/rewrapper_mac.cfg"
+}
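+
+# A minimal sketch of enabling this from args.gn (illustrative):
+#
+#   use_rbe = true
+#
+# Compile commands are then prefixed with the rewrapper invocation, roughly:
+#   <rbe_bin_dir>/rewrapper -cfg=<rbe_cc_cfg_file> -exec_root=<rbe_exec_root> <compiler> ...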
diff --git a/src/build/toolchain/toolchain.gni b/src/build/toolchain/toolchain.gni
new file mode 100644
index 0000000..94ecbbb
--- /dev/null
+++ b/src/build/toolchain/toolchain.gni
@@ -0,0 +1,107 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Toolchain-related configuration that may be needed outside the context of the
+# toolchain() rules themselves.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # If this is set to true, we use the revision in the llvm repo to determine
+  # the CLANG_REVISION to use, instead of the version hard-coded into
+  # //tools/clang/scripts/update.py. This should only be used in
+  # conjunction with setting the llvm_force_head_revision DEPS variable when
+  # `gclient runhooks` is run as well.
+  llvm_force_head_revision = false
+
+  # Compile with Xcode version of clang instead of hermetic version shipped
+  # with the build. Used to be used for official iOS builds, but is now off
+  # by default for all configurations.
+  use_xcode_clang = false
+
+  # Used for binary size analysis.
+  generate_linker_map = is_android && is_official_build
+}
+
+if (generate_linker_map) {
+  assert(is_official_build || is_chromecast,
+         "Linker map files should only be generated when is_official_build = " +
+             "true or is_chromecast = true")
+  assert(current_os == "android" || current_os == "linux" ||
+             target_os == "android" || target_os == "linux" ||
+             target_os == "chromeos",
+         "Linker map files should only be generated for Android, Linux, " +
+             "or ChromeOS.")
+}
+
+declare_args() {
+  clang_version = "13.0.0"
+}
+
+# Check target_os here instead of is_ios as this file is loaded for secondary
+# toolchain (host toolchain in particular) but the argument is the same for
+# all toolchains.
+assert(!use_xcode_clang || target_os == "ios",
+       "Using Xcode's clang is only supported in iOS builds")
+
+# Extension for shared library files (including leading dot).
+if (is_apple) {
+  shlib_extension = ".dylib"
+} else if (is_android && is_component_build) {
+  # By appending .cr, we prevent name collisions with libraries already
+  # loaded by the Android zygote.
+  shlib_extension = ".cr.so"
+} else if (is_posix || is_fuchsia) {
+  shlib_extension = ".so"
+} else if (is_win) {
+  shlib_extension = ".dll"
+} else {
+  assert(false, "Platform not supported")
+}
+
+# Prefix for shared library files.
+if (is_posix || is_fuchsia) {
+  shlib_prefix = "lib"
+} else {
+  shlib_prefix = ""
+}
+
+# Directory for shared library files.
+if (is_fuchsia) {
+  shlib_subdir = "/lib"
+} else {
+  shlib_subdir = ""
+}
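+
+# Putting the pieces together, a shared library target named "foo" is laid
+# out roughly as follows (illustrative):
+#   Linux:                   libfoo.so
+#   Android component build: libfoo.cr.so
+#   Windows:                 foo.dll
+#   Fuchsia:                 lib/libfoo.so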
+
+# While other "tool"s in a toolchain are specific to the target of that
+# toolchain, the "stamp" and "copy" tools are really generic to the host;
+# but each toolchain must define them separately.  GN doesn't allow a
+# template instantiation inside a toolchain definition, so some boilerplate
+# has to be repeated in each toolchain to define these two tools.  These
+# four variables reduce the duplication in that boilerplate.
+stamp_description = "STAMP {{output}}"
+copy_description = "COPY {{source}} {{output}}"
+if (host_os == "win") {
+  _tool_wrapper_path =
+      rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+
+  stamp_command = "cmd /c type nul > \"{{output}}\""
+  copy_command =
+      "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+} else {
+  stamp_command = "touch {{output}}"
+  copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+}
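+
+# Each toolchain() then consumes these verbatim, e.g.:
+#
+#   tool("stamp") {
+#     command = stamp_command
+#     description = stamp_description
+#   }
+#   tool("copy") {
+#     command = copy_command
+#     description = copy_description
+#   }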
+
+# This variable is true if the current toolchain is one of the target
+# toolchains, i.e. a toolchain which is being used to build the main Chrome
+# binary. This generally means "not the host toolchain", but in the case where
+# we're targeting the host it's true then as well. We do require current_os to
+# match target_os so that for example we avoid considering Android as a target
+# toolchain when targeting CrOS.
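+# For example, when cross-building Android from a Linux host, this is false
+# in the Linux host toolchain and true in the Android target toolchains.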
+is_a_target_toolchain =
+    (current_toolchain != host_toolchain ||
+     default_toolchain == host_toolchain) && current_os == target_os
diff --git a/src/build/toolchain/win/BUILD.gn b/src/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000..a813752
--- /dev/null
+++ b/src/build/toolchain/win/BUILD.gn
@@ -0,0 +1,534 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Set up the Visual Studio state.
+#
+# Its arguments are the VS path and the compiler wrapper tool. It will write
+# "environment.x86" and "environment.x64" to the build directory and return a
+# list to us.
+
+# This tool is used as a wrapper for various commands below.
+tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
+
+if (use_rbe) {
+  goma_prefix = ""
+  rbe_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+  clang_prefix = rbe_prefix
+} else if (use_goma) {
+  if (host_os == "win") {
+    goma_prefix = "$goma_dir/gomacc.exe "
+  } else {
+    goma_prefix = "$goma_dir/gomacc "
+  }
+  clang_prefix = goma_prefix
+} else {
+  goma_prefix = ""
+  if (cc_wrapper != "") {
+    clang_prefix = cc_wrapper + " "
+  } else {
+    clang_prefix = ""
+  }
+}
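+
+# With use_rbe, compiles are thus prefixed with the rewrapper invocation,
+# roughly (illustrative):
+#   <rbe_bin_dir>/rewrapper -cfg=... -exec_root=... clang-cl.exe /c foo.cc
+# and with use_goma:
+#   <goma_dir>/gomacc.exe cl.exe /c foo.cc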
+
+# Copy the VS runtime DLL for the default toolchain to the root build directory
+# so things will run.
+if (current_toolchain == default_toolchain) {
+  if (is_debug) {
+    configuration_name = "Debug"
+  } else {
+    configuration_name = "Release"
+  }
+  exec_script("../../vs_toolchain.py",
+              [
+                "copy_dlls",
+                rebase_path(root_build_dir),
+                configuration_name,
+                target_cpu,
+              ])
+}
+
+if (host_os == "win") {
+  clang_cl = "clang-cl.exe"
+} else {
+  clang_cl = "clang-cl"
+}
+
+# Parameters:
+#   environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+#   current_cpu: current_cpu to pass as a build arg
+#   current_os: current_os to pass as a build arg
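+#
+# A rough instantiation sketch (mirroring the win_toolchains template below;
+# names are illustrative):
+#
+#   msvc_toolchain("win_clang_x64") {
+#     environment = "environment.x64"
+#     cl = "clang-cl"
+#     toolchain_args = {
+#       is_clang = true
+#       current_os = "win"
+#       current_cpu = "x64"
+#     }
+#   }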
+template("msvc_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args))
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+
+      # This value needs to be passed through unchanged.
+      host_toolchain = host_toolchain
+    }
+
+    # Make these apply to all tools below.
+    lib_switch = ""
+    lib_dir_switch = "/LIBPATH:"
+
+    # Object files go in this directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    env = invoker.environment
+
+    cl = invoker.cl
+
+    if (use_lld) {
+      if (host_os == "win") {
+        lld_link = "lld-link.exe"
+      } else {
+        lld_link = "lld-link"
+      }
+      prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+
+      # lld-link includes a replacement for lib.exe that can produce thin
+      # archives and understands bitcode (for lto builds).
+      link = "$prefix/$lld_link"
+      if (host_os == "win") {
+        # Flip the slashes so that copy/paste of the commands works.
+        link = string_replace(link, "/", "\\")
+      }
+      lib = "$link /lib"
+      if (host_os != "win") {
+        # See comment adding --rsp-quoting to $cl above for more information.
+        link = "$link --rsp-quoting=posix"
+      }
+    } else {
+      lib = "lib.exe"
+      link = "link.exe"
+    }
+
+    # If possible, pass system includes as flags to the compiler.  When that's
+    # not possible, load a full environment file (containing %INCLUDE% and
+    # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
+    # passing in a list of include directories isn't enough.
+    if (defined(invoker.sys_include_flags)) {
+      env_wrapper = ""
+      sys_include_flags =
+          "${invoker.sys_include_flags} "  # Note trailing space.
+    } else {
+      # clang-cl doesn't need this env hoop, so omit it there.
+      assert((defined(toolchain_args.is_clang) && !toolchain_args.is_clang) ||
+             !is_clang)
+      env_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
+      sys_include_flags = ""
+    }
+
+    # ninja only supports -t msvc on Windows, and lld doesn't depend on
+    # mt.exe being in PATH on non-Windows, so the wrapper isn't needed there
+    # anyway.
+    if (host_os != "win") {
+      linker_wrapper = ""
+      sys_lib_flags = "${invoker.sys_lib_flags} "  # Note trailing space.
+    } else if (defined(invoker.sys_lib_flags)) {
+      # Invoke ninja as wrapper instead of tool wrapper, because python
+      # invocation requires higher cpu usage compared to ninja invocation, and
+      # the python wrapper is only needed to work around link.exe problems.
+      # TODO(thakis): Remove wrapper once lld-link can merge manifests without
+      # relying on mt.exe being in %PATH% on Windows, https://crbug.com/872740
+      linker_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
+      sys_lib_flags = "${invoker.sys_lib_flags} "  # Note trailing space.
+    } else {
+      # Note trailing space:
+      linker_wrapper =
+          "$python_path $tool_wrapper_path link-wrapper $env False "
+      sys_lib_flags = ""
+    }
+
+    if (defined(toolchain_args.use_clang_coverage)) {
+      toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
+    } else {
+      toolchain_use_clang_coverage = use_clang_coverage
+    }
+
+    if (toolchain_use_clang_coverage) {
+      assert(toolchain_args.is_clang,
+             "use_clang_coverage should only be used with Clang")
+      if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+        toolchain_coverage_instrumentation_input_file =
+            toolchain_args.coverage_instrumentation_input_file
+      } else {
+        toolchain_coverage_instrumentation_input_file =
+            coverage_instrumentation_input_file
+      }
+
+      coverage_wrapper =
+          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+                      root_build_dir)
+      coverage_wrapper = coverage_wrapper + " --target-os=" + target_os
+      if (toolchain_coverage_instrumentation_input_file != "") {
+        coverage_wrapper =
+            coverage_wrapper + " --files-to-instrument=" +
+            rebase_path(toolchain_coverage_instrumentation_input_file,
+                        root_build_dir)
+      }
+      coverage_wrapper = "$python_path " + coverage_wrapper + " "
+    } else {
+      coverage_wrapper = ""
+    }
+
+    if (toolchain_args.is_clang) {
+      # This flag omits system includes from /showIncludes output, to reduce
+      # the amount of data to parse and store in .ninja_deps. We do this on
+      # non-Windows too, and already make sure rebuilds after win sdk /
+      # libc++ / clang header updates happen via changing command line flags.
+      show_includes = "/showIncludes:user"
+    } else {
+      show_includes = "/showIncludes"
+    }
+
+    tool("cc") {
+      precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+      # Label names may have spaces in them so the pdbname must be quoted. The
+      # source and output don't need to be quoted because GN knows they're a
+      # full file name and will quote automatically when necessary.
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      command = "$coverage_wrapper$env_wrapper$cl /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+    }
+
+    tool("cxx") {
+      precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+      # See comment in CC tool about quoting.
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      command = "$coverage_wrapper$env_wrapper$cl /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+    }
+
+    tool("rc") {
+      command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      depsformat = "msvc"
+      outputs = [ "$object_subdir/{{source_name_part}}.res" ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      is_msvc_assembler = true
+
+      if (toolchain_args.current_cpu == "arm64") {
+        if (is_clang) {
+          prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+          ml = "${clang_prefix}${prefix}/${clang_cl} --target=arm64-windows"
+          if (host_os == "win") {
+            # Flip the slashes so that copy/paste of the command works.
+            ml = string_replace(ml, "/", "\\")
+          }
+          ml += " -c -o{{output}}"
+          is_msvc_assembler = false
+        } else {
+          # Only affects Arm builds with is_clang = false, implemented for building
+          # V8 for Windows on Arm systems with the MSVC toolchain.
+          ml = "armasm64.exe"
+        }
+      } else {
+        # x86/x64 builds always use the MSVC assembler.
+        if (toolchain_args.current_cpu == "x64") {
+          ml = "ml64.exe"
+        } else {
+          ml = "ml.exe"
+        }
+      }
+
+      if (is_msvc_assembler) {
+        ml += " /nologo /Fo{{output}}"
+
+        # Suppress final-stage linking on x64/x86 builds. (Armasm64 does not
+        # require /c because it doesn't support linking.)
+        if (toolchain_args.current_cpu != "arm64") {
+          ml += " /c"
+        }
+        if (use_lld) {
+          # Wrap ml(64).exe with a script that makes its output deterministic.
+          # It's lld only because the script zaps obj Timestamp which
+          # link.exe /incremental looks at.
+          # TODO(https://crbug.com/762167): If we end up writing an llvm-ml64,
+          # make sure it has deterministic output (maybe with /Brepro or
+          # something) and remove this wrapper.
+          ml_py = rebase_path("ml.py", root_build_dir)
+          ml = "$python_path $ml_py $ml"
+        }
+      }
+      if (toolchain_args.current_cpu != "arm64" || is_clang) {
+        command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
+      } else {
+        # armasm64.exe does not support definitions passed via the command line.
+        # (Fortunately, they're not needed for compiling the V8 snapshot, which
+        # is the only time this assembler is required.)
+        command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}"
+      }
+
+      description = "ASM {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "$linker_wrapper$lib /OUT:{{output}} /nologo ${sys_lib_flags}{{arflags}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib, there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{output_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+      default_output_dir = "{{target_out_dir}}"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      # E.g. "foo.dll":
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      libname = "${dllname}.lib"  # e.g. foo.dll.lib
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+        pdbname,
+      ]
+      link_output = libname
+      depend_output = libname
+      runtime_outputs = [
+        dllname,
+        pdbname,
+      ]
+
+      # Since the above command only updates the .lib file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("solink_module") {
+      # E.g. "foo.dll":
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/DLL /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK_MODULE(DLL) {{output}}"
+      outputs = [
+        dllname,
+        pdbname,
+      ]
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("link") {
+      exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      pdbname = "$exename.pdb"
+      rspfile = "$exename.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$exename /nologo ${sys_lib_flags} /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".exe"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK {{output}}"
+      outputs = [
+        exename,
+        pdbname,
+      ]
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+
+    tool("action") {
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+  }
+}
+
+template("win_toolchains") {
+  assert(defined(invoker.toolchain_arch))
+  toolchain_arch = invoker.toolchain_arch
+
+  win_toolchain_data = exec_script("setup_toolchain.py",
+                                   [
+                                     visual_studio_path,
+                                     windows_sdk_path,
+                                     visual_studio_runtime_dirs,
+                                     "win",
+                                     toolchain_arch,
+                                     "environment." + toolchain_arch,
+                                   ],
+                                   "scope")
+
+  # The toolchain using MSVC only makes sense when not doing cross builds.
+  # Chromium exclusively uses the win_clang_ toolchain below, but V8 and
+  # WebRTC still use this MSVC toolchain in some cases.
+  if (host_os == "win") {
+    msvc_toolchain(target_name) {
+      environment = "environment." + toolchain_arch
+      cl = "${goma_prefix}\"${win_toolchain_data.vc_bin_dir}/cl.exe\""
+
+      toolchain_args = {
+        if (defined(invoker.toolchain_args)) {
+          forward_variables_from(invoker.toolchain_args, "*")
+        }
+        is_clang = false
+        use_clang_coverage = false
+        current_os = "win"
+        current_cpu = toolchain_arch
+      }
+    }
+  }
+
+  msvc_toolchain("win_clang_" + target_name) {
+    environment = "environment." + toolchain_arch
+    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cl = "${clang_prefix}$prefix/${clang_cl}"
+    _clang_lib_dir =
+        rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows",
+                    root_build_dir)
+    if (host_os == "win") {
+      # Flip the slashes so that copy/paste of the command works.
+      cl = string_replace(cl, "/", "\\")
+
+      # And to match the other -libpath flags.
+      _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\")
+    }
+
+    sys_include_flags = "${win_toolchain_data.include_flags_imsvc}"
+    sys_lib_flags =
+        "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_flags}"
+
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+      is_clang = true
+      current_os = "win"
+      current_cpu = toolchain_arch
+    }
+  }
+}
+
+if (target_cpu == "x86" || target_cpu == "x64") {
+  win_toolchains("x86") {
+    toolchain_arch = "x86"
+  }
+  win_toolchains("x64") {
+    toolchain_arch = "x64"
+  }
+}
+
+if (target_cpu == "arm64") {
+  win_toolchains("arm64") {
+    toolchain_arch = "arm64"
+  }
+  win_toolchains(host_cpu) {
+    toolchain_arch = host_cpu
+  }
+}
+
+# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
+# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
+# The only reason it's a separate toolchain is so that it can force
+# is_component_build to false in the toolchain_args() block, because
+# building nacl64.exe in component style does not work.
+win_toolchains("nacl_win64") {
+  toolchain_arch = "x64"
+  toolchain_args = {
+    is_component_build = false
+  }
+}
+
+# WinUWP toolchains. Only define these when targeting them.
+
+if (target_os == "winuwp") {
+  assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
+         target_cpu == "arm64")
+  store_cpu_toolchain_data = exec_script("setup_toolchain.py",
+                                         [
+                                           visual_studio_path,
+                                           windows_sdk_path,
+                                           visual_studio_runtime_dirs,
+                                           target_os,
+                                           target_cpu,
+                                           "environment.store_" + target_cpu,
+                                         ],
+                                         "scope")
+
+  msvc_toolchain("uwp_" + target_cpu) {
+    environment = "environment.store_" + target_cpu
+    cl = "${goma_prefix}\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
+    toolchain_args = {
+      current_os = "winuwp"
+      current_cpu = target_cpu
+      is_clang = false
+    }
+  }
+}
diff --git a/src/build/toolchain/win/midl.gni b/src/build/toolchain/win/midl.gni
new file mode 100644
index 0000000..5ccaaf2
--- /dev/null
+++ b/src/build/toolchain/win/midl.gni
@@ -0,0 +1,201 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/python.gni")
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler. The generated
+# source code will be compiled and linked into targets that depend on this.
+#
+# Parameters
+#
+#   sources
+#       List of .idl files to process.
+#
+#   header_file (optional)
+#       File name of generated header file.  Defaults to the basename of the
+#       source idl file with a .h extension.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   generated_dir (optional)
+#       Directory where generated files were previously persisted.
+#       Defaults to third_party\win_build_output\midl\|out_dir|.
+#
+#   dynamic_guids (optional)
+#       If the GUIDs are not constant across builds, the current GUID
+#       substitutions.
+#       |dynamic_guids| is of the form:
+#         "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791="
+#         "3d852661-c795-4d20-9b95-5561e9a1d2d9,"
+#         "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B="
+#         "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+#       See midl.py for more details.
+#
+#   writes_tlb (optional)
+#       Whether a .tlb file should be added to outputs. Defaults to false.
+#
+#   writes_proxy (optional)
+#       Whether a _p.c file should be added to outputs. Defaults to true.
+#
+#   writes_dlldata (optional)
+#       Whether a .dlldata.c file should be added to outputs. Defaults to true.
+#
+#   deps (optional)
+#
+#   defines (optional)
+#       Build time defines to be passed to midl.exe as /D parameter.
+#
+#   visibility (optional)
+
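+# A minimal sketch of a midl() target (the target and file names here are
+# illustrative assumptions):
+#
+#   midl("foo_idl") {
+#     sources = [ "foo.idl" ]
+#     writes_tlb = true
+#   }
+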
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "Sources must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  if (defined(invoker.generated_dir)) {
+    generated_dir = rebase_path(invoker.generated_dir)
+  } else {
+    # midl.py expects 'gen' to be replaced with 'midl'.
+    generated_dir = rebase_path("//third_party/win_build_output") + "/midl/" +
+                    rebase_path(out_dir, root_gen_dir)
+  }
+
+  if (defined(invoker.dynamic_guids)) {
+    dynamic_guids = invoker.dynamic_guids
+  } else {
+    dynamic_guids = "none"
+  }
+
+  if (defined(invoker.header_file)) {
+    header_file = invoker.header_file
+  } else {
+    header_file = "{{source_name_part}}.h"
+  }
+
+  if (defined(invoker.writes_tlb)) {
+    writes_tlb = invoker.writes_tlb
+  } else {
+    writes_tlb = false
+  }
+
+  if (defined(invoker.writes_proxy)) {
+    writes_proxy = invoker.writes_proxy
+  } else {
+    writes_proxy = true
+  }
+
+  if (defined(invoker.writes_dlldata)) {
+    writes_dlldata = invoker.writes_dlldata
+  } else {
+    writes_dlldata = true
+  }
+
+  if (writes_tlb) {
+    type_library_file = "{{source_name_part}}.tlb"
+  } else {
+    type_library_file = "none"
+  }
+
+  if (writes_dlldata) {
+    dlldata_file = "{{source_name_part}}.dlldata.c"
+  } else {
+    dlldata_file = "none"
+  }
+
+  if (writes_proxy) {
+    proxy_file = "{{source_name_part}}_p.c"
+  } else {
+    proxy_file = "none"
+  }
+
+  interface_identifier_file = "{{source_name_part}}_i.c"
+
+  # TODO(crbug.com/1112471): Get this to run cleanly under Python 3.
+  python2_action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+    script = "//build/toolchain/win/midl.py"
+
+    sources = invoker.sources
+
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$interface_identifier_file",
+    ]
+
+    # These files are only added to outputs if the invoker so desires, as
+    # they are not always generated, depending on the content of the input
+    # idl file.
+    if (writes_tlb) {
+      outputs += [ "$out_dir/$type_library_file" ]
+    }
+    if (writes_dlldata) {
+      outputs += [ "$out_dir/$dlldata_file" ]
+    }
+    if (writes_proxy) {
+      outputs += [ "$out_dir/$proxy_file" ]
+    }
+
+    if (current_cpu == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (current_cpu == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else if (current_cpu == "arm64") {
+      win_tool_arch = "environment.arm64"
+      idl_target_platform = "arm64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      win_tool_arch,
+      generated_dir,
+      rebase_path(out_dir, root_build_dir),
+      dynamic_guids,
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe",
+                  root_build_dir),
+      "{{source}}",
+      "/char",
+      "signed",
+      "/env",
+      idl_target_platform,
+      "/Oicf",
+    ]
+
+    if (defined(invoker.defines)) {
+      foreach(define, invoker.defines) {
+        args += [ "/D" + define ]
+      }
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+  }
+
+  source_set(target_name) {
+    forward_variables_from(invoker, [ "visibility" ])
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
+    sources = process_file_template(invoker.sources,
+                                    [ "$out_dir/$interface_identifier_file" ])
+
+    public_deps = [ ":$action_name" ]
+  }
+}
diff --git a/src/build/toolchain/win/midl.py b/src/build/toolchain/win/midl.py
new file mode 100644
index 0000000..3224385
--- /dev/null
+++ b/src/build/toolchain/win/midl.py
@@ -0,0 +1,482 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import division
+from __future__ import print_function
+
+import array
+import difflib
+import distutils.dir_util
+import filecmp
+import io
+import operator
+import os
+import posixpath
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+import uuid
+
+from functools import reduce
+
+
+def ZapTimestamp(filename):
+  contents = open(filename, 'rb').read()
+  # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
+  # outputs, but using the local timezone.  To make the output timezone-
+  # independent, replace that date with a fixed string of the same length.
+  # Also blank out the minor version number.
+  if filename.endswith('.tlb'):
+    # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
+    # a fairly complete description of the .tlb binary format.
+    # TLB files start with a 54 byte header. Offset 0x20 stores how many types
+    # are defined in the file, and the header is followed by that many uint32s.
+    # After that, 15 section headers appear.  Each section header is 16 bytes,
+    # starting with offset and length uint32s.
+    # Section 12 in the file contains custom() data. custom() data has a type
+    # (int, string, etc).  Each custom data chunk starts with a uint16_t
+    # describing its type.  Type 8 is string data, consisting of a uint32_t
+    # len, followed by that many data bytes, followed by 'W' bytes to pad to a
+    # 4 byte boundary.  Type 0x13 is uint32 data, followed by 4 data bytes,
+    # followed by two 'W' to pad to a 4 byte boundary.
+    # The custom block always starts with one string containing "Created by
+    # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
+    # followed by another uint32 containing the MIDL compiler version (e.g.
+    # 0x0801026e for v8.1.622 -- 0x26e == 622).  These 3 fields take 0x54 bytes.
+    # There might be more custom data after that, but these 3 blocks are always
+    # there for file-level metadata.
+    # All data is little-endian in the file.
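+    # Concretely, as unpacked just below: section header i starts at offset
+    # 0x54 + 4*ntypes + i*16, so the custom() data section (section 12,
+    # i.e. index 11) has its offset/length pair at 0x54 + 4*ntypes + 11*16.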
+    assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
+    ntypes, = struct.unpack_from('<I', contents, 0x20)
+    custom_off, custom_len = struct.unpack_from(
+        '<II', contents, 0x54 + 4*ntypes + 11*16)
+    assert custom_len >= 0x54
+    # First: Type string (0x8), followed by 0x3e characters.
+    assert contents[custom_off:custom_off + 6] == b'\x08\x00\x3e\x00\x00\x00'
+    assert re.match(
+        br'Created by MIDL version 8\.\d\d\.\d{4} '
+        br'at ... Jan 1. ..:..:.. 2038\n',
+        contents[custom_off + 6:custom_off + 6 + 0x3e])
+    # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
+    assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
+        b'\x13\x00\xff\xff\xff\x7f\x57\x57'
+    # Third: Type uint32 (0x13) storing MIDL compiler version.
+    assert contents[custom_off + 6 + 0x3e + 8:custom_off + 6 + 0x3e + 8 +
+                    2] == b'\x13\x00'
+    # Replace "Created by" string with fixed string, and fixed MIDL version with
+    # 8.1.622 always.
+    contents = (
+        contents[0:custom_off + 6] +
+        b'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' +
+        # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW
+        b'\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' +
+        contents[custom_off + 0x54:])
+  else:
+    contents = re.sub(
+        br'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n'
+        br'/\* at ... Jan 1. ..:..:.. 2038',
+        br'File created by MIDL compiler version 8.xx.xxxx */\r\n'
+        br'/* at a redacted point in time', contents)
+    contents = re.sub(
+        br'    Oicf, W1, Zp8, env=(.....) \(32b run\), '
+        br'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}',
+        br'    Oicf, W1, Zp8, env=\1 (32b run), target_arch=\2 8.xx.xxxx',
+        contents)
+    # TODO(thakis): If we need more hacks than these, try to verify checked-in
+    # outputs when we're using the hermetic toolchain.
+    # midl.exe older than 8.1.622 omit '//' after #endif, fix that:
+    contents = contents.replace(b'#endif !_MIDL_USE_GUIDDEF_',
+                                b'#endif // !_MIDL_USE_GUIDDEF_')
+    # midl.exe puts the midl version into code in one place.  To have
+    # predictable output, lie about the midl version if it's not 8.1.622.
+    # This is unfortunate, but remember that there's beauty too in imperfection.
+    contents = contents.replace(b'0x801026c, /* MIDL Version 8.1.620 */',
+                                b'0x801026e, /* MIDL Version 8.1.622 */')
+  open(filename, 'wb').write(contents)
+
+
+def get_tlb_contents(tlb_file):
+  # See ZapTimestamp() for a short overview of the .tlb format.
+  contents = open(tlb_file, 'rb').read()
+  assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
+  ntypes, = struct.unpack_from('<I', contents, 0x20)
+  type_off, type_len = struct.unpack_from('<II', contents, 0x54 + 4*ntypes)
+
+  guid_off, guid_len = struct.unpack_from(
+      '<II', contents, 0x54 + 4*ntypes + 5*16)
+  assert guid_len % 24 == 0
+
+  contents = array.array('B', contents)
+
+  return contents, ntypes, type_off, guid_off, guid_len
+
+
+def recreate_guid_hashtable(contents, ntypes, guid_off, guid_len):
+  # This function is called after changing guids in section 6 (the "guid"
+  # section). This function recreates the GUID hashtable in section 5. Since the
+  # hash table uses chaining, it's easiest to recompute it from scratch rather
+  # than trying to patch it up.
+  hashtab = [0xffffffff] * (0x80 // 4)
+  for guidind in range(guid_off, guid_off + guid_len, 24):
+    guidbytes, typeoff, nextguid = struct.unpack_from(
+        '<16sII', contents, guidind)
+    words = struct.unpack('<8H', guidbytes)
+    # midl seems to use the following simple hash function for GUIDs:
+    guidhash = reduce(operator.xor, words) % (0x80 // 4)
+    nextguid = hashtab[guidhash]
+    struct.pack_into('<I', contents, guidind + 0x14, nextguid)
+    hashtab[guidhash] = guidind - guid_off
+  hash_off, hash_len = struct.unpack_from(
+      '<II', contents, 0x54 + 4*ntypes + 4*16)
+  for i, hashval in enumerate(hashtab):
+    struct.pack_into('<I', contents, hash_off + 4*i, hashval)
+
+
+def overwrite_guids_h(h_file, dynamic_guids):
+  contents = open(h_file, 'rb').read()
+  for key in dynamic_guids:
+    contents = re.sub(key, dynamic_guids[key], contents, flags=re.I)
+  open(h_file, 'wb').write(contents)
+
+
+def get_uuid_format(guid, prefix):
+  formatted_uuid = '0x%s,0x%s,0x%s,' % (guid[0:8], guid[9:13], guid[14:18])
+  formatted_uuid += '%s0x%s,0x%s' % (prefix, guid[19:21], guid[21:23])
+  for i in range(24, len(guid), 2):
+    formatted_uuid += ',0x' + guid[i:i + 2]
+  return formatted_uuid
+
+
+def get_uuid_format_iid_file(guid):
+  # Convert from "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83" to
+  # 0xD0E1CACC,0xC63C,0x4192,0x94,0xAB,0xBF,0x8E,0xAD,0x0E,0x3B,0x83.
+  return get_uuid_format(guid, '')
+
+
+def overwrite_guids_iid(iid_file, dynamic_guids):
+  contents = open(iid_file, 'rb').read()
+  for key in dynamic_guids:
+    contents = re.sub(get_uuid_format_iid_file(key),
+                      get_uuid_format_iid_file(dynamic_guids[key]),
+                      contents,
+                      flags=re.I)
+  open(iid_file, 'wb').write(contents)
+
+
+def get_uuid_format_proxy_file(guid):
+  # Convert from "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83" to
+  # {0xD0E1CACC,0xC63C,0x4192,{0x94,0xAB,0xBF,0x8E,0xAD,0x0E,0x3B,0x83}}.
+  return get_uuid_format(guid, '{')
+
+
+def overwrite_guids_proxy(proxy_file, dynamic_guids):
+  contents = open(proxy_file, 'rb').read()
+  for key in dynamic_guids:
+    contents = re.sub(get_uuid_format_proxy_file(key),
+                      get_uuid_format_proxy_file(dynamic_guids[key]),
+                      contents,
+                      flags=re.I)
+  open(proxy_file, 'wb').write(contents)
+
+
+def getguid(contents, offset):
+  # Returns a guid string of the form "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+  g0, g1, g2, g3 = struct.unpack_from('<IHH8s', contents, offset)
+  g3 = ''.join(['%02X' % ord(g) for g in g3])
+  return '%08X-%04X-%04X-%s-%s' % (g0, g1, g2, g3[0:4], g3[4:])
+
+
+def setguid(contents, offset, guid):
+  guid = uuid.UUID(guid)
+  struct.pack_into('<IHH8s', contents, offset,
+                   *(guid.fields[0:3] + (guid.bytes[8:], )))
+
+
+def overwrite_guids_tlb(tlb_file, dynamic_guids):
+  contents, ntypes, type_off, guid_off, guid_len = get_tlb_contents(tlb_file)
+
+  for i in range(0, guid_len, 24):
+    current_guid = getguid(contents, guid_off + i)
+    for key in dynamic_guids:
+      if key.lower() == current_guid.lower():
+        setguid(contents, guid_off + i, dynamic_guids[key])
+
+  recreate_guid_hashtable(contents, ntypes, guid_off, guid_len)
+  open(tlb_file, 'wb').write(contents)
+
+
+# Handle multiple guid substitutions, where |dynamic_guids| is of the form
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791="
+# "3d852661-c795-4d20-9b95-5561e9a1d2d9,"
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B="
+# "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+#
+# Before specifying |dynamic_guids| in the build, the IDL file is first compiled
+# with "158428a4-6014-4978-83ba-9fad0dabe791" and
+# "63B8FFB1-5314-48C9-9C57-93EC8BC6184B". These are the "replaceable" guids,
+# i.e., guids that can be replaced in future builds. The resulting MIDL outputs
+# are copied over to src\third_party\win_build_output\.
+#
+# Then, in the future, any changes to these guids can be accomplished by
+# providing |dynamic_guids| of the format above in the build file. These
+# "dynamic" guid changes by themselves will not require the MIDL compiler and
+# therefore will not require copying output over to
+# src\third_party\win_build_output\.
+#
+# The pre-generated src\third_party\win_build_output\ files are used for
+# cross-compiling on other platforms, since the MIDL compiler is Windows-only.
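+#
+# After parsing in main(), |dynamic_guids| of the form above becomes a dict,
+# roughly:
+#   {'158428a4-6014-4978-83ba-9fad0dabe791':
+#        '3d852661-c795-4d20-9b95-5561e9a1d2d9',
+#    '63B8FFB1-5314-48C9-9C57-93EC8BC6184B':
+#        'D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83'}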
+def overwrite_guids(h_file, iid_file, proxy_file, tlb_file, dynamic_guids):
+  # Fix up GUIDs in .h, _i.c, _p.c, and .tlb.
+  overwrite_guids_h(h_file, dynamic_guids)
+  overwrite_guids_iid(iid_file, dynamic_guids)
+  overwrite_guids_proxy(proxy_file, dynamic_guids)
+  if tlb_file:
+    overwrite_guids_tlb(tlb_file, dynamic_guids)
+
+
+# This function removes all occurrences of 'PLACEHOLDER-GUID-' from the
+# template, and if |dynamic_guids| is specified, also replaces the guids within
+# the file. Finally, it writes the resultant output to the |idl| file.
+def generate_idl_from_template(idl_template, dynamic_guids, idl):
+  contents = open(idl_template, 'rb').read()
+  contents = re.sub('PLACEHOLDER-GUID-', '', contents, flags=re.I)
+  if dynamic_guids:
+    for key in dynamic_guids:
+      contents = re.sub(key, dynamic_guids[key], contents, flags=re.I)
+  open(idl, 'wb').write(contents)
+
+
+# This function runs the MIDL compiler with the provided arguments. It creates
+# and returns a tuple of |0,midl_output_dir| on success.
+def run_midl(args, env_dict):
+  midl_output_dir = tempfile.mkdtemp()
+  delete_midl_output_dir = True
+
+  try:
+    popen = subprocess.Popen(args + ['/out', midl_output_dir],
+                             shell=True,
+                             env=env_dict,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    if popen.returncode != 0:
+      return popen.returncode, midl_output_dir
+
+    # Filter junk out of stdout, and write filtered versions. Output we want
+    # to filter is pairs of lines that look like this:
+    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+    # objidl.idl
+    lines = out.decode('utf-8').splitlines()
+    prefixes = ('Processing ', '64 bit Processing ')
+    processing = set(
+        os.path.basename(x) for x in lines if x.startswith(prefixes))
+    for line in lines:
+      if not line.startswith(prefixes) and line not in processing:
+        print(line)
+
+    for f in os.listdir(midl_output_dir):
+      ZapTimestamp(os.path.join(midl_output_dir, f))
+
+    delete_midl_output_dir = False
+  finally:
+    if os.path.exists(midl_output_dir) and delete_midl_output_dir:
+      shutil.rmtree(midl_output_dir)
+
+  return 0, midl_output_dir
+
+
+# This function adds support for dynamic generation of guids: when values are
+# specified as 'uuid5:name', this function will substitute the values with
+# generated dynamic guids using the uuid5 function. The uuid5 function generates
+# a guid based on the SHA-1 hash of a namespace identifier (which is the guid
+# that comes after 'PLACEHOLDER-GUID-') and a name (which is a string, such as a
+# version string "87.1.2.3").
+#
+# For instance, when |dynamic_guid| is of the form:
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791=uuid5:88.0.4307.0
+# ,"
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B=uuid5:88.0.4307.0
+# "
+#
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791" would be substituted
+# with uuid5("158428a4-6014-4978-83ba-9fad0dabe791", "88.0.4307.0"), which is
+# "64700170-AD80-5DE3-924E-2F39D862CFD5". And
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B" would be
+# substituted with uuid5("63B8FFB1-5314-48C9-9C57-93EC8BC6184B", "88.0.4307.0"),
+# which is "7B6E7538-3C38-5565-BC92-42BCEE268D76".
+def uuid5_substitutions(dynamic_guids):
+  for key, value in dynamic_guids.items():
+    if value.startswith("uuid5:"):
+      name = value.split("uuid5:", 1)[1]
+      assert name
+      dynamic_guids[key] = str(uuid.uuid5(uuid.UUID(key), name)).upper()
+
+
+def main(arch, gendir, outdir, dynamic_guids, tlb, h, dlldata, iid, proxy,
+         clang, idl, *flags):
+  # Copy checked-in outputs to final location.
+  source = gendir
+  if os.path.isdir(os.path.join(source, os.path.basename(idl))):
+    source = os.path.join(source, os.path.basename(idl))
+  source = os.path.join(source, arch.split('.')[1])  # Append 'x86' or 'x64'.
+  source = os.path.normpath(source)
+
+  source_exists = True
+  if not os.path.isdir(source):
+    source_exists = False
+    if sys.platform != 'win32':
+      print('Directory %s needs to be populated from Windows first' % source)
+      return 1
+
+    # This is a brand new IDL file that does not have outputs under
+    # third_party\win_build_output\midl. We create an empty directory for now.
+    os.makedirs(source)
+
+  common_files = [h, iid]
+  if tlb != 'none':
+    # Not all projects use tlb files.
+    common_files += [tlb]
+  else:
+    tlb = None
+
+  if dlldata != 'none':
+    # Not all projects use dlldata files.
+    common_files += [dlldata]
+  else:
+    dlldata = None
+
+  if proxy != 'none':
+    # Not all projects use proxy files.
+    common_files += [proxy]
+  else:
+    proxy = None
+
+  for source_file in common_files:
+    file_path = os.path.join(source, source_file)
+    if not os.path.isfile(file_path):
+      source_exists = False
+      if sys.platform != 'win32':
+        print('File %s needs to be generated from Windows first' % file_path)
+        return 1
+
+      # Either this is a brand new IDL file that does not have outputs under
+      # third_party\win_build_output\midl or the file is (unexpectedly) missing.
+      # We create an empty file for now. The rest of the machinery below will
+      # then generate the correctly populated file using the MIDL compiler and
+      # instruct the developer to copy that file under
+      # third_party\win_build_output\midl.
+      open(file_path, 'wb').close()
+    shutil.copy(file_path, outdir)
+
+  if dynamic_guids != 'none':
+    assert '=' in dynamic_guids
+    if dynamic_guids.startswith("ignore_proxy_stub,"):
+      # TODO(ganesh): The custom proxy/stub file ("_p.c") is not generated
+      # correctly for dynamic IIDs (but correctly if there are only dynamic
+      # CLSIDs). The proxy/stub lookup functions generated by MIDL.exe within
+      # "_p.c" rely on a sorted set of vtable lists, which we are not currently
+      # regenerating. At the moment, no project in Chromium that uses dynamic
+      # IIDs is relying on the custom proxy/stub file. So for now, if
+      # |dynamic_guids| is prefixed with "ignore_proxy_stub,", we exclude the
+      # custom proxy/stub file from the directory comparisons.
+      common_files.remove(proxy)
+      dynamic_guids = dynamic_guids.split("ignore_proxy_stub,", 1)[1]
+    dynamic_guids = re.sub('PLACEHOLDER-GUID-', '', dynamic_guids, flags=re.I)
+    dynamic_guids = dynamic_guids.split(',')
+    dynamic_guids = dict(s.split('=') for s in dynamic_guids)
+    uuid5_substitutions(dynamic_guids)
+    if source_exists:
+      overwrite_guids(*(os.path.join(outdir, file) if file else None
+                        for file in [h, iid, proxy, tlb]),
+                      dynamic_guids=dynamic_guids)
+  else:
+    dynamic_guids = None
+
+  # On non-Windows, that's all we can do.
+  if sys.platform != 'win32':
+    return 0
+
+  idl_template = None
+  if dynamic_guids:
+    idl_template = idl
+
+    # posixpath is used here to keep the MIDL-generated files with a uniform
+    # separator of '/' instead of mixed '/' and '\\'.
+    idl = posixpath.join(
+        outdir,
+        os.path.splitext(os.path.basename(idl_template))[0] + '.idl')
+
+    # |idl_template| can contain one or more occurrences of guids that are
+    # substituted with |dynamic_guids|, and then MIDL is run on the substituted
+    # IDL file.
+    generate_idl_from_template(idl_template, dynamic_guids, idl)
+
+  # On Windows, run midl.exe on the input and check that its outputs are
+  # identical to the checked-in outputs (after replacing guids if
+  # |dynamic_guids| is specified).
+
+  # Read the environment block from the file. This is stored in the format used
+  # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+  # trailing vs. separator.
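+  # For example (illustrative), a block like
+  #   'PATH=C:\\bin\x00INCLUDE=C:\\inc\x00\x00'
+  # parses to {'PATH': 'C:\\bin', 'INCLUDE': 'C:\\inc'}.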
+  env_pairs = open(arch).read()[:-2].split('\0')
+  env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+  # Extract the /D options and send them to the preprocessor.
+  preprocessor_options = '-E -nologo -Wno-nonportable-include-path'
+  preprocessor_options += ''.join(
+      [' ' + flag for flag in flags if flag.startswith('/D')])
+  args = ['midl', '/nologo'] + list(flags) + (['/tlb', tlb] if tlb else []) + [
+      '/h', h
+  ] + (['/dlldata', dlldata] if dlldata else []) + ['/iid', iid] + (
+      ['/proxy', proxy] if proxy else
+      []) + ['/cpp_cmd', clang, '/cpp_opt', preprocessor_options, idl]
+
+  returncode, midl_output_dir = run_midl(args, env_dict)
+  if returncode != 0:
+    return returncode
+
+  # Now compare the output in midl_output_dir to the copied-over outputs.
+  _, mismatch, errors = filecmp.cmpfiles(midl_output_dir, outdir, common_files)
+  assert not errors
+
+  if mismatch:
+    print('midl.exe output different from files in %s, see %s' %
+          (outdir, midl_output_dir))
+    for f in mismatch:
+      if f.endswith('.tlb'): continue
+      fromfile = os.path.join(outdir, f)
+      tofile = os.path.join(midl_output_dir, f)
+      print(''.join(
+          difflib.unified_diff(
+              io.open(fromfile).readlines(),
+              io.open(tofile).readlines(), fromfile, tofile)))
+
+    if dynamic_guids:
+      # |idl_template| can contain one or more occurrences of guids prefixed
+      # with 'PLACEHOLDER-GUID-'. We first remove the extraneous
+      # 'PLACEHOLDER-GUID-' prefix and then run MIDL on the substituted IDL
+      # file.
+      # No guid substitutions are done at this point, because we want to compile
+      # with the placeholder guids and then instruct the user to copy the output
+      # over to |source| which is typically src\third_party\win_build_output\.
+      # In future runs, the placeholder guids in |source| are replaced with the
+      # guids specified in |dynamic_guids|.
+      generate_idl_from_template(idl_template, None, idl)
+      returncode, midl_output_dir = run_midl(args, env_dict)
+      if returncode != 0:
+        return returncode
+
+    print('To rebaseline:')
+    print(r'  copy /y %s\* %s' % (midl_output_dir, source))
+    return 1
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(*sys.argv[1:]))
diff --git a/src/build/toolchain/win/ml.py b/src/build/toolchain/win/ml.py
new file mode 100755
index 0000000..6a1b6e5
--- /dev/null
+++ b/src/build/toolchain/win/ml.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps ml.exe or ml64.exe and postprocesses the output to be deterministic.
+Sets timestamp in .obj file to 0, hence incompatible with link.exe /incremental.
+
+Use by prefixing the ml(64).exe invocation with this script:
+    python ml.py ml.exe [args...]"""
+
+import array
+import collections
+import struct
+import subprocess
+import sys
+
+
+class Struct(object):
+  """A thin wrapper around the struct module that returns a namedtuple"""
+  def __init__(self, name, *args):
+    """Pass the name of the return type, and then an interleaved list of
+    format strings as used by the struct module and of field names."""
+    self.fmt = '<' + ''.join(args[0::2])
+    self.type = collections.namedtuple(name, args[1::2])
+
+  def pack_into(self, buffer, offset, data):
+    return struct.pack_into(self.fmt, buffer, offset, *data)
+
+  def unpack_from(self, buffer, offset=0):
+    return self.type(*struct.unpack_from(self.fmt, buffer, offset))
+
+  def size(self):
+    return struct.calcsize(self.fmt)
+
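+# For example (names here are illustrative):
+#   POINT = Struct('POINT', 'i', 'x', 'i', 'y')
+#   POINT.unpack_from(struct.pack('<ii', 3, 4))  # -> POINT(x=3, y=4)
+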
+
+def Subtract(nt, **kwargs):
+  """Subtract(nt, f=2) returns a new namedtuple with 2 subtracted from nt.f"""
+  return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.items()})
+
+
+def MakeDeterministic(objdata):
+  # Takes data produced by ml(64).exe (without any special flags) and
+  # 1. Sets the timestamp to 0
+  # 2. Strips the .debug$S section (which contains an unwanted absolute path)
+
+  # This makes several assumptions about ml's output:
+  # - Section data is in the same order as the corresponding section headers:
+  #   section headers preceding the .debug$S section header have their data
+  #   preceding the .debug$S section data; likewise for section headers
+  #   following the .debug$S section.
+  # - The .debug$S section contains only the absolute path to the obj file and
+  #   nothing else, in particular there's only a single entry in the symbol
+  #   table referring to the .debug$S section.
+  # - There are no COFF line number entries.
+  # - There's no IMAGE_SYM_CLASS_CLR_TOKEN symbol.
+  # These seem to hold in practice; if they stop holding this script needs to
+  # become smarter.
+
+  objdata = array.array('b', objdata)  # Writable, e.g. via struct.pack_into.
+
+  # Read coff header.
+  COFFHEADER = Struct('COFFHEADER',
+                      'H', 'Machine',
+                      'H', 'NumberOfSections',
+                      'I', 'TimeDateStamp',
+                      'I', 'PointerToSymbolTable',
+                      'I', 'NumberOfSymbols',
+
+                      'H', 'SizeOfOptionalHeader',
+                      'H', 'Characteristics')
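+  # 20 bytes total (2+2+4+4+4+2+2), matching the on-disk COFF file header.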
+  coff_header = COFFHEADER.unpack_from(objdata)
+  assert coff_header.SizeOfOptionalHeader == 0  # Only set for binaries.
+
+  # Read section headers following coff header.
+  SECTIONHEADER = Struct('SECTIONHEADER',
+                         '8s', 'Name',
+                         'I', 'VirtualSize',
+                         'I', 'VirtualAddress',
+
+                         'I', 'SizeOfRawData',
+                         'I', 'PointerToRawData',
+                         'I', 'PointerToRelocations',
+                         'I', 'PointerToLineNumbers',
+
+                         'H', 'NumberOfRelocations',
+                         'H', 'NumberOfLineNumbers',
+                         'I', 'Characteristics')
+  section_headers = []
+  debug_section_index = -1
+  for i in range(0, coff_header.NumberOfSections):
+    section_header = SECTIONHEADER.unpack_from(
+        objdata, offset=COFFHEADER.size() + i * SECTIONHEADER.size())
+    assert not section_header[0].startswith(b'/')  # Support short names only.
+    section_headers.append(section_header)
+
+    if section_header.Name == b'.debug$S':
+      assert debug_section_index == -1
+      debug_section_index = i
+  assert debug_section_index != -1
+
+  data_start = COFFHEADER.size() + len(section_headers) * SECTIONHEADER.size()
+
+  # Verify the .debug$S section looks like we expect.
+  assert section_headers[debug_section_index].Name == b'.debug$S'
+  assert section_headers[debug_section_index].VirtualSize == 0
+  assert section_headers[debug_section_index].VirtualAddress == 0
+  debug_size = section_headers[debug_section_index].SizeOfRawData
+  debug_offset = section_headers[debug_section_index].PointerToRawData
+  assert section_headers[debug_section_index].PointerToRelocations == 0
+  assert section_headers[debug_section_index].PointerToLineNumbers == 0
+  assert section_headers[debug_section_index].NumberOfRelocations == 0
+  assert section_headers[debug_section_index].NumberOfLineNumbers == 0
+
+  # Make sure sections in front of .debug$S have their data preceding it.
+  for header in section_headers[:debug_section_index]:
+    assert header.PointerToRawData < debug_offset
+    assert header.PointerToRelocations < debug_offset
+    assert header.PointerToLineNumbers < debug_offset
+
+  # Make sure sections after .debug$S have their data following it.
+  for header in section_headers[debug_section_index + 1:]:
+    # Make sure the .debug$S data is at the very end of section data:
+    assert header.PointerToRawData > debug_offset
+    assert header.PointerToRelocations == 0
+    assert header.PointerToLineNumbers == 0
+
+  # Make sure the first non-empty section's data starts right after the section
+  # headers.
+  for section_header in section_headers:
+    if section_header.PointerToRawData == 0:
+      assert section_header.PointerToRelocations == 0
+      assert section_header.PointerToLineNumbers == 0
+      continue
+    assert section_header.PointerToRawData == data_start
+    break
+
+  # Make sure the symbol table (and hence, string table) appear after the last
+  # section:
+  assert (coff_header.PointerToSymbolTable >=
+      section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData)
+
+  # The symbol table contains a symbol for the no-longer-present .debug$S
+  # section. If we leave it there, lld-link will complain:
+  #
+  #    lld-link: error: .debug$S should not refer to non-existent section 5
+  #
+  # so we need to remove that symbol table entry as well. This shifts symbol
+  # entries around and we need to update symbol table indices in:
+  # - relocations
+  # - line number records (never present)
+  # - one aux symbol entry (IMAGE_SYM_CLASS_CLR_TOKEN; not present in ml output)
+  SYM = Struct('SYM',
+               '8s', 'Name',
+               'I', 'Value',
+               'h', 'SectionNumber',  # Note: Signed!
+               'H', 'Type',
+
+               'B', 'StorageClass',
+               'B', 'NumberOfAuxSymbols')
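+  # Each symbol table entry (and each aux entry) is 18 bytes: 8+4+2+2+1+1.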
+  i = 0
+  debug_sym = -1
+  while i < coff_header.NumberOfSymbols:
+    sym_offset = coff_header.PointerToSymbolTable + i * SYM.size()
+    sym = SYM.unpack_from(objdata, sym_offset)
+
+    # 107 is IMAGE_SYM_CLASS_CLR_TOKEN, which has aux entry "CLR Token
+    # Definition", which contains a symbol index. Check it's never present.
+    assert sym.StorageClass != 107
+
+    # Note: sym.SectionNumber is 1-based, debug_section_index is 0-based.
+    if sym.SectionNumber - 1 == debug_section_index:
+      assert debug_sym == -1, 'more than one .debug$S symbol found'
+      debug_sym = i
+      # Make sure the .debug$S symbol looks like we expect.
+      # In particular, it should have exactly one aux symbol.
+      assert sym.Name == b'.debug$S'
+      assert sym.Value == 0
+      assert sym.Type == 0
+      assert sym.StorageClass == 3
+      assert sym.NumberOfAuxSymbols == 1
+    elif sym.SectionNumber > debug_section_index:
+      sym = Subtract(sym, SectionNumber=1)
+      SYM.pack_into(objdata, sym_offset, sym)
+    i += 1 + sym.NumberOfAuxSymbols
+  assert debug_sym != -1, '.debug$S symbol not found'
+
+  # Note: Usually the .debug$S section is the last, but for files saying
+  # `includelib foo.lib`, like safe_terminate_process.asm in 32-bit builds,
+  # this isn't true: .drectve is after .debug$S.
+
+  # Update symbol table indices in relocations.
+  # There are a few processor types that have one or two relocation types
+  # where SymbolTableIndex has a different meaning, but not for x86.
+  REL = Struct('REL',
+               'I', 'VirtualAddress',
+               'I', 'SymbolTableIndex',
+               'H', 'Type')
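+  # Each relocation entry is 10 bytes: 4+4+2.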
+  for header in section_headers[0:debug_section_index]:
+    for j in range(0, header.NumberOfRelocations):
+      rel_offset = header.PointerToRelocations + j * REL.size()
+      rel = REL.unpack_from(objdata, rel_offset)
+      assert rel.SymbolTableIndex != debug_sym
+      if rel.SymbolTableIndex > debug_sym:
+        rel = Subtract(rel, SymbolTableIndex=2)
+        REL.pack_into(objdata, rel_offset, rel)
+
+  # Update symbol table indices in line numbers -- just check they don't exist.
+  for header in section_headers:
+    assert header.NumberOfLineNumbers == 0
+
+  # Now that all indices are updated, remove the symbol table entry referring to
+  # .debug$S and its aux entry.
+  del objdata[coff_header.PointerToSymbolTable + debug_sym * SYM.size():
+              coff_header.PointerToSymbolTable + (debug_sym + 2) * SYM.size()]
+
+  # Now we know that it's safe to write out the input data, with just the
+  # timestamp overwritten to 0, the last section header cut out (and the
+  # offsets of all other section headers decremented by the size of that
+  # one section header), and the last section's data cut out. The symbol
+  # table offset needs to be reduced by one section header and the size of
+  # the missing section.
+  # (The COFF spec only requires on-disk sections to be aligned in image
+  # files; for obj files alignment isn't required. If that weren't the case,
+  # deleting slices of data would not generally be safe.)
+
+  # Update section offsets and remove .debug$S section data.
+  for i in range(0, debug_section_index):
+    header = section_headers[i]
+    if header.SizeOfRawData:
+      header = Subtract(header, PointerToRawData=SECTIONHEADER.size())
+    if header.NumberOfRelocations:
+      header = Subtract(header, PointerToRelocations=SECTIONHEADER.size())
+    if header.NumberOfLineNumbers:
+      header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size())
+    SECTIONHEADER.pack_into(
+        objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+  for i in range(debug_section_index + 1, len(section_headers)):
+    header = section_headers[i]
+    shift = SECTIONHEADER.size() + debug_size
+    if header.SizeOfRawData:
+      header = Subtract(header, PointerToRawData=shift)
+    if header.NumberOfRelocations:
+      header = Subtract(header, PointerToRelocations=shift)
+    if header.NumberOfLineNumbers:
+      header = Subtract(header, PointerToLineNumbers=shift)
+    SECTIONHEADER.pack_into(
+        objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+
+  del objdata[debug_offset:debug_offset + debug_size]
+
+  # Finally, remove .debug$S section header and update coff header.
+  coff_header = coff_header._replace(TimeDateStamp=0)
+  coff_header = Subtract(coff_header,
+                         NumberOfSections=1,
+                         PointerToSymbolTable=SECTIONHEADER.size() + debug_size,
+                         NumberOfSymbols=2)
+  COFFHEADER.pack_into(objdata, 0, coff_header)
+
+  del objdata[
+      COFFHEADER.size() + debug_section_index * SECTIONHEADER.size():
+      COFFHEADER.size() + (debug_section_index + 1) * SECTIONHEADER.size()]
+
+  # All done!
+  if sys.version_info.major == 2:
+    return objdata.tostring()
+  else:
+    return objdata.tobytes()
+
+
+def main():
+  ml_result = subprocess.call(sys.argv[1:])
+  if ml_result != 0:
+    return ml_result
+
+  objfile = None
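+  # ml(64).exe names its output object file via the /Fo<path> flag; scan the
+  # command line for it (the last /Fo wins).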
+  for i in range(1, len(sys.argv)):
+    if sys.argv[i].startswith('/Fo'):
+      objfile = sys.argv[i][len('/Fo'):]
+  assert objfile, 'failed to find ml output'
+
+  with open(objfile, 'rb') as f:
+    objdata = f.read()
+  objdata = MakeDeterministic(objdata)
+  with open(objfile, 'wb') as f:
+    f.write(objdata)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/win/rc/README.md b/src/build/toolchain/win/rc/README.md
new file mode 100644
index 0000000..e6d38f9
--- /dev/null
+++ b/src/build/toolchain/win/rc/README.md
@@ -0,0 +1,30 @@
+# rc
+
+This contains a cross-platform reimplementation of rc.exe.
+
+This exists mainly to compile .rc files on non-Windows hosts for cross builds.
+However, it also runs on Windows for two reasons:
+
+1. To compare the output of Microsoft's rc.exe with the reimplementation's and
+   check that they produce bitwise identical output.
+2. The reimplementation supports printing resource files in /showIncludes
+   output, which helps get build dependencies right.
+
+The resource compiler consists of two parts:
+
+1. A Python script, rc.py, that serves as the driver.  It does Unicode
+   conversions, runs the input through the preprocessor, and then calls the
+   actual resource compiler.
+2. The resource compiler, a C++ binary obtained via sha1 files from Google
+   Storage.  The binary's code currently lives at
+   https://github.com/nico/hack/tree/master/res, even though work is (slowly)
+   underway to upstream it into LLVM.
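+
+For example, a typical driver invocation looks like this (paths and defines
+are illustrative; note that `/fo` takes its argument attached, with no space):
+
+```sh
+python rc.py -I../.. -DSOME_DEFINE /fochrome.res ../../chrome/app/chrome.rc
+```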
+
+To update the rc binary, run `upload_rc_binaries.sh` in this directory, on a
+Mac.
+
+rc isn't built from source as part of the regular chrome build because
+it's needed in a gn toolchain tool, and these currently cannot have deps.
+Alternatively, gn could be taught about deps on tools, or rc invocations
+could be made a template rather than a tool (like e.g. yasm invocations,
+which can have deps); then the prebuilt binaries wouldn't be needed.
diff --git a/src/build/toolchain/win/rc/linux64/rc.sha1 b/src/build/toolchain/win/rc/linux64/rc.sha1
new file mode 100644
index 0000000..ad14ca4
--- /dev/null
+++ b/src/build/toolchain/win/rc/linux64/rc.sha1
@@ -0,0 +1 @@
+2d0c766039264dc2514d005a42f074af4838a446
\ No newline at end of file
diff --git a/src/build/toolchain/win/rc/mac/rc.sha1 b/src/build/toolchain/win/rc/mac/rc.sha1
new file mode 100644
index 0000000..dbd6302
--- /dev/null
+++ b/src/build/toolchain/win/rc/mac/rc.sha1
@@ -0,0 +1 @@
+4c25c3bcb6608109bb52028d008835895cf72629
\ No newline at end of file
diff --git a/src/build/toolchain/win/rc/rc.py b/src/build/toolchain/win/rc/rc.py
new file mode 100755
index 0000000..2ab4122
--- /dev/null
+++ b/src/build/toolchain/win/rc/rc.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""usage: rc.py [options] input.res
+A resource compiler for .rc files.
+
+options:
+-h, --help     Print this message.
+-I<dir>        Add include path, used for both headers and resources.
+-imsvc<dir>    Add system include path, used for preprocessing only.
+/winsysroot<d> Set winsysroot, used for preprocessing only.
+-D<sym>        Define a macro for the preprocessor.
+/fo<out>       Set path of output .res file.
+/nologo        Ignored (rc.py doesn't print a logo by default).
+/showIncludes  Print referenced header and resource files."""
+
+from __future__ import print_function
+from collections import namedtuple
+import codecs
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = \
+    os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(THIS_DIR))))
+
+
+def ParseFlags():
+  """Parses flags off sys.argv and returns the parsed flags."""
+  # Can't use optparse / argparse because of /fo flag :-/
+  includes = []
+  imsvcs = []
+  winsysroot = []
+  defines = []
+  output = None
+  input = None
+  show_includes = False
+  # Parse.
+  for flag in sys.argv[1:]:
+    if flag == '-h' or flag == '--help':
+      print(__doc__)
+      sys.exit(0)
+    if flag.startswith('-I'):
+      includes.append(flag)
+    elif flag.startswith('-imsvc'):
+      imsvcs.append(flag)
+    elif flag.startswith('/winsysroot'):
+      winsysroot = [flag]
+    elif flag.startswith('-D'):
+      defines.append(flag)
+    elif flag.startswith('/fo'):
+      if output:
+        print('rc.py: error: multiple /fo flags', '/fo' + output, flag,
+              file=sys.stderr)
+        sys.exit(1)
+      output = flag[3:]
+    elif flag == '/nologo':
+      pass
+    elif flag == '/showIncludes':
+      show_includes = True
+    elif (flag.startswith('-') or
+          (flag.startswith('/') and not os.path.exists(flag))):
+      print('rc.py: error: unknown flag', flag, file=sys.stderr)
+      print(__doc__, file=sys.stderr)
+      sys.exit(1)
+    else:
+      if input:
+        print('rc.py: error: multiple inputs:', input, flag, file=sys.stderr)
+        sys.exit(1)
+      input = flag
+  # Validate and set default values.
+  if not input:
+    print('rc.py: error: no input file', file=sys.stderr)
+    sys.exit(1)
+  if not output:
+    output = os.path.splitext(input)[0] + '.res'
+  Flags = namedtuple('Flags', [
+      'includes', 'defines', 'output', 'imsvcs', 'winsysroot', 'input',
+      'show_includes'
+  ])
+  return Flags(includes=includes,
+               defines=defines,
+               output=output,
+               imsvcs=imsvcs,
+               winsysroot=winsysroot,
+               input=input,
+               show_includes=show_includes)
+
+
+def ReadInput(input):
+  """"Reads input and returns it. For UTF-16LEBOM input, converts to UTF-8."""
+  # Microsoft's rc.exe only supports unicode in the form of UTF-16LE with a BOM.
+  # Our rc binary sniffs for UTF-16LE.  If that's not found, if /utf-8 is
+  # passed, the input is treated as UTF-8.  If /utf-8 is not passed and the
+  # input is not UTF-16LE, then our rc errors out on characters outside of
+  # 7-bit ASCII.  Since the driver always converts UTF-16LE to UTF-8 here (for
+  # the preprocessor, which doesn't support UTF-16LE), our rc will either see
+  # UTF-8 with the /utf-8 flag (for UTF-16LE input), or ASCII input.
+  # This is compatible with Microsoft rc.exe.  If we wanted, we could expose
+  # a /utf-8 flag for the driver for UTF-8 .rc inputs too.
+  # TODO(thakis): Microsoft's rc.exe supports BOM-less UTF-16LE. We currently
+  # don't, but for chrome it currently doesn't matter.
+  is_utf8 = False
+  try:
+    with open(input, 'rb') as rc_file:
+      rc_file_data = rc_file.read()
+      if rc_file_data.startswith(codecs.BOM_UTF16_LE):
+        rc_file_data = rc_file_data[2:].decode('utf-16le').encode('utf-8')
+        is_utf8 = True
+  except IOError:
+    print('rc.py: failed to open', input, file=sys.stderr)
+    sys.exit(1)
+  except UnicodeDecodeError:
+    print('rc.py: failed to decode UTF-16 despite BOM', input, file=sys.stderr)
+    sys.exit(1)
+  return rc_file_data, is_utf8
+
+
+def Preprocess(rc_file_data, flags):
+  """Runs the input file through the preprocessor."""
+  clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
+                       'Release+Asserts', 'bin', 'clang-cl')
+  if sys.platform == 'win32':
+    clang += '.exe'
+  # Let the preprocessor write to a temp file so that it doesn't interfere
+  # with /showIncludes output on stdout.
+  temp_handle, temp_file = tempfile.mkstemp(suffix='.i')
+  # Closing temp_handle immediately defeats the purpose of mkstemp(), but I
+  # can't figure out how else to let the preprocessor write to the temp file
+  # on Windows.
+  os.close(temp_handle)
+  clang_cmd = [clang, '/P', '/DRC_INVOKED', '/TC', '-', '/Fi' + temp_file]
+  if flags.imsvcs:
+    clang_cmd += ['/X']
+  if os.path.dirname(flags.input):
+    # This must precede flags.includes.
+    clang_cmd.append('-I' + os.path.dirname(flags.input))
+  if flags.show_includes:
+    clang_cmd.append('/showIncludes')
+  clang_cmd += flags.imsvcs + flags.winsysroot + flags.includes + flags.defines
+  p = subprocess.Popen(clang_cmd, stdin=subprocess.PIPE)
+  p.communicate(input=rc_file_data)
+  if p.returncode != 0:
+    sys.exit(p.returncode)
+  preprocessed_output = open(temp_file, 'rb').read()
+  os.remove(temp_file)
+
+  # rc.exe has a wacko preprocessor:
+  # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381033(v=vs.85).aspx
+  # """RC treats files with the .c and .h extensions in a special manner. It
+  # assumes that a file with one of these extensions does not contain
+  # resources. If a file has the .c or .h file name extension, RC ignores all
+  # lines in the file except the preprocessor directives."""
+  # Thankfully, the Microsoft headers are mostly good about putting everything
+  # in the system headers behind `if !defined(RC_INVOKED)`, so regular
+  # preprocessing with RC_INVOKED defined works.
+  return preprocessed_output
+
+
+def RunRc(preprocessed_output, is_utf8, flags):
+  if sys.platform.startswith('linux'):
+    rc = os.path.join(THIS_DIR, 'linux64', 'rc')
+  elif sys.platform == 'darwin':
+    rc = os.path.join(THIS_DIR, 'mac', 'rc')
+  elif sys.platform == 'win32':
+    rc = os.path.join(THIS_DIR, 'win', 'rc.exe')
+  else:
+    print('rc.py: error: unsupported platform', sys.platform, file=sys.stderr)
+    sys.exit(1)
+  rc_cmd = [rc]
+  # Make sure rc-relative resources can be found:
+  if os.path.dirname(flags.input):
+    rc_cmd.append('/cd' + os.path.dirname(flags.input))
+  rc_cmd.append('/fo' + flags.output)
+  if is_utf8:
+    rc_cmd.append('/utf-8')
+  # TODO(thakis): cl currently always prints full paths for /showIncludes,
+  # but clang-cl /P doesn't.  Which one is right?
+  if flags.show_includes:
+    rc_cmd.append('/showIncludes')
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+  rc_cmd += flags.includes
+  p = subprocess.Popen(rc_cmd, stdin=subprocess.PIPE)
+  p.communicate(input=preprocessed_output)
+
+  if flags.show_includes and p.returncode == 0:
+    TOOL_DIR = os.path.dirname(os.path.relpath(THIS_DIR)).replace("\\", "/")
+    # Since tool("rc") can't have deps, add deps on this script and on rc.py
+    # and its deps here, so that rc edges become dirty if rc.py changes.
+    print('Note: including file: {}/tool_wrapper.py'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/rc.py'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/linux64/rc.sha1'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/mac/rc.sha1'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/win/rc.exe.sha1'.format(TOOL_DIR))
+
+  return p.returncode
+
+
+def CompareToMsRcOutput(preprocessed_output, is_utf8, flags):
+  msrc_in = flags.output + '.preprocessed.rc'
+
+  # Strip preprocessor line markers.
+  preprocessed_output = re.sub(br'^#.*$', b'', preprocessed_output, flags=re.M)
+  if is_utf8:
+    preprocessed_output = preprocessed_output.decode('utf-8').encode('utf-16le')
+  with open(msrc_in, 'wb') as f:
+    f.write(preprocessed_output)
+
+  msrc_out = flags.output + '_ms_rc'
+  msrc_cmd = ['rc', '/nologo', '/x', '/fo' + msrc_out]
+
+  # Make sure rc-relative resources can be found. rc.exe looks for external
+  # resource files next to the file, but the preprocessed file isn't where the
+  # input was.
+  # Note that rc searches external resource files in the order of
+  # 1. next to the input file
+  # 2. relative to cwd
+  # 3. next to -I directories
+  # Changing the cwd means we'd have to rewrite all -I flags, so just add
+  # the input file dir as -I flag. That technically gets the order of 1 and 2
+  # wrong, but in Chromium's build the cwd is the gn out dir, and generated
+  # files there are in obj/ and gen/, so this difference doesn't matter in
+  # practice.
+  if os.path.dirname(flags.input):
+    msrc_cmd += [ '-I' + os.path.dirname(flags.input) ]
+
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+  msrc_cmd += flags.includes
+
+  # Input must come last.
+  msrc_cmd += [ msrc_in ]
+
+  rc_exe_exit_code = subprocess.call(msrc_cmd)
+  # Assert Microsoft rc.exe and rc.py produced identical .res files.
+  if rc_exe_exit_code == 0:
+    import filecmp
+    assert filecmp.cmp(msrc_out, flags.output)
+  return rc_exe_exit_code
+
+
+def main():
+  # This driver has to do these things:
+  # 1. Parse flags.
+  # 2. Convert the input from UTF-16LE to UTF-8 if needed.
+  # 3. Pass the input through a preprocessor (and clean up the preprocessor's
+  #    output in minor ways).
+  # 4. Call rc for the heavy lifting.
+  flags = ParseFlags()
+  rc_file_data, is_utf8 = ReadInput(flags.input)
+  preprocessed_output = Preprocess(rc_file_data, flags)
+  rc_exe_exit_code = RunRc(preprocessed_output, is_utf8, flags)
+
+  # 5. On Windows, we also call Microsoft's rc.exe and check that we produced
+  #   the same output.
+  # Since Microsoft's rc has a preprocessor that only accepts 32 characters
+  # for macro names, feed the clang-preprocessed source into it instead
+  # of using ms rc's preprocessor.
+  if sys.platform == 'win32' and rc_exe_exit_code == 0:
+    rc_exe_exit_code = CompareToMsRcOutput(preprocessed_output, is_utf8, flags)
+
+  return rc_exe_exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/toolchain/win/rc/upload_rc_binaries.sh b/src/build/toolchain/win/rc/upload_rc_binaries.sh
new file mode 100755
index 0000000..ec4df4c
--- /dev/null
+++ b/src/build/toolchain/win/rc/upload_rc_binaries.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+set -eu
+
+# Builds new rc binaries at head and uploads them to google storage.
+# The new .sha1 files will be in the tree after this has run.
+
+if [[ "$OSTYPE" != "darwin"* ]]; then
+  echo "this script must run on a mac"
+  exit 1
+fi
+
+DIR="$(cd "$(dirname "${0}" )" && pwd)"
+SRC_DIR="$DIR/../../../.."
+
+# Make sure Linux and Windows sysroots are installed, for distrib.py.
+$SRC_DIR/build/linux/sysroot_scripts/install-sysroot.py --arch amd64
+$SRC_DIR/build/vs_toolchain.py update --force
+
+# Make a temporary directory.
+WORK_DIR=$(mktemp -d)
+if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
+  echo "could not create temp dir"
+  exit 1
+fi
+function cleanup {
+  rm -rf "$WORK_DIR"
+}
+trap cleanup EXIT
+
+# Check out rc and build it in the temporary directory. Copy binaries over.
+pushd "$WORK_DIR" > /dev/null
+git clone -q https://github.com/nico/hack
+cd hack/res
+./distrib.py "$SRC_DIR"
+popd > /dev/null
+cp "$WORK_DIR/hack/res/rc-linux64" "$DIR/linux64/rc"
+cp "$WORK_DIR/hack/res/rc-mac" "$DIR/mac/rc"
+cp "$WORK_DIR/hack/res/rc-win.exe" "$DIR/win/rc.exe"
+
+# Upload binaries to cloud storage.
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/linux64/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/mac/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/win/rc.exe"
diff --git a/src/build/toolchain/win/rc/win/rc.exe.sha1 b/src/build/toolchain/win/rc/win/rc.exe.sha1
new file mode 100644
index 0000000..3fdbfc0
--- /dev/null
+++ b/src/build/toolchain/win/rc/win/rc.exe.sha1
@@ -0,0 +1 @@
+ba51d69039ffb88310b72b6568efa9f0de148f8f
\ No newline at end of file
diff --git a/src/build/toolchain/win/setup_toolchain.py b/src/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000..f66de0a
--- /dev/null
+++ b/src/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,311 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the visual studio install location and the location of the
+# win tool. The script assumes that the root build directory is the current dir
+# and the files will be written to the current directory.
+
+from __future__ import print_function
+
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+import gn_helpers
+
+SCRIPT_DIR = os.path.dirname(__file__)
+
+def _ExtractImportantEnvironment(output_of_set):
+  """Extracts environment variables required for the toolchain to run from
+  a textual dump output by the cmd.exe 'set' command."""
+  envvars_to_save = (
+      'cipd_cache_dir', # needed by vpython
+      'homedrive', # needed by vpython
+      'homepath', # needed by vpython
+      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+      'include',
+      'lib',
+      'libpath',
+      'luci_context', # needed by vpython
+      'path',
+      'pathext',
+      'systemroot',
+      'temp',
+      'tmp',
+      'userprofile', # needed by vpython
+      'vpython_virtualenv_root' # needed by vpython
+      )
+  env = {}
+  # This occasionally happens and leads to misleading SYSTEMROOT error messages
+  # if not caught here.
+  if output_of_set.count('=') == 0:
+    raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
+  for line in output_of_set.splitlines():
+    for envvar in envvars_to_save:
+      if re.match(envvar + '=', line.lower()):
+        var, setting = line.split('=', 1)
+        if envvar == 'path':
+          # Our own rules and actions in Chromium rely on python being in the
+          # path. Add the path to this python here so that if it's not in the
+          # path when ninja is run later, python will still be found.
+          setting = os.path.dirname(sys.executable) + os.pathsep + setting
+        env[var.upper()] = setting
+        break
+  if sys.platform in ('win32', 'cygwin'):
+    for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+      if required not in env:
+        raise Exception('Environment variable "%s" '
+                        'required to be set to valid path' % required)
+  return env
+
+
+def _DetectVisualStudioPath():
+  """Return path to the installed Visual Studio.
+  """
+
+  # Use the code in build/vs_toolchain.py to avoid duplicating code.
+  chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
+  sys.path.append(os.path.join(chromium_dir, 'build'))
+  import vs_toolchain
+  return vs_toolchain.DetectVisualStudioPath()
+
+
+def _LoadEnvFromBat(args):
+  """Given a bat command, runs it and returns env vars set by it."""
+  args = args[:]
+  args.extend(('&&', 'set'))
+  popen = subprocess.Popen(
+      args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  variables, _ = popen.communicate()
+  if popen.returncode != 0:
+    raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+  return variables.decode(errors='ignore')
+
+
+def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store):
+  """Returns a dictionary with environment variables that must be set while
+  running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe)."""
+  # Check if we are running in the SDK command line environment and use
+  # the setup script from the SDK if so. |cpu| should be either
+  # 'x86' or 'x64' or 'arm' or 'arm64'.
+  assert cpu in ('x86', 'x64', 'arm', 'arm64')
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+    # Load environment from json file.
+    env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu))
+    env = json.load(open(env))['env']
+    if env['VSINSTALLDIR'] == [["..", "..\\"]]:
+      # Old-style paths were relative to the win_sdk\bin directory.
+      json_relative_dir = os.path.join(sdk_dir, 'bin')
+    else:
+      # New-style paths are relative to the toolchain directory.
+      json_relative_dir = toolchain_root
+    for k in env:
+      entries = [os.path.join(*([json_relative_dir] + e)) for e in env[k]]
+      # clang-cl wants INCLUDE to be ;-separated even on non-Windows,
+      # lld-link wants LIB to be ;-separated even on non-Windows.  Path gets :.
+      # The separator for INCLUDE here must match the one used in main() below.
+      sep = os.pathsep if k == 'PATH' else ';'
+      env[k] = sep.join(entries)
+    # PATH is a bit of a special case, it's in addition to the current PATH.
+    env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH']
+    # Augment with the current env to pick up TEMP and friends.
+    for k in os.environ:
+      if k not in env:
+        env[k] = os.environ[k]
+
+    varlines = []
+    for k in sorted(env.keys()):
+      varlines.append('%s=%s' % (str(k), str(env[k])))
+    variables = '\n'.join(varlines)
+
+    # Check that the json file contained the same environment as the .cmd file.
+    if sys.platform in ('win32', 'cygwin'):
+      script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
+      arg = '/' + cpu
+      json_env = _ExtractImportantEnvironment(variables)
+      cmd_env = _ExtractImportantEnvironment(_LoadEnvFromBat([script, arg]))
+      assert _LowercaseDict(json_env) == _LowercaseDict(cmd_env)
+  else:
+    if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
+      os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
+    # We only support x64-hosted tools.
+    script_path = os.path.normpath(os.path.join(
+                                       os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                       'VC/vcvarsall.bat'))
+    if not os.path.exists(script_path):
+      # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+      # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+      # variable. Since vcvarsall.bat appends to the INCLUDE, LIB, and LIBPATH
+      # environment variables we need to clear those to avoid getting double
+      # entries when vcvarsall.bat has been run before gn gen. vcvarsall.bat
+      # also adds to PATH, but there is no clean way of clearing that and it
+      # doesn't seem to cause problems.
+      if 'VSINSTALLDIR' in os.environ:
+        del os.environ['VSINSTALLDIR']
+        # INCLUDE/LIB/LIBPATH may or may not be set; remove them if present.
+        os.environ.pop('INCLUDE', None)
+        os.environ.pop('LIB', None)
+        os.environ.pop('LIBPATH', None)
+      other_path = os.path.normpath(os.path.join(
+                                        os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                        'VC/Auxiliary/Build/vcvarsall.bat'))
+      if not os.path.exists(other_path):
+        raise Exception('%s is missing - make sure VC++ tools are installed.' %
+                        script_path)
+      script_path = other_path
+    cpu_arg = "amd64"
+    if cpu != 'x64':
+      # x64 is the default target CPU, so any other CPU needs an explicit
+      # target suffix.
+      cpu_arg += '_' + cpu
+    args = [script_path, cpu_arg, ]
+    # The store target must come before any SDK version declaration.
+    if target_store:
+      args.append('store')
+    # Explicitly specifying the SDK version to build with to avoid accidentally
+    # building with a new and untested SDK. This should stay in sync with the
+    # packaged toolchain in build/vs_toolchain.py.
+    args.append('10.0.19041.0')
+    variables = _LoadEnvFromBat(args)
+  return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
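+  # E.g. {'A': '1', 'B': '2'} becomes 'A=1\0B=2\0\0' (the dict's iteration
+  # order determines the order of the entries).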
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.items():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def _LowercaseDict(d):
+  """Returns a copy of `d` with both key and values lowercased.
+
+  Args:
+    d: dict to lowercase (e.g. {'A': 'BcD'}).
+
+  Returns:
+    A dict with both keys and values lowercased (e.g.: {'a': 'bcd'}).
+  """
+  return {k.lower(): d[k].lower() for k in d}
+
+
+def FindFileInEnvList(env, env_name, separator, file_name, optional=False):
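+  """Returns the realpath of the first directory in env[env_name], split on
+  |separator|, that contains |file_name|; returns '' if |optional| is set and
+  no directory matches."""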
+  parts = env[env_name].split(separator)
+  for path in parts:
+    if os.path.exists(os.path.join(path, file_name)):
+      return os.path.realpath(path)
+  assert optional, "%s is not found in %s:\n%s\nCheck if it is installed." % (
+      file_name, env_name, '\n'.join(parts))
+  return ''
+
+
+def main():
+  if len(sys.argv) != 7:
+    print('Usage: setup_toolchain.py '
+          '<visual studio path> <win sdk path> '
+          '<runtime dirs> <target_os> <target_cpu> '
+          '<environment block name|none>')
+    sys.exit(2)
+  # toolchain_root and win_sdk_path are only read if the hermetic Windows
+  # toolchain is set, that is if DEPOT_TOOLS_WIN_TOOLCHAIN is not set to 0.
+  # With the hermetic Windows toolchain, the visual studio path in argv[1]
+  # is the root of the Windows toolchain directory.
+  toolchain_root = sys.argv[1]
+  win_sdk_path = sys.argv[2]
+
+  runtime_dirs = sys.argv[3]
+  target_os = sys.argv[4]
+  target_cpu = sys.argv[5]
+  environment_block_name = sys.argv[6]
+  if environment_block_name == 'none':
+    environment_block_name = ''
+
+  target_store = target_os == 'winuwp'
+
+  cpus = ('x86', 'x64', 'arm', 'arm64')
+  assert target_cpu in cpus
+  vc_bin_dir = ''
+  vc_lib_path = ''
+  vc_lib_atlmfc_path = ''
+  vc_lib_um_path = ''
+  include = ''
+  lib = ''
+
+  # TODO(scottmg|goma): Do we need an equivalent of
+  # ninja_use_custom_environment_files?
+
+  def relflag(s):  # Make s relative to builddir when cwd and sdk on same drive.
+    try:
+      return os.path.relpath(s)
+    except ValueError:
+      return s
+
+  def q(s):  # Quote s if it contains spaces or other weird characters.
+    return s if re.match(r'^[a-zA-Z0-9._/\\:-]*$', s) else '"' + s + '"'
+
+  for cpu in cpus:
+    if cpu == target_cpu:
+      # Extract environment variables for subprocesses.
+      env = _LoadToolchainEnv(cpu, toolchain_root, win_sdk_path, target_store)
+      env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
+
+      vc_bin_dir = FindFileInEnvList(env, 'PATH', os.pathsep, 'cl.exe')
+      vc_lib_path = FindFileInEnvList(env, 'LIB', ';', 'msvcrt.lib')
+      vc_lib_atlmfc_path = FindFileInEnvList(
+          env, 'LIB', ';', 'atls.lib', optional=True)
+      vc_lib_um_path = FindFileInEnvList(env, 'LIB', ';', 'user32.lib')
+
+      # The separator for INCLUDE here must match the one used in
+      # _LoadToolchainEnv() above.
+      include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p]
+      include = list(map(relflag, include))
+
+      lib = [p.replace('"', r'\"') for p in env['LIB'].split(';') if p]
+      lib = list(map(relflag, lib))
+
+      include_I = ' '.join([q('/I' + i) for i in include])
+      include_imsvc = ' '.join([q('-imsvc' + i) for i in include])
+      libpath_flags = ' '.join([q('-libpath:' + i) for i in lib])
+
+      if environment_block_name:
+        env_block = _FormatAsEnvironmentBlock(env)
+        with open(environment_block_name, 'w') as f:
+          f.write(env_block)
+
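+  # These prints are consumed as GN values by the toolchain definitions
+  # (typically via exec_script); gn_helpers.ToGNString emits GN-quoted
+  # literals.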
+  print('vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir))
+  assert include_I
+  print('include_flags_I = ' + gn_helpers.ToGNString(include_I))
+  assert include_imsvc
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and win_sdk_path:
+    print('include_flags_imsvc = ' +
+          gn_helpers.ToGNString(q('/winsysroot' + relflag(toolchain_root))))
+  else:
+    print('include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc))
+  print('vc_lib_path = ' + gn_helpers.ToGNString(vc_lib_path))
+  # An atlmfc library path may be introduced in the future for store builds,
+  # so only output the result if one exists.
+  if vc_lib_atlmfc_path:
+    print('vc_lib_atlmfc_path = ' + gn_helpers.ToGNString(vc_lib_atlmfc_path))
+  print('vc_lib_um_path = ' + gn_helpers.ToGNString(vc_lib_um_path))
+  print('paths = ' + gn_helpers.ToGNString(env['PATH']))
+  assert libpath_flags
+  print('libpath_flags = ' + gn_helpers.ToGNString(libpath_flags))
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/toolchain/win/tool_wrapper.py b/src/build/toolchain/win/tool_wrapper.py
new file mode 100644
index 0000000..9327369
--- /dev/null
+++ b/src/build/toolchain/win/tool_wrapper.py
@@ -0,0 +1,190 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+This file is copied to the build directory as part of toolchain setup and
+is used to set up calls to tools used by the build that need wrappers.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import sys
+
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+def main(args):
+  exit_code = WinTool().Dispatch(args)
+  if exit_code is not None:
+    sys.exit(exit_code)
+
+
+class WinTool(object):
+  """This class performs all the Windows tooling steps. The methods can either
+  be executed directly, or dispatched from an argument list."""
+
+  def _UseSeparateMspdbsrv(self, env, args):
+    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+    shared one."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    if args[0] != 'link.exe':
+      return
+
+    # Use the output filename passed to the linker to generate an endpoint name
+    # for mspdbsrv.exe.
+    endpoint_name = None
+    for arg in args:
+      m = _LINK_EXE_OUT_ARG.match(arg)
+      if m:
+        endpoint_name = re.sub(r'\W+', '',
+            '%s_%d' % (m.group('out'), os.getpid()))
+        break
+
+    if endpoint_name is None:
+      return
+
+    # Adds the appropriate environment variable. This will be read by link.exe
+    # to know which instance of mspdbsrv.exe it should connect to (if it's
+    # not set then the default endpoint is used).
+    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+  def Dispatch(self, args):
+    """Dispatches a string command to a method."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    method = "Exec%s" % self._CommandifyName(args[0])
+    return getattr(self, method)(*args[1:])
+
+  def _CommandifyName(self, name_string):
+    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+    return name_string.title().replace('-', '')
+
+  def _GetEnv(self, arch):
+    """Gets the saved environment from a file for a given architecture."""
+    # The environment is saved as an "environment block" (see CreateProcess
+    # and msvs_emulation for details). We convert to a dict here.
+    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
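+    # E.g. 'A=1\0B=2\0\0' parses to {'A': '1', 'B': '2'}.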
+    pairs = open(arch).read()[:-2].split('\0')
+    kvs = [item.split('=', 1) for item in pairs]
+    return dict(kvs)
+
+  def ExecDeleteFile(self, path):
+    """Simple file delete command."""
+    if os.path.exists(path):
+      os.unlink(path)
+
+  def ExecRecursiveMirror(self, source, dest):
+    """Emulation of rm -rf out && cp -af in out."""
+    if os.path.exists(dest):
+      if os.path.isdir(dest):
+        def _on_error(fn, path, dummy_excinfo):
+          # The operation failed, possibly because the file is set to
+          # read-only. If that's why, make it writable and try the op again.
+          if not os.access(path, os.W_OK):
+            os.chmod(path, stat.S_IWRITE)
+          fn(path)
+        shutil.rmtree(dest, onerror=_on_error)
+      else:
+        if not os.access(dest, os.W_OK):
+          # Attempt to make the file writable before deleting it.
+          os.chmod(dest, stat.S_IWRITE)
+        os.unlink(dest)
+
+    if os.path.isdir(source):
+      shutil.copytree(source, dest)
+    else:
+      shutil.copy2(source, dest)
+      # Try to diagnose crbug.com/741603
+      if not os.path.exists(dest):
+        raise Exception("Copying of %s to %s failed" % (source, dest))
+
+  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+    """Filter diagnostic output from link that looks like:
+    '   Creating library ui.dll.lib and object ui.dll.exp'
+    This happens when there are exports from the dll or exe.
+    """
+    env = self._GetEnv(arch)
+    if use_separate_mspdbsrv == 'True':
+      self._UseSeparateMspdbsrv(env, args)
+    if sys.platform == 'win32':
+      args = list(args)  # *args is a tuple by default, which is read-only.
+      args[0] = args[0].replace('/', '\\')
+    # https://docs.python.org/2/library/subprocess.html:
+    # "On Unix with shell=True [...] if args is a sequence, the first item
+    # specifies the command string, and any additional items will be treated as
+    # additional arguments to the shell itself.  That is to say, Popen does the
+    # equivalent of:
+    #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+    # For that reason, since going through the shell doesn't seem necessary on
+    # non-Windows don't do that there.
+    pe_name = None
+    for arg in args:
+      m = _LINK_EXE_OUT_ARG.match(arg)
+      if m:
+        pe_name = m.group('out')
+    link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    # Read output one line at a time as it shows up to avoid OOM failures when
+    # GBs of output is produced.
+    for line in link.stdout:
+      if (not line.startswith(b'   Creating library ')
+          and not line.startswith(b'Generating code')
+          and not line.startswith(b'Finished generating code')):
+        # |line| is bytes and keeps its newline; decode and write it as-is so
+        # Python 3 doesn't print a b'...' repr and Python 2 doesn't double-space.
+        sys.stdout.write(line.decode('utf8', 'replace'))
+    return link.wait()
+
+  def ExecAsmWrapper(self, arch, *args):
+    """Filter logo banner from invocations of asm.exe."""
+    env = self._GetEnv(arch)
+    if sys.platform == 'win32':
+      # Windows ARM64 uses clang-cl as assembler which has '/' as path
+      # separator, convert it to '\\' when running on Windows.
+      args = list(args) # *args is a tuple by default, which is read-only
+      args[0] = args[0].replace('/', '\\')
+    popen = subprocess.Popen(args, shell=True, env=env,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    for line in out.decode('utf8').splitlines():
+      if not line.startswith(' Assembling: '):
+        print(line)
+    return popen.returncode
+
+  def ExecRcWrapper(self, arch, *args):
+    """Converts .rc files to .res files."""
+    env = self._GetEnv(arch)
+    args = list(args)
+    rcpy_args = args[:]
+    rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')]
+    rcpy_args.append('/showIncludes')
+    return subprocess.call(rcpy_args, env=env)
+
+  def ExecActionWrapper(self, arch, rspfile, *dirname):
+    """Runs an action command line from a response file using the environment
+    for |arch|. If |dirname| is supplied, use that as the working directory."""
+    env = self._GetEnv(arch)
+    # TODO(scottmg): This is a temporary hack to get some specific variables
+    # through to actions that are set after GN-time. http://crbug.com/333738.
+    for k, v in os.environ.items():
+      if k not in env:
+        env[k] = v
+    args = open(rspfile).read()
+    dirname = dirname[0] if dirname else None
+    return subprocess.call(args, shell=True, env=env, cwd=dirname)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/toolchain/wrapper_utils.py b/src/build/toolchain/wrapper_utils.py
new file mode 100644
index 0000000..5949a37
--- /dev/null
+++ b/src/build/toolchain/wrapper_utils.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for gcc_toolchain.gni wrappers."""
+
+import gzip
+import os
+import re
+import subprocess
+import shlex
+import shutil
+import sys
+import threading
+
+_BAT_PREFIX = 'cmd /c call '
+
+
+def _GzipThenDelete(src_path, dest_path):
+  # Results for Android map file with GCC on a z620:
+  # Uncompressed: 207MB
+  # gzip -9: 16.4MB, takes 8.7 seconds.
+  # gzip -1: 21.8MB, takes 2.0 seconds.
+  # Piping directly from the linker via -print-map (or via -Map with a fifo)
+  # adds a whopping 30-45 seconds!
+  with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out:
+    shutil.copyfileobj(f_in, f_out)
+  os.unlink(src_path)
+
+
+def CommandToRun(command):
+  """Generates commands compatible with Windows.
+
+  When running on a Windows host and using a toolchain whose tools are
+  actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+  executables, the |command| to run has to be prefixed with this magic.
+  The GN toolchain definitions take care of that for when GN/Ninja is
+  running the tool directly.  When that command is passed in to this
+  script, it appears as a unitary string but needs to be split up so that
+  just 'cmd' is the actual command given to Python's subprocess module.
+
+  Args:
+    command: List containing the UNIX style |command|.
+
+  Returns:
+    A list containing the Windows version of the |command|.
+  """
+  if command[0].startswith(_BAT_PREFIX):
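+    # E.g. 'cmd /c call foo.bat x' splits to
+    # ['cmd', '/c', 'call', 'foo.bat x'], so subprocess invokes 'cmd' itself.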
+    command = command[0].split(None, 3) + command[1:]
+  return command
+
+
+def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
+  """Runs the given command, adding in -Wl,-Map when |map_file| is given.
+
+  Also takes care of gzipping when |map_file| ends with .gz.
+
+  Args:
+    command: List of arguments comprising the command.
+    env: Environment variables.
+    map_file: Path to output map_file.
+
+  Returns:
+    The exit code of running |command|.
+  """
+  tmp_map_path = None
+  if map_file and map_file.endswith('.gz'):
+    tmp_map_path = map_file + '.tmp'
+    command.append('-Wl,-Map,' + tmp_map_path)
+  elif map_file:
+    command.append('-Wl,-Map,' + map_file)
+
+  result = subprocess.call(command, env=env)
+
+  if tmp_map_path and result == 0:
+    threading.Thread(
+        target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start()
+  elif tmp_map_path and os.path.exists(tmp_map_path):
+    os.unlink(tmp_map_path)
+
+  return result
+
+
+def CaptureCommandStderr(command, env=None):
+  """Returns the stderr of a command.
+
+  Args:
+    command: A list containing the command and arguments.
+    env: Environment variables for the new process.
+  """
+  child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
+  _, stderr = child.communicate()
+  return child.returncode, stderr
diff --git a/src/build/tree_truth.sh b/src/build/tree_truth.sh
new file mode 100755
index 0000000..617092d
--- /dev/null
+++ b/src/build/tree_truth.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
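+#
+# Usage: tree_truth.sh <CHROME_SRC> <project> [<project>...]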
+
+# Return the sha1 of the given tag.  If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+  if [ $? -eq 0 ] ; then
+    echo $oneline
+  fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+  ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+  ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+ local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+  ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+  local tag_sha1=$(tt_sha1_for_tag $1 $2)
+  local head_sha1=$(tt_sha1_for_head $1)
+  local display_name=$(echo $3 | sed 's#/#_#g')
+  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+    return
+  fi
+  if [ "${tag_sha1}" = "" ] ; then
+    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+    echo "NOTE: git tag was not found so we have no baseline."
+    echo "Here are some recent commits, but they may not be new for this build."
+    ( cd $1 && git log -n 10 --stat | cat)
+  else
+    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+  fi
+}
+
+# Clean out the tree truth tags in all repos.  For testing.
+tt_clean_all() {
+  for project in $@; do
+    tt_delete_tag $CHROME_SRC/../$project tree_truth
+  done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+  for project in $@; do
+    local full_path=$CHROME_SRC/../$project
+    tt_list_commits $full_path tree_truth $project
+    tt_tag_head $full_path tree_truth
+  done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for project in $@; do
+    echo $project:
+    local full_path=$CHROME_SRC/../$project
+    (cd $full_path && git log -n 10 --format="   %H %s   %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
diff --git a/src/build/update-linux-sandbox.sh b/src/build/update-linux-sandbox.sh
new file mode 100755
index 0000000..d24cf26
--- /dev/null
+++ b/src/build/update-linux-sandbox.sh
@@ -0,0 +1,82 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "If you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
+  echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
+  echo "after you build the chrome_sandbox target"
+  exit 1
+fi
+
+if readelf -d "${CHROME_SANDBOX_BUILD_PATH}" | \
+  grep "(RPATH)" > /dev/null 2>&1; then
+  echo "Build requires is_component_build=false in ${CHROME_OUT_DIR}/args.gn."
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/src/build/util/BUILD.gn b/src/build/util/BUILD.gn
new file mode 100644
index 0000000..2ba66a4
--- /dev/null
+++ b/src/build/util/BUILD.gn
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/lastchange.gni")
+
+action("webkit_version") {
+  script = "version.py"
+
+  template_file = "webkit_version.h.in"
+  inputs = [
+    lastchange_file,
+    template_file,
+  ]
+
+  output_file = "$target_gen_dir/webkit_version.h"
+  outputs = [ output_file ]
+
+  args = [
+    # LASTCHANGE contains "<build hash>-<ref>".  The user agent only wants the
+    # "<build hash>" bit, so chop off everything after it.
+    "-e",
+    "LASTCHANGE=LASTCHANGE[:LASTCHANGE.find('-')]",
+    "-f",
+    rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
+
+action("chrome_version_json") {
+  script = "version.py"
+  _chrome_version_path = "//chrome/VERSION"
+  inputs = [ _chrome_version_path ]
+  _output_file = "$root_gen_dir/CHROME_VERSION.json"
+  outputs = [ _output_file ]
+  args = [
+    "--file",
+    rebase_path(_chrome_version_path, root_build_dir),
+    "--template",
+    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
+    "--output",
+    rebase_path(_output_file, root_build_dir),
+  ]
+}
diff --git a/src/build/util/LASTCHANGE.dummy b/src/build/util/LASTCHANGE.dummy
new file mode 100644
index 0000000..21bb3c3
--- /dev/null
+++ b/src/build/util/LASTCHANGE.dummy
@@ -0,0 +1 @@
+LASTCHANGE=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
diff --git a/src/build/util/android_chrome_version.py b/src/build/util/android_chrome_version.py
new file mode 100644
index 0000000..c06bb38
--- /dev/null
+++ b/src/build/util/android_chrome_version.py
@@ -0,0 +1,211 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Different build variants of Chrome for Android have different version codes.
+
+For targets that have the same package name (e.g. Chrome, Chrome Modern,
+Monochrome, Trichrome), Play Store considers them the same app and will push the
+supported app with the highest version code to devices. Note that Play Store
+does not support hosting two different apps with the same version code and
+package name.
+
+Each version code generated by this script will be used by one or more APKs.
+
+Webview channels must have unique version codes for a couple of reasons:
+a) Play Store does not support having the same version code for different
+   versions of a package. Without unique codes, promoting a beta apk to stable
+   would require first removing the beta version.
+b) Firebase project support (used by official builders) requires unique
+   [version code + package name].
+   We cannot add new webview package names for new channels because webview
+   packages are allowlisted by Android as webview providers.
+
+WEBVIEW_STABLE, WEBVIEW_BETA, WEBVIEW_DEV are all used for standalone webview,
+whereas the others are used for various chrome APKs.
+
+Note that a package digit of '3' for Webview is reserved for Trichrome Webview.
+The same versionCode is used for both Trichrome Chrome and Trichrome Webview.
+
+Version code values are constructed like this:
+
+  {full BUILD number}{3 digits: PATCH}{1 digit: package}{1 digit: ABIs}.
+
+For example:
+
+  Build 3721, patch 0, ChromeModern (1), on ARM64 (5): 372100015
+  Build 3721, patch 9, Monochrome (2), on ARM (0): 372100920
+
+"""
+
+# Package name version bits.
+_PACKAGE_NAMES = {
+    'CHROME': 0,
+    'CHROME_MODERN': 10,
+    'MONOCHROME': 20,
+    'TRICHROME': 30,
+    'WEBVIEW_STABLE': 0,
+    'WEBVIEW_BETA': 10,
+    'WEBVIEW_DEV': 20,
+}
+
+""" "Next" builds get +5 on their package version code digit.
+
+We choose 5 because it won't conflict with values in _PACKAGE_NAMES.
+"""
+_NEXT_BUILD_VERSION_CODE_DIFF = 50
+
+"""List of version numbers to be created for each build configuration.
+Tuple format:
+
+  (version code name), (package name), (supported ABIs)
+
+Here, (supported ABIs) refers to the combination of browser ABI and
+webview library ABI present in a particular APK. For example, 64_32 implies a
+64-bit browser with an extra 32-bit Webview library. See also
+_ABIS_TO_BIT_MASK.
+"""
+_APKS = {
+    '32': [
+        ('CHROME', 'CHROME', '32'),
+        ('CHROME_MODERN', 'CHROME_MODERN', '32'),
+        ('MONOCHROME', 'MONOCHROME', '32'),
+        ('TRICHROME', 'TRICHROME', '32'),
+        ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32'),
+        ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32'),
+        ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32'),
+    ],
+    '64': [
+        ('CHROME', 'CHROME', '64'),
+        ('CHROME_MODERN', 'CHROME_MODERN', '64'),
+        ('MONOCHROME', 'MONOCHROME', '32_64'),
+        ('MONOCHROME_32', 'MONOCHROME', '32'),
+        ('MONOCHROME_32_64', 'MONOCHROME', '32_64'),
+        ('MONOCHROME_64_32', 'MONOCHROME', '64_32'),
+        ('MONOCHROME_64', 'MONOCHROME', '64'),
+        ('TRICHROME', 'TRICHROME', '32_64'),
+        ('TRICHROME_32', 'TRICHROME', '32'),
+        ('TRICHROME_32_64', 'TRICHROME', '32_64'),
+        ('TRICHROME_64_32', 'TRICHROME', '64_32'),
+        ('TRICHROME_64', 'TRICHROME', '64'),
+        ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32_64'),
+        ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32_64'),
+        ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32_64'),
+        ('WEBVIEW_32_STABLE', 'WEBVIEW_STABLE', '32'),
+        ('WEBVIEW_32_BETA', 'WEBVIEW_BETA', '32'),
+        ('WEBVIEW_32_DEV', 'WEBVIEW_DEV', '32'),
+    ]
+}
+
+# Splits the input build config architecture into manufacturer and bitness.
+_ARCH_TO_MFG_AND_BITNESS = {
+    'arm': ('arm', '32'),
+    'arm64': ('arm', '64'),
+    'x86': ('intel', '32'),
+    'x64': ('intel', '64'),
+    'mipsel': ('mipsel', '32'),
+}
+
+# Expose the available choices to other scripts.
+ARCH_CHOICES = _ARCH_TO_MFG_AND_BITNESS.keys()
+"""
+The architecture preference is encoded into the version_code for devices
+that support multiple architectures, exploiting the Play Store logic that
+pushes the APK with the highest version code.
+
+Detail:
+Many Android devices support multiple architectures, and can run applications
+built for any of them; the Play Store considers all of the supported
+architectures compatible and does not, itself, have any preference for which
+is "better". The common cases here:
+
+- All production arm64 devices can also run arm
+- All production x64 devices can also run x86
+- Pretty much all production x86/x64 devices can also run arm (via a binary
+  translator)
+
+Since the Play Store has no particular preferences, you have to encode your own
+preferences into the ordering of the version codes. There are a few relevant
+things here:
+
+- For any android app, it's theoretically preferable to ship a 64-bit version to
+  64-bit devices if it exists, because the 64-bit architectures are supposed to
+  be "better" than their 32-bit predecessors (unfortunately this is not always
+  true due to the effect on memory usage, but we currently deal with this by
+  simply not shipping a 64-bit version *at all* on the configurations where we
+  want the 32-bit version to be used).
+- For any android app, it's definitely preferable to ship an x86 version to x86
+  devices if it exists instead of an arm version, because running things through
+  the binary translator is a performance hit.
+- For WebView, Monochrome, and Trichrome specifically, they are a special class
+  of APK called "multiarch" which means that they actually need to *use* more
+  than one architecture at runtime (rather than simply being compatible with
+  more than one). The 64-bit builds of these multiarch APKs contain both 32-bit
+  and 64-bit code, so that Webview is available for both ABIs. If you're
+  multiarch you *must* have a version that supports both 32-bit and 64-bit
+  version on a 64-bit device, otherwise it won't work properly. So, the 64-bit
+  version needs to be a higher versionCode, as otherwise a 64-bit device would
+  prefer the 32-bit version that does not include any 64-bit code, and fail.
+- The relative order of mips isn't important, but it needs to be a *distinct*
+  value from the other architectures because all builds need unique version
+  codes.
+"""
+_ABIS_TO_BIT_MASK = {
+    'arm': {
+        '32': 0,
+        '32_64': 3,
+        '64_32': 4,
+        '64': 5,
+    },
+    'intel': {
+        '32': 1,
+        '32_64': 6,
+        '64_32': 7,
+        '64': 8,
+    },
+    'mipsel': {
+        '32': 2,
+    }
+}
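+
+# Note the resulting per-manufacturer ordering, e.g. for arm:
+# 32 (0) < 32_64 (3) < 64_32 (4) < 64 (5), so a 64-bit device prefers builds
+# that include 64-bit code.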
+
+def GenerateVersionCodes(version_values, arch, is_next_build):
+  """Build dict of version codes for the specified build architecture. Eg:
+
+  {
+    'CHROME_VERSION_CODE': '378100010',
+    'MONOCHROME_VERSION_CODE': '378100013',
+    ...
+  }
+
+  versionCode values are built like this:
+  {full BUILD int}{3 digits: PATCH}{1 digit: package}{1 digit: ABIs}.
+
+  MAJOR and MINOR values are not used for generating versionCode.
+  - MINOR is always 0. It was used for something long ago in Chrome's history
+    but has not been used since, and has never been nonzero on Android.
+  - MAJOR is cosmetic and controlled by the release managers. MAJOR and BUILD
+    always have reasonable sort ordering: for two version codes A and B, it's
+    always the case that (A.MAJOR < B.MAJOR) implies (A.BUILD < B.BUILD), and
+    that (A.MAJOR > B.MAJOR) implies (A.BUILD > B.BUILD). This property is just
+    maintained by the humans who set MAJOR.
+
+  Thus, this method is responsible for the final two digits of versionCode.
+  """
+
+  base_version_code = int(
+      '%s%03d00' % (version_values['BUILD'], int(version_values['PATCH'])))
+
+  if is_next_build:
+    base_version_code += _NEXT_BUILD_VERSION_CODE_DIFF
+
+  mfg, bitness = _ARCH_TO_MFG_AND_BITNESS[arch]
+
+  version_codes = {}
+
+  for apk, package, abis in _APKS[bitness]:
+    abi_bits = _ABIS_TO_BIT_MASK[mfg][abis]
+    package_bits = _PACKAGE_NAMES[package]
+
+    version_code_name = apk + '_VERSION_CODE'
+    version_code_val = base_version_code + abi_bits + package_bits
+    version_codes[version_code_name] = str(version_code_val)
+
+  return version_codes
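+
+
+# Illustrative usage: with BUILD=3721 and PATCH=0 the base code is 372100000,
+# so arm64 Monochrome gets 372100000 + 3 (abi bits) + 20 (package bits):
+#
+#   GenerateVersionCodes({'BUILD': '3721', 'PATCH': '0'}, arch='arm64',
+#                        is_next_build=False)['MONOCHROME_VERSION_CODE']
+#   # -> '372100023'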
diff --git a/src/build/util/android_chrome_version_test.py b/src/build/util/android_chrome_version_test.py
new file mode 100644
index 0000000..eed7748
--- /dev/null
+++ b/src/build/util/android_chrome_version_test.py
@@ -0,0 +1,308 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from android_chrome_version import GenerateVersionCodes
+
+
+class _VersionTest(unittest.TestCase):
+  """Unittests for the android_chrome_version module.
+  """
+
+  EXAMPLE_VERSION_VALUES = {
+      'MAJOR': '74',
+      'MINOR': '0',
+      'BUILD': '3720',
+      'PATCH': '0',
+  }
+
+  def testGenerateVersionCodesAndroidChrome(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(chrome_version_code, '372000000')
+
+  def testGenerateVersionCodesAndroidChromeModern(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE']
+
+    self.assertEqual(chrome_modern_version_code, '372000010')
+
+  def testGenerateVersionCodesAndroidMonochrome(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+
+    self.assertEqual(monochrome_version_code, '372000020')
+
+  def testGenerateVersionCodesAndroidTrichrome(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    trichrome_version_code = output['TRICHROME_VERSION_CODE']
+
+    self.assertEqual(trichrome_version_code, '372000030')
+
+  def testGenerateVersionCodesAndroidWebviewStable(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+
+    self.assertEqual(webview_stable_version_code, '372000000')
+
+  def testGenerateVersionCodesAndroidWebviewBeta(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+    self.assertEqual(webview_beta_version_code, '372000010')
+
+  def testGenerateVersionCodesAndroidWebviewDev(self):
+    """Assert it gives correct values for standard/example inputs"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE']
+
+    self.assertEqual(webview_dev_version_code, '372000020')
+
+  def testGenerateVersionCodesAndroidNextBuild(self):
+    """Assert it handles "next" builds correctly"""
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=True)
+
+    # Get just a sample of values
+    chrome_version_code = output['CHROME_VERSION_CODE']
+    monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+    webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+    self.assertEqual(chrome_version_code, '372000050')
+    self.assertEqual(monochrome_version_code, '372000070')
+    self.assertEqual(webview_stable_version_code, '372000050')
+    self.assertEqual(webview_beta_version_code, '372000060')
+
+  def testGenerateVersionCodesAndroidArchArm(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docs in android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+    arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(arch_chrome_version_code, '372000000')
+
+  def testGenerateVersionCodesAndroidArchX86(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False)
+    arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(arch_chrome_version_code, '372000001')
+
+  def testGenerateVersionCodesAndroidArchMips(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='mipsel', is_next_build=False)
+    arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(arch_chrome_version_code, '372000002')
+
+  def testGenerateVersionCodesAndroidArchArm64(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+    arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(arch_chrome_version_code, '372000005')
+
+  def testGenerateVersionCodesAndroidArchArm64Variants(self):
+    """Assert it handles 64-bit-specific additional version codes correctly.
+
+    Some additional version codes are generated for 64-bit architectures.
+    See docstring on android_chrome_version._APKS for more info.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+    arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+    arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE']
+    arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE']
+    arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE']
+    arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE']
+    arch_trichrome_version_code = output['TRICHROME_VERSION_CODE']
+    arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE']
+    arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE']
+    arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
+    arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+
+    self.assertEqual(arch_monochrome_32_version_code, '372000020')
+    self.assertEqual(arch_monochrome_32_64_version_code, '372000023')
+    self.assertEqual(arch_monochrome_version_code, '372000023')
+    self.assertEqual(arch_monochrome_64_32_version_code, '372000024')
+    self.assertEqual(arch_monochrome_64_version_code, '372000025')
+    self.assertEqual(arch_trichrome_32_version_code, '372000030')
+    self.assertEqual(arch_trichrome_32_64_version_code, '372000033')
+    self.assertEqual(arch_trichrome_version_code, '372000033')
+    self.assertEqual(arch_trichrome_64_32_version_code, '372000034')
+    self.assertEqual(arch_trichrome_64_version_code, '372000035')
+
+  def testGenerateVersionCodesAndroidArchX64(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+    arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+    self.assertEqual(arch_chrome_version_code, '372000008')
+
+  def testGenerateVersionCodesAndroidArchX64Variants(self):
+    """Assert it handles 64-bit-specific additional version codes correctly.
+
+    Some additional version codes are generated for 64-bit architectures.
+    See docstring on android_chrome_version._APKS for more info.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+    arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE']
+    arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE']
+    arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+    arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE']
+    arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE']
+    arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE']
+    arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE']
+    arch_trichrome_version_code = output['TRICHROME_VERSION_CODE']
+    arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
+    arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+
+    self.assertEqual(arch_monochrome_32_version_code, '372000021')
+    self.assertEqual(arch_monochrome_32_64_version_code, '372000026')
+    self.assertEqual(arch_monochrome_version_code, '372000026')
+    self.assertEqual(arch_monochrome_64_32_version_code, '372000027')
+    self.assertEqual(arch_monochrome_64_version_code, '372000028')
+    self.assertEqual(arch_trichrome_32_version_code, '372000031')
+    self.assertEqual(arch_trichrome_32_64_version_code, '372000036')
+    self.assertEqual(arch_trichrome_version_code, '372000036')
+    self.assertEqual(arch_trichrome_64_32_version_code, '372000037')
+    self.assertEqual(arch_trichrome_64_version_code, '372000038')
+
+  def testGenerateVersionCodesAndroidArchOrderArm(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+
+    Test arm-related values.
+    """
+    arm_output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+    arm64_output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+
+    arm_chrome_version_code = arm_output['CHROME_VERSION_CODE']
+    arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE']
+
+    self.assertLess(arm_chrome_version_code, arm64_chrome_version_code)
+
+  def testGenerateVersionCodesAndroidArchOrderX86(self):
+    """Assert it handles different architectures correctly.
+
+    Version codes for different builds need to be distinct and maintain a
+    certain ordering.
+    See docstring on android_chrome_version._ABIS_TO_BIT_MASK for
+    reasoning.
+
+    Test x86-related values.
+    """
+    x86_output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False)
+    x64_output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+
+    x86_chrome_version_code = x86_output['CHROME_VERSION_CODE']
+    x64_chrome_version_code = x64_output['CHROME_VERSION_CODE']
+
+    self.assertLess(x86_chrome_version_code, x64_chrome_version_code)
+
+  def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self):
+    """Assert webview beta channel is higher than stable.
+
+    The channel-specific version codes for standalone webview need to follow
+    the order stable < beta < dev.
+
+    This ensures that if a user opts into the beta track, they will always
+    have the beta apk, including any finch experiments targeted at beta users,
+    even when beta and stable channels are otherwise on the same version.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+    self.assertGreater(webview_beta_version_code, webview_stable_version_code)
+
+  def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self):
+    """Assert webview dev channel is higher than beta.
+
+    The channel-specific version codes for standalone webview need to follow
+    the order stable < beta < dev.
+
+    This ensures that if a user opts into the dev track, they will always
+    have the dev apk, including any finch experiments targeted at dev users,
+    even when dev and beta channels are otherwise on the same version.
+    """
+    output = GenerateVersionCodes(
+        self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+    webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+    webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE']
+
+    self.assertGreater(webview_dev_version_code, webview_beta_version_code)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/util/branding.gni b/src/build/util/branding.gni
new file mode 100644
index 0000000..aa758e6
--- /dev/null
+++ b/src/build/util/branding.gni
@@ -0,0 +1,45 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome branding as GN variables for use in build files.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action to generate a file at
+# build-time with the information you need. This allows better dependency
+# checking and GN will run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
+
+import("//build/config/chrome_build.gni")
+
+_branding_dictionary_template =
+    "full_name = \"@PRODUCT_FULLNAME@\" " +
+    "short_name = \"@PRODUCT_SHORTNAME@\" " +
+    "bundle_id = \"@MAC_BUNDLE_ID@\" " +
+    "creator_code = \"@MAC_CREATOR_CODE@\" " +
+    "installer_full_name = \"@PRODUCT_INSTALLER_FULLNAME@\" " +
+    "installer_short_name = \"@PRODUCT_INSTALLER_SHORTNAME@\" " +
+    "team_id = \"@MAC_TEAM_ID@\" "
+
+_result = exec_script("version.py",
+                      [
+                        "-f",
+                        rebase_path(branding_file_path, root_build_dir),
+                        "-t",
+                        _branding_dictionary_template,
+                      ],
+                      "scope",
+                      [ branding_file_path ])
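+
+# version.py substitutes the branding file's values into the template above;
+# the "scope" result type lets GN read the output as variable assignments,
+# e.g. full_name = "Chromium" (illustrative).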
+
+chrome_product_full_name = _result.full_name
+chrome_product_short_name = _result.short_name
+chrome_product_installer_full_name = _result.installer_full_name
+chrome_product_installer_short_name = _result.installer_short_name
+
+if (is_mac) {
+  chrome_mac_bundle_id = _result.bundle_id
+  chrome_mac_creator_code = _result.creator_code
+  chrome_mac_team_id = _result.team_id
+}
diff --git a/src/build/util/generate_wrapper.gni b/src/build/util/generate_wrapper.gni
new file mode 100644
index 0000000..02e8bca
--- /dev/null
+++ b/src/build/util/generate_wrapper.gni
@@ -0,0 +1,105 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Wraps a target and any of its arguments into an executable script.
+#
+# Many executable targets have build-time-constant arguments. This
+# template allows those to be wrapped into a single, user- or bot-friendly
+# script at build time.
+#
+# Paths to be wrapped should be relative to root_build_dir and should be
+# wrapped in "@WrappedPath(...)"; see Example below.
+#
+# Variables:
+#   generator_script: Path to the script to use to perform the wrapping.
+#     Defaults to //build/util/generate_wrapper.py. Generally should only
+#     be set by other templates.
+#   wrapper_script: Output path.
+#   executable: Path to the executable to wrap. Can be a script or a
+#     build product. Paths can be relative to the containing gn file
+#     or source-absolute.
+#   executable_args: List of arguments to write into the wrapper.
+#   use_vpython3: If true, invoke the generated wrapper with vpython3 instead
+#     of vpython.
+#
+# Example wrapping a checked-in script:
+#   generate_wrapper("sample_wrapper") {
+#     executable = "//for/bar/sample.py"
+#     wrapper_script = "$root_build_dir/bin/run_sample"
+#
+#     _sample_argument_path = "//sample/$target_cpu/lib/sample_lib.so"
+#     _rebased_sample_argument_path = rebase_path(
+#         _sample_argument_path,
+#         root_build_dir)
+#     executable_args = [
+#       "--sample-lib", "@WrappedPath(${_rebased_sample_argument_path})",
+#     ]
+#   }
+#
+# Example wrapping a build product:
+#   generate_wrapper("sample_wrapper") {
+#     executable = "$root_build_dir/sample_build_product"
+#     wrapper_script = "$root_build_dir/bin/run_sample_build_product"
+#   }
+template("generate_wrapper") {
+  _generator_script = "//build/util/generate_wrapper.py"
+  if (defined(invoker.generator_script)) {
+    _generator_script = invoker.generator_script
+  }
+  _executable_to_wrap = invoker.executable
+  _wrapper_script = invoker.wrapper_script
+  if (is_win) {
+    _wrapper_script += ".bat"
+  }
+  if (defined(invoker.executable_args)) {
+    _wrapped_arguments = invoker.executable_args
+  } else {
+    _wrapped_arguments = []
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           TESTONLY_AND_VISIBILITY + [
+                                 "data",
+                                 "data_deps",
+                                 "deps",
+                                 "sources",
+                               ])
+    script = _generator_script
+    if (!defined(data)) {
+      data = []
+    }
+    data += [ _wrapper_script ]
+    outputs = [ _wrapper_script ]
+
+    _rebased_executable_to_wrap =
+        rebase_path(_executable_to_wrap, root_build_dir)
+    _rebased_wrapper_script = rebase_path(_wrapper_script, root_build_dir)
+    if (is_win) {
+      _script_language = "batch"
+    } else {
+      _script_language = "bash"
+    }
+    args = [
+      "--executable",
+      "@WrappedPath(${_rebased_executable_to_wrap})",
+      "--wrapper-script",
+      _rebased_wrapper_script,
+      "--output-directory",
+      rebase_path(root_build_dir, root_build_dir),
+      "--script-language",
+      _script_language,
+    ]
+
+    if (defined(invoker.use_vpython3) && invoker.use_vpython3) {
+      args += [ "--use-vpython3" ]
+    }
+    args += [ "--" ]
+    args += _wrapped_arguments
+
+    if (defined(invoker.write_runtime_deps)) {
+      write_runtime_deps = invoker.write_runtime_deps
+    }
+  }
+}
diff --git a/src/build/util/generate_wrapper.py b/src/build/util/generate_wrapper.py
new file mode 100755
index 0000000..07167e8
--- /dev/null
+++ b/src/build/util/generate_wrapper.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env vpython
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps an executable and any provided arguments into an executable script."""
+
+import argparse
+import os
+import sys
+import textwrap
+
+
+# The bash template runs the wrapped python script directly via a vpython
+# shebang; _SCRIPT_LOCATION records the wrapper's own location so the
+# embedded script can resolve paths relative to it.
+BASH_TEMPLATE = textwrap.dedent("""\
+    #!/usr/bin/env {vpython}
+    _SCRIPT_LOCATION = __file__
+    {script}
+    """)
+
+
+# The batch template reruns the batch script with vpython, with the -x
+# flag instructing the interpreter to ignore the first line. The interpreter
+# knows about the (batch) script in this case, so it can get the file location
+# directly.
+BATCH_TEMPLATE = textwrap.dedent("""\
+    @SETLOCAL ENABLEDELAYEDEXPANSION \
+      & {vpython}.bat -x "%~f0" %* \
+      & EXIT /B !ERRORLEVEL!
+    _SCRIPT_LOCATION = __file__
+    {script}
+    """)
+
+
+SCRIPT_TEMPLATES = {
+    'bash': BASH_TEMPLATE,
+    'batch': BATCH_TEMPLATE,
+}
+
+
+PY_TEMPLATE = textwrap.dedent("""\
+    import os
+    import re
+    import subprocess
+    import sys
+
+    _WRAPPED_PATH_RE = re.compile(r'@WrappedPath\(([^)]+)\)')
+    _PATH_TO_OUTPUT_DIR = '{path_to_output_dir}'
+    _SCRIPT_DIR = os.path.dirname(os.path.realpath(_SCRIPT_LOCATION))
+
+
+    def ExpandWrappedPath(arg):
+      m = _WRAPPED_PATH_RE.match(arg)
+      if m:
+        relpath = os.path.join(
+            os.path.relpath(_SCRIPT_DIR), _PATH_TO_OUTPUT_DIR, m.group(1))
+        npath = os.path.normpath(relpath)
+        if os.path.sep not in npath:
+          # If the original path points to something in the current directory,
+          # returning the normalized version of it can be a problem.
+          # normpath() strips off the './' part of the path
+          # ('./foo' becomes 'foo'), which can be a problem if the result
+          # is passed to something like os.execvp(); in that case
+          # osexecvp() will search $PATH for the executable, rather than
+          # just execing the arg directly, and if '.' isn't in $PATH, this
+          # results in an error.
+          #
+          # So, we need to explicitly return './foo' (or '.\\foo' on windows)
+          # instead of 'foo'.
+          #
+          # Hopefully there are no cases where this causes a problem; if
+          # there are, we will either need to change the interface to
+          # WrappedPath() somehow to distinguish between the two, or
+          # somehow ensure that the wrapped executable doesn't hit cases
+          # like this.
+          return '.' + os.path.sep + npath
+        return npath
+      return arg
+
+
+    def ExpandWrappedPaths(args):
+      for i, arg in enumerate(args):
+        args[i] = ExpandWrappedPath(arg)
+      return args
+
+
+    def FindIsolatedOutdir(raw_args):
+      outdir = None
+      i = 0
+      remaining_args = []
+      while i < len(raw_args):
+        if raw_args[i] == '--isolated-outdir' and i < len(raw_args)-1:
+          outdir = raw_args[i+1]
+          i += 2
+        elif raw_args[i].startswith('--isolated-outdir='):
+          outdir = raw_args[i][len('--isolated-outdir='):]
+          i += 1
+        else:
+          remaining_args.append(raw_args[i])
+          i += 1
+      if not outdir and 'ISOLATED_OUTDIR' in os.environ:
+        outdir = os.environ['ISOLATED_OUTDIR']
+      return outdir, remaining_args
+
+
+    def FilterIsolatedOutdirBasedArgs(outdir, args):
+      rargs = []
+      i = 0
+      while i < len(args):
+        if 'ISOLATED_OUTDIR' in args[i]:
+          if outdir:
+            # Rewrite the arg.
+            rargs.append(args[i].replace('${{ISOLATED_OUTDIR}}',
+                                         outdir).replace(
+              '$ISOLATED_OUTDIR', outdir))
+            i += 1
+          else:
+            # Simply drop the arg.
+            i += 1
+        elif (not outdir and
+              args[i].startswith('-') and
+              '=' not in args[i] and
+              i < len(args) - 1 and
+              'ISOLATED_OUTDIR' in args[i+1]):
+          # Parsing this case is ambiguous; if we're given
+          # `--foo $ISOLATED_OUTDIR` we can't tell if $ISOLATED_OUTDIR
+          # is meant to be the value of foo, or if foo takes no argument
+          # and $ISOLATED_OUTDIR is the first positional arg.
+          #
+          # We assume the former will be much more common, and so we
+          # need to drop --foo and $ISOLATED_OUTDIR.
+          i += 2
+        else:
+          rargs.append(args[i])
+          i += 1
+      return rargs
+
+
+    def main(raw_args):
+      executable_path = ExpandWrappedPath('{executable_path}')
+      outdir, remaining_args = FindIsolatedOutdir(raw_args)
+      args = {executable_args}
+      args = FilterIsolatedOutdirBasedArgs(outdir, args)
+      executable_args = ExpandWrappedPaths(args)
+      cmd = [executable_path] + args + remaining_args
+      if executable_path.endswith('.py'):
+        cmd = [sys.executable] + cmd
+      return subprocess.call(cmd)
+
+
+    if __name__ == '__main__':
+      sys.exit(main(sys.argv[1:]))
+    """)
+
+
+def Wrap(args):
+  """Writes a wrapped script according to the provided arguments.
+
+  Arguments:
+    args: an argparse.Namespace object containing command-line arguments
+      as parsed by a parser returned by CreateArgumentParser.
+  """
+  path_to_output_dir = os.path.relpath(
+      args.output_directory,
+      os.path.dirname(args.wrapper_script))
+
+  with open(args.wrapper_script, 'w') as wrapper_script:
+    py_contents = PY_TEMPLATE.format(
+        path_to_output_dir=path_to_output_dir,
+        executable_path=str(args.executable),
+        executable_args=str(args.executable_args))
+    template = SCRIPT_TEMPLATES[args.script_language]
+    wrapper_script.write(
+        template.format(script=py_contents, vpython=args.vpython))
+  os.chmod(args.wrapper_script, 0o750)
+
+  return 0
+
+
+def CreateArgumentParser():
+  """Creates an argparse.ArgumentParser instance."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--executable',
+      help='Executable to wrap.')
+  parser.add_argument(
+      '--wrapper-script',
+      help='Path to which the wrapper script will be written.')
+  parser.add_argument(
+      '--output-directory',
+      help='Path to the output directory.')
+  parser.add_argument(
+      '--script-language',
+      choices=SCRIPT_TEMPLATES.keys(),
+      help='Language in which the wrapper script will be written.')
+  parser.add_argument('--use-vpython3',
+                      dest='vpython',
+                      action='store_const',
+                      const='vpython3',
+                      default='vpython',
+                      help='Use vpython3 instead of vpython')
+  parser.add_argument(
+      'executable_args', nargs='*',
+      help='Arguments to wrap into the executable.')
+  return parser
+
+
+def main(raw_args):
+  parser = CreateArgumentParser()
+  args = parser.parse_args(raw_args)
+  return Wrap(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/util/is_python2.py b/src/build/util/is_python2.py
new file mode 100644
index 0000000..83a407e
--- /dev/null
+++ b/src/build/util/is_python2.py
@@ -0,0 +1,11 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script for checking if we're running Python 2 or 3."""
+
+from __future__ import print_function
+
+import sys
+
+print("true" if sys.version_info.major == 2 else "false")
diff --git a/src/build/util/java_action.gni b/src/build/util/java_action.gni
new file mode 100644
index 0000000..0615b38
--- /dev/null
+++ b/src/build/util/java_action.gni
@@ -0,0 +1,99 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+jarrunner = "//build/util/java_action.py"
+
+# Declare a target that runs a java command a single time.
+#
+# This target type allows you to run a java command a single time to produce
+# one or more output files. If you want to run a java command for each of a
+# set of input files, see "java_action_foreach".
+#
+# See "gn help action" for more information on how to use this target. This
+# template is based on the "action" and supports the same variables.
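+#
+# Example (illustrative script and paths):
+#   java_action("run_foo_jar") {
+#     script = "//third_party/foo/foo.jar"
+#     outputs = [ "$target_gen_dir/foo.txt" ]
+#     args = [
+#       "--output",
+#       rebase_path("$target_gen_dir/foo.txt", root_build_dir),
+#     ]
+#   }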
+template("java_action") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+
+  jarscript = invoker.script
+  action(target_name) {
+    script = jarrunner
+
+    inputs = [ jarscript ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
+
+# Declare a target that runs a java command over a set of files.
+#
+# This target type allows you to run a java command once-per-file over a set of
+# sources. If you want to run a java command once that takes many files as
+# input, see "java_action".
+#
+# See "gn help action_foreach" for more information on how to use this target.
+# This template is based on the "action_foreach" supports the same variables.
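+#
+# Example (illustrative): run foo.jar once per source file.
+#   java_action_foreach("process_each") {
+#     script = "//third_party/foo/foo.jar"
+#     sources = [ "a.txt", "b.txt" ]
+#     args = [ "{{source}}" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.out" ]
+#   }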
+template("java_action_foreach") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+  assert(defined(invoker.sources),
+         "Need sources in $target_name listing the target inputs.")
+
+  jarscript = invoker.script
+  action_foreach(target_name) {
+    script = jarrunner
+
+    inputs = [ jarscript ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
diff --git a/src/build/util/java_action.py b/src/build/util/java_action.py
new file mode 100755
index 0000000..ed9bb60
--- /dev/null
+++ b/src/build/util/java_action.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to run java command as action with gn."""
+
+import os
+import subprocess
+import sys
+
+EXIT_SUCCESS = 0
+EXIT_FAILURE = 1
+
+
+def IsExecutable(path):
+  """Returns whether file at |path| exists and is executable.
+
+  Args:
+    path: absolute or relative path to test.
+
+  Returns:
+    True if the file at |path| exists and is executable, False otherwise.
+  """
+  return os.path.isfile(path) and os.access(path, os.X_OK)
+
+
+def FindCommand(command):
+  """Looks up for |command| in PATH.
+
+  Args:
+    command: name of the command to lookup, if command is a relative or
+      absolute path (i.e. contains some path separator) then only that
+      path will be tested.
+
+  Returns:
+    Full path to command or None if the command was not found.
+
+    On Windows, this respects the PATHEXT environment variable when the
+    command name does not have an extension.
+  """
+  fpath, _ = os.path.split(command)
+  if fpath:
+    if IsExecutable(command):
+      return command
+
+  if sys.platform == 'win32':
+    # On Windows, if the command does not have an extension, cmd.exe will
+    # try all extensions from PATHEXT when resolving the full path.
+    command, ext = os.path.splitext(command)
+    if not ext:
+      exts = os.environ['PATHEXT'].split(os.path.pathsep)
+    else:
+      exts = [ext]
+  else:
+    exts = ['']
+
+  for path in os.environ['PATH'].split(os.path.pathsep):
+    for ext in exts:
+      path = os.path.join(path, command) + ext
+      if IsExecutable(path):
+        return path
+
+  return None
+
+
+def main():
+  java_path = FindCommand('java')
+  if not java_path:
+    sys.stderr.write('java: command not found\n')
+    sys.exit(EXIT_FAILURE)
+
+  args = sys.argv[1:]
+  if len(args) < 2 or args[0] != '-jar':
+    sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0])
+    sys.exit(EXIT_FAILURE)
+
+  return subprocess.check_call([java_path] + args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/util/lastchange.gni b/src/build/util/lastchange.gni
new file mode 100644
index 0000000..a132959
--- /dev/null
+++ b/src/build/util/lastchange.gni
@@ -0,0 +1,16 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is used to inject a fixed dummy commit for commit-independent,
+# reproducible binaries.
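+#
+# Typical usage (illustrative):
+#   gn gen out/Default --args="use_dummy_lastchange=true"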
+
+declare_args() {
+  use_dummy_lastchange = false
+}
+
+if (use_dummy_lastchange) {
+  lastchange_file = "//build/util/LASTCHANGE.dummy"
+} else {
+  lastchange_file = "//build/util/LASTCHANGE"
+}
diff --git a/src/build/util/lastchange.py b/src/build/util/lastchange.py
new file mode 100755
index 0000000..874870a
--- /dev/null
+++ b/src/build/util/lastchange.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+from __future__ import print_function
+
+import argparse
+import collections
+import logging
+import os
+import subprocess
+import sys
+
+VersionInfo = collections.namedtuple("VersionInfo",
+                                     ("revision_id", "revision", "timestamp"))
+
+class GitError(Exception):
+  pass
+
+# This function exists for compatibility with logic outside this
+# repository that uses this file as a library.
+# TODO(eliribble) remove this function after it has been ported into
+# the repositories that depend on it
+def RunGitCommand(directory, command):
+  """
+  Launches git subcommand.
+
+  Errors are swallowed.
+
+  Returns:
+    A process object or None.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform == 'cygwin':
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+    return proc
+  except OSError as e:
+    logging.error('Command %r failed: %s' % (' '.join(command), e))
+    return None
+
+
+def _RunGitCommand(directory, command):
+  """Launches git subcommand.
+
+  Returns:
+    The stripped stdout of the git command.
+  Raises:
+    GitError on failure, including a nonzero return code.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform == 'cygwin':
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    logging.info("Executing '%s' in %s", ' '.join(command), directory)
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+    stdout, stderr = tuple(x.decode(encoding='utf_8')
+                           for x in proc.communicate())
+    stdout = stdout.strip()
+    logging.debug("returncode: %d", proc.returncode)
+    logging.debug("stdout: %s", stdout)
+    logging.debug("stderr: %s", stderr)
+    if proc.returncode != 0 or not stdout:
+      raise GitError((
+          "Git command '{}' in {} failed: "
+          "rc={}, stdout='{}' stderr='{}'").format(
+          " ".join(command), directory, proc.returncode, stdout, stderr))
+    return stdout
+  except OSError as e:
+    raise GitError("Git command 'git {}' in {} failed: {}".format(
+        " ".join(command), directory, e))
+
+
+def GetMergeBase(directory, ref):
+  """
+  Return the merge-base of HEAD and ref.
+
+  Args:
+    directory: The directory containing the .git directory.
+    ref: The ref to use to find the merge base.
+  Returns:
+    The git commit SHA of the merge-base as a string.
+  """
+  logging.debug("Calculating merge base between HEAD and %s in %s",
+                ref, directory)
+  command = ['merge-base', 'HEAD', ref]
+  return _RunGitCommand(directory, command)
+
+
+def FetchGitRevision(directory, commit_filter, start_commit="HEAD"):
+  """
+  Fetch the Git hash (and Cr-Commit-Position if any) for a given directory.
+
+  Args:
+    directory: The directory containing the .git directory.
+    commit_filter: A filter to supply to grep to filter commits
+    start_commit: A commit identifier. The result of this function
+      will be limited to only consider commits before the provided
+      commit.
+  Returns:
+    A VersionInfo object. On error all values will be 0.
+  """
+  hash_ = ''
+
+  git_args = ['log', '-1', '--format=%H %ct']
+  if commit_filter is not None:
+    git_args.append('--grep=' + commit_filter)
+
+  git_args.append(start_commit)
+
+  output = _RunGitCommand(directory, git_args)
+  hash_, commit_timestamp = output.split()
+  if not hash_:
+    return VersionInfo('0', '0', 0)
+
+  revision = hash_
+  output = _RunGitCommand(directory, ['cat-file', 'commit', hash_])
+  for line in reversed(output.splitlines()):
+    if line.startswith('Cr-Commit-Position:'):
+      pos = line.rsplit()[-1].strip()
+      logging.debug("Found Cr-Commit-Position '%s'", pos)
+      revision = "{}-{}".format(hash_, pos)
+      break
+  return VersionInfo(hash_, revision, int(commit_timestamp))
+
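+# For a commit with a Cr-Commit-Position footer, FetchGitRevision returns,
+# illustratively, VersionInfo('0123abcd...',
+# '0123abcd...-refs/heads/main@{#42}', 1550000000).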
+
+def GetHeaderGuard(path):
+  """
+  Returns the header #define guard for the given file path.
+  This treats everything after the last instance of "src/" as being a
+  relevant part of the guard. If there is no "src/", then the entire path
+  is used.
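+  For example, "src/build/util/lastchange.h" becomes
+  "BUILD_UTIL_LASTCHANGE_H_".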
+  """
+  src_index = path.rfind('src/')
+  if src_index != -1:
+    guard = path[src_index + 4:]
+  else:
+    guard = path
+  guard = guard.upper()
+  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+
+def GetHeaderContents(path, define, version):
+  """
+  Returns the contents the header file should have to indicate the given
+  revision.
+  """
+  header_guard = GetHeaderGuard(path)
+
+  header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif  // %(header_guard)s
+"""
+  header_contents = header_contents % { 'header_guard': header_guard,
+                                        'define': define,
+                                        'version': version }
+  return header_contents
+
+
+def GetGitTopDirectory(source_dir):
+  """Get the top git directory - the directory that contains the .git directory.
+
+  Args:
+    source_dir: The directory to search.
+  Returns:
+    The output of "git rev-parse --show-toplevel" as a string
+  """
+  return _RunGitCommand(source_dir, ['rev-parse', '--show-toplevel'])
+
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different from the current contents.
+  Returns True if new data was written, False otherwise.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return False
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+  return True
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  parser = argparse.ArgumentParser(usage="lastchange.py [options]")
+  parser.add_argument("-m", "--version-macro",
+                    help=("Name of C #define when using --header. Defaults to "
+                          "LAST_CHANGE."))
+  parser.add_argument("-o", "--output", metavar="FILE",
+                    help=("Write last change to FILE. "
+                          "Can be combined with --header to write both files."))
+  parser.add_argument("--header", metavar="FILE",
+                    help=("Write last change to FILE as a C/C++ header. "
+                          "Can be combined with --output to write both files."))
+  parser.add_argument("--merge-base-ref",
+                    default=None,
+                    help=("Only consider changes since the merge "
+                          "base between HEAD and the provided ref"))
+  parser.add_argument("--revision-id-only", action='store_true',
+                    help=("Output the revision as a VCS revision ID only (in "
+                          "Git, a 40-character commit hash, excluding the "
+                          "Cr-Commit-Position)."))
+  parser.add_argument("--print-only", action="store_true",
+                    help=("Just print the revision string. Overrides any "
+                          "file-output-related options."))
+  parser.add_argument("-s", "--source-dir", metavar="DIR",
+                    help="Use repository in the given directory.")
+  parser.add_argument("--filter", metavar="REGEX",
+                    help=("Only use log entries where the commit message "
+                          "matches the supplied filter regex. Defaults to "
+                          "'^Change-Id:' to suppress local commits."),
+                    default='^Change-Id:')
+
+  args, extras = parser.parse_known_args(argv[1:])
+
+  logging.basicConfig(level=logging.WARNING)
+
+  out_file = args.output
+  header = args.header
+  commit_filter = args.filter
+
+  if extras and out_file is None:
+    out_file = extras.pop(0)
+  if extras:
+    sys.stderr.write('Unexpected arguments: %r\n\n' % extras)
+    parser.print_help()
+    sys.exit(2)
+
+  source_dir = args.source_dir or os.path.dirname(os.path.abspath(__file__))
+  try:
+    git_top_dir = GetGitTopDirectory(source_dir)
+  except GitError as e:
+    logging.error("Failed to get git top directory from '%s': %s",
+                  source_dir, e)
+    return 2
+
+  if args.merge_base_ref:
+    try:
+      merge_base_sha = GetMergeBase(git_top_dir, args.merge_base_ref)
+    except GitError as e:
+      logging.error("You requested a --merge-base-ref value of '%s' but no "
+                    "merge base could be found between it and HEAD. Git "
+                    "reports: %s", args.merge_base_ref, e)
+      return 3
+  else:
+    merge_base_sha = 'HEAD'
+
+  try:
+    version_info = FetchGitRevision(git_top_dir, commit_filter, merge_base_sha)
+  except GitError as e:
+    logging.error("Failed to get version info: %s", e)
+    logging.info(("Falling back to a version of 0.0.0 to allow script to "
+        "finish. This is normal if you are bootstrapping a new environment "
+        "or do not have a git repository for any other reason. If not, this "
+        "could represent a serious error."))
+    version_info = VersionInfo('0', '0', 0)
+
+  revision_string = version_info.revision
+  if args.revision_id_only:
+    revision_string = version_info.revision_id
+
+  if args.print_only:
+    print(revision_string)
+  else:
+    contents = "LASTCHANGE=%s\n" % revision_string
+    if not out_file and not args.header:
+      sys.stdout.write(contents)
+    else:
+      if out_file:
+        committime_file = out_file + '.committime'
+        out_changed = WriteIfChanged(out_file, contents)
+        if out_changed or not os.path.exists(committime_file):
+          with open(committime_file, 'w') as timefile:
+            timefile.write(str(version_info.timestamp))
+      if header:
+        WriteIfChanged(header,
+                       GetHeaderContents(header, args.version_macro,
+                                         revision_string))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/util/lib/common/__init__.py b/src/build/util/lib/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/build/util/lib/common/__init__.py
diff --git a/src/build/util/lib/common/chrome_test_server_spawner.py b/src/build/util/lib/common/chrome_test_server_spawner.py
new file mode 100644
index 0000000..9810215
--- /dev/null
+++ b/src/build/util/lib/common/chrome_test_server_spawner.py
@@ -0,0 +1,473 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+
+from six.moves import BaseHTTPServer, urllib
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'ws': '--websocket',
+}
+
+
+_DIR_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                 os.pardir))
+
+
+_logger = logging.getLogger(__name__)
+
+
+# Paths that are needed to import necessary modules when launching a testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s'
+    % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'),
+       os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+       os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver')))
+
+
+# The timeout (in seconds) of starting up the Python test server.
+_TEST_SERVER_STARTUP_TIMEOUT = 10
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line by the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  return SERVER_TYPES[server_type]
+
+
+class PortForwarder:
+  def Map(self, port_pairs):
+    pass
+
+  def GetDevicePortForHostPort(self, host_port):
+    """Returns the device port that corresponds to a given host port."""
+    return host_port
+
+  def WaitHostPortAvailable(self, port):
+    """Returns True if |port| is available."""
+    return True
+
+  def WaitPortNotAvailable(self, port):
+    """Returns True if |port| is not available."""
+    return True
+
+  def WaitDevicePortReady(self, port):
+    """Returns whether the provided port is used."""
+    return True
+
+  def Unmap(self, device_port):
+    """Unmaps specified port"""
+    pass
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, port_forwarder):
+    """Initialize TestServerThread with the following argument.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      device: An instance of DeviceUtils.
+      tool: instance of runtime error detection tool.
+    """
+    threading.Thread.__init__(self)
+    self.wait_event = threading.Event()
+    self.stop_event = threading.Event()
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.port_forwarder = port_forwarder
+    self.test_server_process = None
+    self.is_ready = False
+    self.host_port = self.arguments['port']
+    self.host_ocsp_port = 0
+    assert isinstance(self.host_port, int)
+    # The forwarder device port now is dynamically allocated.
+    self.forwarder_device_port = 0
+    self.forwarder_ocsp_device_port = 0
+    # Anonymous pipe in order to get port info from test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.process = None
+    self.command_line = []
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server through the pipe
+    given by self.pipe_out; the resulting port is written to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   _TEST_SERVER_STARTUP_TIMEOUT)
+    if len(in_fds) == 0:
+      _logger.error('Timed out waiting for the Python test server to start.')
+      return False
+    # First read the data length as an unsigned 4-byte value.  This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
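+    # Illustrative sketch of the matching writer side (what testserver.py is
+    # assumed to do):
+    #   data = json.dumps({'port': 8000})
+    #   os.write(pipe_out, struct.pack('=L', len(data)))
+    #   os.write(pipe_out, data)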
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+      assert data_length
+    if not data_length:
+      _logger.error('Failed to get length of server data.')
+      return False
+    server_data_json = os.read(self.pipe_in, data_length)
+    if not server_data_json:
+      _logger.error('Failed to get server data.')
+      return False
+    _logger.info('Got port json data: %s', server_data_json)
+
+    parsed_server_data = None
+    try:
+      parsed_server_data = json.loads(server_data_json)
+    except ValueError:
+      pass
+
+    if not isinstance(parsed_server_data, dict):
+      _logger.error('Failed to parse server_data: %s' % server_data_json)
+      return False
+
+    if not isinstance(parsed_server_data.get('port'), int):
+      _logger.error('Failed to get port information from the server data.')
+      return False
+
+    self.host_port = parsed_server_data['port']
+    self.host_ocsp_port = parsed_server_data.get('ocsp_port', 0)
+
+    return self.port_forwarder.WaitPortNotAvailable(self.host_port)
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    if self.command_line:
+      return
+
+    args_copy = dict(self.arguments)
+
+    # Translate the server type.
+    type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
+    if type_cmd:
+      self.command_line.append(type_cmd)
+
+    # Use a pipe to get the port given by the instance of Python test server
+    # if the test does not specify the port.
+    assert self.host_port == args_copy['port']
+    if self.host_port == 0:
+      (self.pipe_in, self.pipe_out) = os.pipe()
+      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+
+    # Pass the remaining arguments as-is.
+    for key, values in args_copy.items():
+      if not isinstance(values, list):
+        values = [values]
+      for value in values:
+        if value is None:
+          self.command_line.append('--%s' % key)
+        else:
+          self.command_line.append('--%s=%s' % (key, value))
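+    # For example (illustrative): arguments of
+    #   {'server-type': 'ws', 'port': 0, 'log-to-console': None}
+    # yield a command line like
+    #   ['--websocket', '--startup-pipe=<fd>', '--port=0', '--log-to-console'],
+    # where <fd> is the write end returned by os.pipe() and the order of the
+    # remaining flags follows dict iteration order.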
+
+  def _CloseUnnecessaryFDsForTestServerProcess(self):
+    # This is required to avoid subtle deadlocks that could be caused by the
+    # test server child process inheriting undesirable file descriptors such as
+    # file lock file descriptors.
+    for fd in range(0, 1024):
+      if fd != self.pipe_out:
+        try:
+          os.close(fd)
+        except:
+          pass
+
+  def run(self):
+    _logger.info('Start running the thread!')
+    self.wait_event.clear()
+    self._GenerateCommandLineArguments()
+    command = [sys.executable,
+               os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver',
+                            'testserver.py')] + self.command_line
+    _logger.info('Running: %s', command)
+
+    # Disable PYTHONUNBUFFERED because it has a bad interaction with the
+    # testserver. Remove once this interaction is fixed.
+    unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
+
+    # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative
+    # paths in the arguments are resolved correctly.
+    self.process = subprocess.Popen(
+        command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
+        cwd=_DIR_SOURCE_ROOT)
+    if unbuf:
+      os.environ['PYTHONUNBUFFERED'] = unbuf
+    if self.process:
+      if self.pipe_out:
+        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+      else:
+        self.is_ready = self.port_forwarder.WaitPortNotAvailable(self.host_port)
+
+    if self.is_ready:
+      port_map = [(0, self.host_port)]
+      if self.host_ocsp_port:
+        port_map.extend([(0, self.host_ocsp_port)])
+      self.port_forwarder.Map(port_map)
+
+      self.forwarder_device_port = \
+          self.port_forwarder.GetDevicePortForHostPort(self.host_port)
+      if self.host_ocsp_port:
+        self.forwarder_ocsp_device_port = \
+            self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port)
+
+      # Check whether the forwarder is ready on the device.
+      self.is_ready = self.forwarder_device_port and \
+          self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port)
+
+    # Wake up the request handler thread.
+    self.ready_event.set()
+    # Keep thread running until Stop() gets called.
+    self.stop_event.wait()
+    if self.process.poll() is None:
+      self.process.kill()
+      # Wait for process to actually terminate.
+      # (crbug.com/946475)
+      self.process.wait()
+
+    self.port_forwarder.Unmap(self.forwarder_device_port)
+    self.process = None
+    self.is_ready = False
+    if self.pipe_out:
+      os.close(self.pipe_in)
+      os.close(self.pipe_out)
+      self.pipe_in = None
+      self.pipe_out = None
+    _logger.info('Test-server has died.')
+    self.wait_event.set()
+
+  def Stop(self):
+    """Blocks until the loop has finished.
+
+    Note that this must be called in another thread.
+    """
+    if not self.process:
+      return
+    self.stop_event.set()
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET/POST request."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: number of the response status.
+      response_reason: string of reason description of the response.
+      additional_headers: dict of additional headers. Each key is the name of
+                          the header, each value is the content of the header.
+      contents: string of the contents we want to send to client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length; without it the http(s) response will not
+    # be completed properly (and the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    _logger.info('Handling request to spawn a test server.')
+    content_type = self.headers.get('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.get('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except (TypeError, ValueError):
+      raise Exception('Bad content-length for start request.')
+    _logger.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    _logger.info(test_server_argument_json)
+
+    if len(self.server.test_servers) >= self.server.max_instances:
+      self._SendResponse(400, 'Invalid request', {},
+                         'Too many test servers running')
+      return
+
+    ready_event = threading.Event()
+    new_server = TestServerThread(ready_event,
+                                  json.loads(test_server_argument_json),
+                                  self.server.port_forwarder)
+    new_server.setDaemon(True)
+    new_server.start()
+    ready_event.wait()
+    if new_server.is_ready:
+      response = {'port': new_server.forwarder_device_port,
+                  'message': 'started'}
+      if new_server.forwarder_ocsp_device_port:
+        response['ocsp_port'] = new_server.forwarder_ocsp_device_port
+      self._SendResponse(200, 'OK', {}, json.dumps(response))
+      _logger.info('Test server is running on port %d forwarded to %d.',
+                   new_server.forwarder_device_port, new_server.host_port)
+      port = new_server.forwarder_device_port
+      assert port not in self.server.test_servers
+      self.server.test_servers[port] = new_server
+    else:
+      new_server.Stop()
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      _logger.info('Encountered a problem while starting a test server.')
+
+  def _KillTestServer(self, params):
+    """Stops the test server instance."""
+    try:
+      port = int(params['port'][0])
+    except (KeyError, IndexError, ValueError):
+      port = None
+    if port is None or port <= 0:
+      self._SendResponse(400, 'Invalid request.', {}, 'port must be specified')
+      return
+
+    if port not in self.server.test_servers:
+      self._SendResponse(400, 'Invalid request.', {},
+                         "testserver isn't running on port %d" % port)
+      return
+
+    server = self.server.test_servers.pop(port)
+
+    _logger.info('Handling request to kill a test server on port: %d.', port)
+    server.Stop()
+
+    # Make sure the status of the test server is correct before responding.
+    if self.server.port_forwarder.WaitHostPortAvailable(port):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      _logger.info('Test server on port %d is killed', port)
+    else:
+      # We expect the port to be free, but nothing stops the system from
+      # binding something else to that port, so don't throw error.
+      # (crbug.com/946475)
+      self._SendResponse(200, 'OK', {}, '')
+      _logger.warning('Port %s is not free after killing test server.', port)
+
+  def log_message(self, format, *args):
+    # Suppress the default HTTP logging behavior unless the logging level is
+    # INFO or lower, in which case forward the message to our logger.
+    if _logger.getEffectiveLevel() <= logging.INFO:
+      _logger.info(format, *args)
+
+  def do_POST(self):
+    parsed_path = urllib.parse.urlparse(self.path)
+    action = parsed_path.path
+    _logger.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      _logger.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urllib.parse.urlparse(self.path)
+    action = parsed_path.path
+    params = urllib.parse.parse_qs(parsed_path.query, keep_blank_values=1)
+    _logger.info('Action for GET method is: %s.', action)
+    for param in params:
+      _logger.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer(params)
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve requests. We don't need to check the status of any test
+      # server when handling a ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      _logger.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request', {}, '')
+      _logger.info('Encountered unknown request: %s.', action)
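+  # Illustrative client protocol, using the handlers above: POST /start with
+  # a JSON body such as {"server-type": "http", "port": 0} returns
+  # {"port": <device port>, "message": "started"}; GET /kill?port=<device
+  # port> stops that server; GET /ping reports readiness.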
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server."""
+
+  def __init__(self, test_server_spawner_port, port_forwarder, max_instances):
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server_port = self.server.server_port
+    _logger.info('Started test server spawner on port: %d.', self.server_port)
+
+    self.server.port_forwarder = port_forwarder
+    self.server.test_servers = {}
+    self.server.max_instances = max_instances
+
+  def _Listen(self):
+    _logger.info('Starting test server spawner.')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_servers:
+      _logger.warning('Not all test servers were stopped.')
+      for port in self.server.test_servers:
+        _logger.warning('Stopping test server on port %d' % port)
+        self.server.test_servers[port].Stop()
+      self.server.test_servers = {}
diff --git a/src/build/util/lib/common/perf_result_data_type.py b/src/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 0000000..67b550a
--- /dev/null
+++ b/src/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
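+
+
+# Example (illustrative): IsValidType('histogram') and
+# IsHistogram('unimportant-histogram') are both True, while
+# IsHistogram('default') is False.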
diff --git a/src/build/util/lib/common/perf_tests_results_helper.py b/src/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 0000000..153886d
--- /dev/null
+++ b/src/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,202 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+                perf_result_data_type.DEFAULT: '*RESULT ',
+                perf_result_data_type.INFORMATIONAL: '',
+                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  return re.sub(r'[:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+  """Returns a simple list without sub-lists."""
+  ret = []
+  for entry in values:
+    if isinstance(entry, list):
+      ret.extend(FlattenList(entry))
+    else:
+      ret.append(entry)
+  return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  # Handle empty histograms gracefully.
+  if 'buckets' not in histogram:
+    return 0.0, 0.0
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
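+# Worked example (illustrative): for a histogram_json of
+#   '{"buckets": [{"low": 1, "high": 3, "count": 2}]}'
+# the bucket mean is (1 + 3) / 2 = 2.0, so the geometric mean is 2.0 and the
+# standard deviation is 0.0.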
+
+
+def _ValueToString(v):
+  # Special case for floats so we don't print using scientific notation.
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print('Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list]))
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, also prints the output to stdout; the formatted
+        result is returned to the caller either way.
+
+  Returns:
+    String of the formatted perf result.
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+         'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show the equals sign if the trace is empty. Usually this
+        # happens when the measurement name alone describes the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print(output)
+    sys.stdout.flush()
+  return output
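+# Example (illustrative): PrintPerfResult('vm_peak', 'reference', [1024], 'kb')
+# prints and returns the line
+#   *RESULT vm_peak: reference= 1024 kb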
+
+
+def ReportPerfResult(chart_data, graph_title, trace_title, value, units,
+                     improvement_direction='down', important=True):
+  """Outputs test results in correct format.
+
+  If chart_data is a dictionary, the result is written in chartjson format;
+  if chart_data is None (or any other type), it falls back to the old format.
+
+  Args:
+    chart_data: A dictionary corresponding to perf results in the chartjson
+        format.
+    graph_title: A string containing the name of the chart to add the result
+        to.
+    trace_title: A string containing the name of the trace within the chart
+        to add the result to.
+    value: The value of the result being reported.
+    units: The units of the value being reported.
+    improvement_direction: A string denoting whether higher or lower is
+        better for the result. Either 'up' or 'down'.
+    important: A boolean denoting whether the result is important or not.
+  """
+  if chart_data and isinstance(chart_data, dict):
+    chart_data['charts'].setdefault(graph_title, {})
+    chart_data['charts'][graph_title][trace_title] = {
+        'type': 'scalar',
+        'value': value,
+        'units': units,
+        'improvement_direction': improvement_direction,
+        'important': important
+    }
+  else:
+    PrintPerfResult(graph_title, trace_title, [value], units)
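+
+# Example (illustrative): with chart_data = {'charts': {}},
+# ReportPerfResult(chart_data, 'warm_times', 'page_load', 42, 'ms') records
+#   {'type': 'scalar', 'value': 42, 'units': 'ms',
+#    'improvement_direction': 'down', 'important': True}
+# under chart_data['charts']['warm_times']['page_load'].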
diff --git a/src/build/util/lib/common/unittest_util.py b/src/build/util/lib/common/unittest_util.py
new file mode 100644
index 0000000..9683ab7
--- /dev/null
+++ b/src/build/util/lib/common/unittest_util.py
@@ -0,0 +1,155 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import re
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+  """A test result class that can print formatted text results to a stream.
+
+  Results printed in conformance with gtest output format, like:
+  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+  [         OK ] autofill.AutofillTest.testAutofillInvalid
+  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
+  [         OK ] autofill.AutofillTest.testFillProfile
+  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  """
+  def __init__(self, stream, descriptions, verbosity):
+    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    self._fails = set()
+
+  def _GetTestURI(self, test):
+    return '%s.%s.%s' % (test.__class__.__module__,
+                         test.__class__.__name__,
+                         test._testMethodName)
+
+  def getDescription(self, test):
+    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+  def startTest(self, test):
+    unittest.TestResult.startTest(self, test)
+    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+
+  def addSuccess(self, test):
+    unittest.TestResult.addSuccess(self, test)
+    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+
+  def addError(self, test, err):
+    unittest.TestResult.addError(self, test, err)
+    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def addFailure(self, test, err):
+    unittest.TestResult.addFailure(self, test, err)
+    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def getRetestFilter(self):
+    return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+  """Test Runner for displaying test results in textual format.
+
+  Results are displayed in conformance with google test output.
+  """
+
+  def __init__(self, verbosity=1):
+    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+                                     verbosity=verbosity)
+
+  def _makeResult(self):
+    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+  """Returns all the tests from a given test suite."""
+  tests = []
+  for x in suite:
+    if isinstance(x, unittest.TestSuite):
+      tests += GetTestsFromSuite(x)
+    else:
+      tests += [x]
+  return tests
+
+
+def GetTestNamesFromSuite(suite):
+  """Returns a list of every test name in the given suite."""
+  return [GetTestName(x) for x in GetTestsFromSuite(suite)]
+
+
+def GetTestName(test):
+  """Gets the test name of the given unittest test."""
+  return '.'.join([test.__class__.__module__,
+                   test.__class__.__name__,
+                   test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+  """Returns a new filtered tests suite based on the given gtest filter.
+
+  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+  for gtest_filter specification.
+  """
+  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+  """Filter a list of tests based on the given gtest filter.
+
+  Args:
+    all_tests: List of tests (unittest.TestSuite)
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of tests.
+  """
+  test_names = [GetTestName(test) for test in all_tests]
+  filtered_names = FilterTestNames(test_names, gtest_filter)
+  return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+  """Filter a list of test names based on the given gtest filter.
+
+  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+  for gtest_filter specification.
+
+  Args:
+    all_tests: List of test names.
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of test names.
+  """
+  pattern_groups = gtest_filter.split('-')
+  positive_patterns = ['*']
+  if pattern_groups[0]:
+    positive_patterns = pattern_groups[0].split(':')
+  negative_patterns = []
+  if len(pattern_groups) > 1:
+    negative_patterns = pattern_groups[1].split(':')
+
+  neg_pats = None
+  if negative_patterns:
+    neg_pats = re.compile('|'.join(fnmatch.translate(p) for p in
+                                   negative_patterns))
+
+  tests = []
+  test_set = set()
+  for pattern in positive_patterns:
+    pattern_tests = [
+        test for test in all_tests
+        if (fnmatch.fnmatch(test, pattern)
+            and not (neg_pats and neg_pats.match(test))
+            and test not in test_set)]
+    tests.extend(pattern_tests)
+    test_set.update(pattern_tests)
+  return tests
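+
+# Example (illustrative): with tests ['Foo.One', 'Foo.Two', 'Bar.One'],
+# FilterTestNames(tests, 'Foo.*-Foo.Two') returns ['Foo.One']: the positive
+# pattern 'Foo.*' selects both Foo tests and the negative pattern after '-'
+# removes 'Foo.Two'.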
diff --git a/src/build/util/lib/common/unittest_util_test.py b/src/build/util/lib/common/unittest_util_test.py
new file mode 100755
index 0000000..1514c9b
--- /dev/null
+++ b/src/build/util/lib/common/unittest_util_test.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import logging
+import sys
+import unittest
+import unittest_util
+
+
+class FilterTestNamesTest(unittest.TestCase):
+
+  possible_list = ["Foo.One",
+                   "Foo.Two",
+                   "Foo.Three",
+                   "Bar.One",
+                   "Bar.Two",
+                   "Bar.Three",
+                   "Quux.One",
+                   "Quux.Two",
+                   "Quux.Three"]
+
+  def testMatchAll(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "*")
+    self.assertEquals(x, self.possible_list)
+
+  def testMatchPartial(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Foo.*")
+    self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"])
+
+  def testMatchFull(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two")
+    self.assertEquals(x, ["Foo.Two"])
+
+  def testMatchTwo(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Bar.Three",
+                          "Foo.One",
+                          "Foo.Two",
+                          "Foo.Three"])
+
+  def testMatchWithNegative(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Foo.One",
+                          "Foo.Two"])
+
+  def testMatchOverlapping(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Bar.Three",
+                          "Foo.Two",
+                          "Quux.Two"])
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/src/build/util/lib/common/util.py b/src/build/util/lib/common/util.py
new file mode 100644
index 0000000..a415b1f
--- /dev/null
+++ b/src/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+  """Return a string to be used in paths for the platform."""
+  if IsWindows():
+    return 'win'
+  if IsMac():
+    return 'mac'
+  if IsLinux():
+    return 'linux'
+  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recurisvely), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+                temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file. The directory
+                must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminate the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+         working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
+  """
+  parsed = urlparse.urlparse(url)
+  try:
+    conn = httplib.HTTPConnection(parsed.netloc)
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (socket.gaierror, socket.error):
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status == 302 or response.status == 301:
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
diff --git a/src/build/util/process_version.gni b/src/build/util/process_version.gni
new file mode 100644
index 0000000..e1ccb95
--- /dev/null
+++ b/src/build/util/process_version.gni
@@ -0,0 +1,122 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the version processing script over the given template file to produce
+# an output file. This is used for generating various forms of files that
+# incorporate the product name and version.
+#
+# Unlike GYP, this will actually compile the resulting file, so you don't need
+# to add it separately to the sources, just depend on the target.
+#
+# In GYP this is a rule that runs once per ".ver" file. In GN this just
+# processes one file per invocation of the template so you may have to have
+# multiple targets.
+#
+# Parameters:
+#   sources (optional):
+#     List of file names to read. When converting a GYP target, this should
+#     list the 'source' (see above) as well as any extra_variable_files.
+#     The files will be passed to version.py in the order specified here.
+#
+#   output:
+#     File name of file to write. In GYP this is unspecified and it will
+#     make up a file name for you based on the input name, and tack on
+#     "_version.rc" to the end. But in GN you need to specify the full name.
+#
+#   template_file (optional):
+#     Template file to use (not a list). Most Windows users that want to use
+#     this to process a .rc template should use process_version_rc_template(),
+#     defined in //chrome/process_version_rc_template.gni, instead.
+#
+#   extra_args (optional):
+#     Extra arguments to pass to version.py. Any "-f <filename>" args should
+#     use sources instead.
+#
+#   process_only (optional, defaults to false)
+#     Set to generate only one action that processes the version file and
+#     doesn't attempt to link the result into a source set. This is for if
+#     you are processing the version as data only.
+#
+#   visibility (optional)
+#
+# Example:
+#   process_version("myversion") {
+#     sources = [
+#       "//chrome/VERSION"
+#       "myfile.h.in"
+#     ]
+#     output = "$target_gen_dir/myfile.h"
+#     extra_args = [ "-e", "FOO=42" ]
+#   }
+template("process_version") {
+  assert(defined(invoker.output), "Output must be defined for $target_name")
+
+  process_only = defined(invoker.process_only) && invoker.process_only
+
+  if (process_only) {
+    action_name = target_name
+  } else {
+    action_name = target_name + "_action"
+    source_set_name = target_name
+  }
+
+  action(action_name) {
+    script = "//build/util/version.py"
+
+    inputs = []
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+    if (defined(invoker.template_file)) {
+      inputs += [ invoker.template_file ]
+    }
+
+    outputs = [ invoker.output ]
+
+    args = []
+
+    if (is_official_build) {
+      args += [ "--official" ]
+    }
+
+    if (defined(invoker.sources)) {
+      inputs += invoker.sources
+      foreach(i, invoker.sources) {
+        args += [
+          "-f",
+          rebase_path(i, root_build_dir),
+        ]
+      }
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+    args += [
+      "-o",
+      rebase_path(invoker.output, root_build_dir),
+    ]
+    if (defined(invoker.template_file)) {
+      args += [ rebase_path(invoker.template_file, root_build_dir) ]
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+
+    if (process_only) {
+      # When processing only, visibility gets applied to this target.
+      forward_variables_from(invoker, [ "visibility" ])
+    } else {
+      # When linking the result, only the source set can depend on the action.
+      visibility = [ ":$source_set_name" ]
+    }
+  }
+
+  if (!process_only) {
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = get_target_outputs(":$action_name")
+      public_deps = [ ":$action_name" ]
+    }
+  }
+}
diff --git a/src/build/util/python2_action.py b/src/build/util/python2_action.py
new file mode 100644
index 0000000..609665b
--- /dev/null
+++ b/src/build/util/python2_action.py
@@ -0,0 +1,27 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script for ensuring that a python action runs under Python2, not Python3."""
+
+import subprocess
+import sys
+
+if sys.version_info.major == 2:
+  # If we get here, we're already Python2, so just re-execute the
+  # command without the wrapper.
+  exe = sys.executable
+elif sys.executable.endswith('.exe'):
+  # If we get here, we're a Python3 executable likely running on
+  # Windows, so look for the Python2 wrapper in depot_tools. We
+  # can't invoke it directly because some command lines might exceed the
+  # 8K command line length limit in cmd.exe, but we can use it to
+  # find the underlying executable, which we can then safely call.
+  exe = subprocess.check_output(
+      ['python.bat', '-c',
+       'import sys; print(sys.executable)']).decode('utf8').strip()
+else:
+  # If we get here, we are a Python3 executable. Hope that we can find
+  # a `python2.7` in path somewhere.
+  exe = 'python2.7'
+
+sys.exit(subprocess.call([exe] + sys.argv[1:]))
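+
+# Illustrative invocation (assumed): `python python2_action.py script.py
+# --flag` re-runs `script.py --flag` under the Python2 interpreter chosen
+# above.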
diff --git a/src/build/util/version.gni b/src/build/util/version.gni
new file mode 100644
index 0000000..fb8715d
--- /dev/null
+++ b/src/build/util/version.gni
@@ -0,0 +1,149 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome version as GN variables for use in build files.
+# This also generates the various version codes used for builds of chrome for
+# android.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action (or use the process_version
+# wrapper in build/util/process_version.gni) to generate a file at build-time
+# with the information you need. This allows better dependency checking and
+# GN will run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
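+#
+# Illustrative use from a BUILD.gn file (the target and file names here are
+# assumptions):
+#   import("//build/util/version.gni")
+#   installer_name = "setup-$chrome_version_full.exe"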
+
+# Give version.py a pattern that will expand to a GN scope consisting of
+# all values we need at once.
+_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " +
+                               "major = \"@MAJOR@\" minor = \"@MINOR@\" " +
+                               "build = \"@BUILD@\" patch = \"@PATCH@\" "
+
+# The file containing the Chrome version number.
+chrome_version_file = "//chrome/VERSION"
+
+_script_arguments = []
+
+if (is_mac) {
+  _version_dictionary_template += "patch_hi = @PATCH_HI@ patch_lo = @PATCH_LO@ "
+
+  _script_arguments += [
+    "-e",
+    "PATCH_HI=int(PATCH)//256",
+    "-e",
+    "PATCH_LO=int(PATCH)%256",
+  ]
+} else if (target_os == "android") {
+  import("//build/config/android/config.gni")
+
+  _version_dictionary_template +=
+      "chrome_version_code = " + "\"@CHROME_VERSION_CODE@\" " +
+      "chrome_modern_version_code = \"@CHROME_MODERN_VERSION_CODE@\" " +
+      "monochrome_version_code = \"@MONOCHROME_VERSION_CODE@\" " +
+      "trichrome_version_code = \"@TRICHROME_VERSION_CODE@\" " +
+      "webview_stable_version_code = \"@WEBVIEW_STABLE_VERSION_CODE@\" " +
+      "webview_beta_version_code = \"@WEBVIEW_BETA_VERSION_CODE@\" " +
+      "webview_dev_version_code = \"@WEBVIEW_DEV_VERSION_CODE@\" "
+
+  if (target_cpu == "arm64" || target_cpu == "x64") {
+    _version_dictionary_template +=
+        "monochrome_32_version_code = \"@MONOCHROME_32_VERSION_CODE@\" " +
+        "monochrome_32_64_version_code = \"@MONOCHROME_32_64_VERSION_CODE@\" " +
+        "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" " +
+        "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" " +
+        "trichrome_32_version_code = \"@TRICHROME_32_VERSION_CODE@\" " +
+        "trichrome_32_64_version_code = \"@TRICHROME_32_64_VERSION_CODE@\" " +
+        "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" " +
+        "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" " +
+        "webview_32_stable_version_code = \"@WEBVIEW_32_STABLE_VERSION_CODE@\" " +
+        "webview_32_beta_version_code = \"@WEBVIEW_32_BETA_VERSION_CODE@\" " +
+        "webview_32_dev_version_code = \"@WEBVIEW_32_DEV_VERSION_CODE@\" "
+  }
+
+  _script_arguments += [
+    "-a",
+    target_cpu,
+  ]
+
+  if (defined(final_android_sdk) && !final_android_sdk) {
+    _script_arguments += [ "--next" ]
+  }
+}
+
+_script_arguments += [
+  "-f",
+  rebase_path(chrome_version_file, root_build_dir),
+  "-t",
+  _version_dictionary_template,
+  "--os",
+  target_os,
+]
+
+_result = exec_script("version.py",
+                      _script_arguments,
+                      "scope",
+                      [
+                        chrome_version_file,
+                        "android_chrome_version.py",
+                      ])
+
+# Full version. For example "45.0.12321.0"
+chrome_version_full = _result.full
+
+# The constituent parts of the full version.
+chrome_version_major = _result.major
+chrome_version_minor = _result.minor
+chrome_version_build = _result.build
+chrome_version_patch = _result.patch
+
+if (is_mac) {
+  chrome_version_patch_hi = _result.patch_hi
+  chrome_version_patch_lo = _result.patch_lo
+
+  chrome_dylib_version = "$chrome_version_build.$chrome_version_patch_hi" +
+                         ".$chrome_version_patch_lo"
+} else if (target_os == "android") {
+  forward_variables_from(_result,
+                         [
+                           "chrome_modern_version_code",
+                           "chrome_version_code",
+                           "monochrome_version_code",
+                           "monochrome_32_version_code",
+                           "monochrome_32_64_version_code",
+                           "monochrome_64_32_version_code",
+                           "monochrome_64_version_code",
+                           "trichrome_version_code",
+                           "trichrome_32_version_code",
+                           "trichrome_32_64_version_code",
+                           "trichrome_64_32_version_code",
+                           "trichrome_64_version_code",
+                           "webview_beta_version_code",
+                           "webview_dev_version_code",
+                           "webview_stable_version_code",
+                           "webview_32_beta_version_code",
+                           "webview_32_dev_version_code",
+                           "webview_32_stable_version_code",
+                         ])
+
+  chrome_version_name = chrome_version_full
+
+  lines_to_write = [
+    "VersionName: $chrome_version_name",
+    "Chrome: $chrome_version_code",
+    "ChromeModern: $chrome_modern_version_code",
+    "Monochrome: $monochrome_version_code",
+    "TrichromeChrome: $trichrome_version_code",
+    "AndroidWebviewStable: $webview_stable_version_code",
+    "AndroidWebviewBeta: $webview_beta_version_code",
+    "AndroidWebviewDev: $webview_dev_version_code",
+  ]
+
+  if (target_cpu == "arm64" || target_cpu == "x64") {
+    lines_to_write += [
+      "Monochrome32: $monochrome_32_version_code",
+      "Monochrome3264: $monochrome_32_64_version_code",
+      "Monochrome6432: $monochrome_64_32_version_code",
+      "Monochrome64: $monochrome_64_version_code",
+      "TrichromeChrome32: $trichrome_32_version_code",
+      "TrichromeChrome3264: $trichrome_32_64_version_code",
+      "TrichromeChrome6432: $trichrome_64_32_version_code",
+      "TrichromeChrome64: $trichrome_64_version_code",
+      "AndroidWebview32Stable: $webview_32_stable_version_code",
+      "AndroidWebview32Beta: $webview_32_beta_version_code",
+      "AndroidWebview32Dev: $webview_32_dev_version_code",
+    ]
+  }
+
+  write_file("$root_out_dir/android_chrome_versions.txt", lines_to_write)
+}
diff --git a/src/build/util/version.py b/src/build/util/version.py
new file mode 100755
index 0000000..4f440c4
--- /dev/null
+++ b/src/build/util/version.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+import android_chrome_version
+
+
+def FetchValuesFromFile(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value.  No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
+  """
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
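+# Example (illustrative): a file containing the two lines 'MAJOR=74' and
+# 'MINOR=0' updates values_dict with {'MAJOR': '74', 'MINOR': '0'}.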
+
+
+def FetchValues(file_list, is_official_build=None):
+  """
+  Returns a dictionary of values to be used for substitution.
+
+  Populates the dictionary with KEYWORD=VALUE settings from the files in
+  'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official' or is_official_build:
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  for file_name in file_list:
+    FetchValuesFromFile(values, file_name)
+
+  return values
+
+
+def SubstTemplate(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution.  The order
+  of evaluation is random based on the order of the keywords returned
+  by the Python dictionary.  So do NOT substitute a value that
+  contains any @KEYWORD@ strings expecting them to be recursively
+  substituted, okay?
+  """
+  for key, val in values.items():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      print(repr(key), repr(val))
+  return contents
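+# Example (illustrative):
+#   SubstTemplate('v@MAJOR@.@MINOR@', {'MAJOR': '74', 'MINOR': '0'})
+# returns 'v74.0'.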
+
+
+def SubstFile(file_name, values):
+  """
+  Returns the contents of the specified file_name with substituted values.
+
+  Substituted values come from the specified dictionary.
+
+  This is like SubstTemplate, except it operates on a file.
+  """
+  template = open(file_name, 'r').read()
+  return SubstTemplate(template, values)
+
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name.
+
+  Does nothing if the contents aren't different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def BuildParser():
+  """Build argparse parser, with added arguments."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-f', '--file', action='append', default=[],
+                      help='Read variables from FILE.')
+  parser.add_argument('-i', '--input', default=None,
+                      help='Read strings to substitute from FILE.')
+  parser.add_argument('-o', '--output', default=None,
+                      help='Write substituted strings to FILE.')
+  parser.add_argument('-t', '--template', default=None,
+                      help='Use TEMPLATE as the strings to substitute.')
+  parser.add_argument(
+      '-e',
+      '--eval',
+      action='append',
+      default=[],
+      help='Evaluate VAL after reading variables. Can be used '
+      'to synthesize variables, e.g. -e \'PATCH_HI=int('
+      'PATCH)//256\'.')
+  parser.add_argument(
+      '-a',
+      '--arch',
+      default=None,
+      choices=android_chrome_version.ARCH_CHOICES,
+      help='Set which cpu architecture the build is for.')
+  parser.add_argument('--os', default=None, help='Set the target os.')
+  parser.add_argument('--official', action='store_true',
+                      help='Whether the current build should be an official '
+                           'build, used in addition to the environment '
+                           'variable.')
+  parser.add_argument(
+      '--next',
+      action='store_true',
+      help='Whether the current build should be a "next" '
+      'build, which targets pre-release versions of '
+      'Android')
+  parser.add_argument('args', nargs=argparse.REMAINDER,
+                      help='For compatibility: INPUT and OUTPUT can be '
+                           'passed as positional arguments.')
+  return parser
+
+
+def BuildEvals(options, parser):
+  """Construct a dict of passed '-e' arguments for evaluating."""
+  evals = {}
+  for expression in options.eval:
+    try:
+      evals.update(dict([expression.split('=', 1)]))
+    except ValueError:
+      parser.error('-e requires VAR=VAL')
+  return evals
+
+
+def ModifyOptionsCompat(options, parser):
+  """Support compatibility with old versions.
+
+  Specifically, for old versions that considered the first two
+  positional arguments shorthands for --input and --output.
+  """
+  while len(options.args) and (options.input is None or options.output is None):
+    if options.input is None:
+      options.input = options.args.pop(0)
+    elif options.output is None:
+      options.output = options.args.pop(0)
+  if options.args:
+    parser.error('Unexpected arguments: %r' % options.args)
+
+
+def GenerateValues(options, evals):
+  """Construct a dict of raw values used to generate output.
+
+  e.g. this could return a dict like
+  {
+    'BUILD': 74,
+  }
+
+  which would be used to resolve a template like
+  'build = "@BUILD@"' into 'build = "74"'
+
+  """
+  values = FetchValues(options.file, options.official)
+
+  for key, val in evals.items():
+    values[key] = str(eval(val, globals(), values))
+
+  if options.os == 'android':
+    android_chrome_version_codes = android_chrome_version.GenerateVersionCodes(
+        values, options.arch, options.next)
+    values.update(android_chrome_version_codes)
+
+  return values
+
+
+def GenerateOutputContents(options, values):
+  """Construct output string (e.g. from template).
+
+  Arguments:
+  options -- argparse parsed arguments
+  values -- dict with raw values used to resolve the keywords in a template
+    string
+  """
+
+  if options.template is not None:
+    return SubstTemplate(options.template, values)
+  elif options.input:
+    return SubstFile(options.input, values)
+  else:
+    # Generate a default set of version information.
+    return """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+
+def BuildOutput(args):
+  """Gets all input and output values needed for writing output."""
+  # Build argparse parser with arguments
+  parser = BuildParser()
+  options = parser.parse_args(args)
+
+  # Get dict of passed '-e' arguments for evaluating
+  evals = BuildEvals(options, parser)
+  # For compatibility with interface that considered first two positional
+  # arguments shorthands for --input and --output.
+  ModifyOptionsCompat(options, parser)
+
+  # Get the raw values that will be used to generate the output
+  values = GenerateValues(options, evals)
+  # Get the output string
+  contents = GenerateOutputContents(options, values)
+
+  return {'options': options, 'contents': contents}
+
+
+def main():
+  output = BuildOutput(sys.argv[1:])
+
+  if output['options'].output is not None:
+    WriteIfChanged(output['options'].output, output['contents'])
+  else:
+    print(output['contents'])
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/util/version_test.py b/src/build/util/version_test.py
new file mode 100644
index 0000000..2a65ddc
--- /dev/null
+++ b/src/build/util/version_test.py
@@ -0,0 +1,174 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+import mock
+import version
+
+
+def _ReplaceArgs(args, *replacements):
+  new_args = args[:]
+  for flag, val in replacements:
+    flag_index = args.index(flag)
+    new_args[flag_index + 1] = val
+  return new_args
+
+
+class _VersionTest(unittest.TestCase):
+  """Unittests for the version module.
+  """
+
+  _CHROME_VERSION_FILE = os.path.join(
+      os.path.dirname(__file__), os.pardir, os.pardir, 'chrome', 'VERSION')
+
+  _SCRIPT = os.path.join(os.path.dirname(__file__), 'version.py')
+
+  _EXAMPLE_VERSION = {
+      'MAJOR': '74',
+      'MINOR': '0',
+      'BUILD': '3720',
+      'PATCH': '0',
+  }
+
+  _EXAMPLE_TEMPLATE = (
+      'full = "@MAJOR@.@MINOR@.@BUILD@.@PATCH@" '
+      'major = "@MAJOR@" minor = "@MINOR@" '
+      'build = "@BUILD@" patch = "@PATCH@" version_id = @VERSION_ID@ ')
+
+  _ANDROID_CHROME_VARS = [
+      'chrome_version_code',
+      'chrome_modern_version_code',
+      'monochrome_version_code',
+      'trichrome_version_code',
+      'webview_stable_version_code',
+      'webview_beta_version_code',
+      'webview_dev_version_code',
+  ]
+
+  _EXAMPLE_ANDROID_TEMPLATE = (
+      _EXAMPLE_TEMPLATE + ''.join(
+          ['%s = "@%s@" ' % (el, el.upper()) for el in _ANDROID_CHROME_VARS]))
+
+  _EXAMPLE_ARGS = [
+      '-f',
+      _CHROME_VERSION_FILE,
+      '-t',
+      _EXAMPLE_TEMPLATE,
+  ]
+
+  _EXAMPLE_ANDROID_ARGS = _ReplaceArgs(_EXAMPLE_ARGS,
+                                       ['-t', _EXAMPLE_ANDROID_TEMPLATE]) + [
+                                           '-a',
+                                           'arm',
+                                           '--os',
+                                           'android',
+                                       ]
+
+  @staticmethod
+  def _RunBuildOutput(new_version_values={},
+                      get_new_args=lambda old_args: old_args):
+    """Parameterized helper method for running the main testable method in
+    version.py.
+
+    Keyword arguments:
+    new_version_values -- dict used to update _EXAMPLE_VERSION
+    get_new_args -- lambda for updating _EXAMPLE_ANDROID_ARGS
+    """
+
+    with mock.patch('version.FetchValuesFromFile') as \
+        fetch_values_from_file_mock:
+
+      fetch_values_from_file_mock.side_effect = (lambda values, file:
+          values.update(
+              dict(_VersionTest._EXAMPLE_VERSION, **new_version_values)))
+
+      new_args = get_new_args(_VersionTest._EXAMPLE_ARGS)
+      return version.BuildOutput(new_args)
+
+  def testFetchValuesFromFile(self):
+    """It returns a dict in correct format - { <str>: <str> }, to verify
+    assumption of other tests that mock this function
+    """
+    result = {}
+    version.FetchValuesFromFile(result, self._CHROME_VERSION_FILE)
+
+    for key, val in result.iteritems():
+      self.assertIsInstance(key, str)
+      self.assertIsInstance(val, str)
+
+  def testBuildOutputAndroid(self):
+    """Assert it gives includes assignments of expected variables"""
+    output = self._RunBuildOutput(
+        get_new_args=lambda args: self._EXAMPLE_ANDROID_ARGS)
+    contents = output['contents']
+
+    self.assertRegexpMatches(contents, r'\bchrome_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\bchrome_modern_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents, r'\bmonochrome_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents, r'\btrichrome_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\bwebview_stable_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents, r'\bwebview_beta_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents, r'\bwebview_dev_version_code = "\d+"\s')
+
+  def testBuildOutputAndroidArchVariantsArm64(self):
+    """Assert 64-bit-specific version codes"""
+    new_template = (
+        self._EXAMPLE_ANDROID_TEMPLATE +
+        "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" "
+        "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" "
+        "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" "
+        "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ")
+    args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS,
+                                      ['-t', new_template])
+    new_args = _ReplaceArgs(args_with_template, ['-a', 'arm64'])
+    output = self._RunBuildOutput(get_new_args=lambda args: new_args)
+    contents = output['contents']
+
+    self.assertRegexpMatches(contents,
+                             r'\bmonochrome_64_32_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\bmonochrome_64_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\btrichrome_64_32_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\btrichrome_64_version_code = "\d+"\s')
+
+  def testBuildOutputAndroidArchVariantsX64(self):
+    """Assert 64-bit-specific version codes"""
+    new_template = (
+        self._EXAMPLE_ANDROID_TEMPLATE +
+        "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" "
+        "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" "
+        "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" "
+        "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ")
+    args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS,
+                                      ['-t', new_template])
+    new_args = _ReplaceArgs(args_with_template, ['-a', 'x64'])
+    output = self._RunBuildOutput(get_new_args=lambda args: new_args)
+    contents = output['contents']
+
+    self.assertRegexpMatches(contents,
+                             r'\bmonochrome_64_32_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\bmonochrome_64_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\btrichrome_64_32_version_code = "\d+"\s')
+    self.assertRegexpMatches(contents,
+                             r'\btrichrome_64_version_code = "\d+"\s')
+
+  def testBuildOutputAndroidChromeArchInput(self):
+    """Assert it raises an exception when using an invalid architecture input"""
+    new_args = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, ['-a', 'foobar'])
+    with self.assertRaises(SystemExit) as cm:
+      self._RunBuildOutput(get_new_args=lambda args: new_args)
+
+    self.assertEqual(cm.exception.code, 2)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/src/build/util/webkit_version.h.in b/src/build/util/webkit_version.h.in
new file mode 100644
index 0000000..41960e7
--- /dev/null
+++ b/src/build/util/webkit_version.h.in
@@ -0,0 +1,9 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// webkit_version.h is generated from webkit_version.h.in.  Edit the source!
+
+#define WEBKIT_VERSION_MAJOR 537
+#define WEBKIT_VERSION_MINOR 36
+#define WEBKIT_SVN_REVISION "@@LASTCHANGE@"
diff --git a/src/build/vs_toolchain.py b/src/build/vs_toolchain.py
new file mode 100755
index 0000000..c3b1182
--- /dev/null
+++ b/src/build/vs_toolchain.py
@@ -0,0 +1,573 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import collections
+import glob
+import json
+import os
+import pipes
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import sys
+
+from gn_helpers import ToGNString
+
+# VS 2019 16.61 with 10.0.19041 SDK, and 10.0.17134 version of
+# d3dcompiler_47.dll, with ARM64 libraries and UWP support.
+# See go/chromium-msvc-toolchain for instructions about how to update the
+# toolchain.
+#
+# When updating the toolchain, consider the following areas impacted by the
+# toolchain version:
+#
+# * //base/win/windows_version.cc NTDDI preprocessor check
+#   Triggers a compiler error if the available SDK is older than the minimum.
+# * //build/config/win/BUILD.gn NTDDI_VERSION value
+#   Affects the availability of APIs in the toolchain headers.
+# * //docs/windows_build_instructions.md mentions of VS or Windows SDK.
+#   Keeps the document consistent with the toolchain version.
+TOOLCHAIN_HASH = '20d5f2553f'
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+# VS versions are listed in descending order of priority (highest first).
+MSVS_VERSIONS = collections.OrderedDict([
+  ('2019', '16.0'),
+  ('2017', '15.0'),
+])
+
+# List of preferred VC toolset version based on MSVS
+MSVC_TOOLSET_VERSION = {
+    '2019': 'VC142',
+    '2017': 'VC141',
+}
+
+def _HostIsWindows():
+  """Returns True if running on a Windows host (including under cygwin)."""
+  return sys.platform in ('win32', 'cygwin')
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+  returns the location of the VC runtime DLLs so they can be copied into
+  the output directory after gyp generation.
+
+  Return value is [x64path, x86path, 'Arm64Unused'] or None. The arm64 path
+  is handled separately because there are multiple folders for the arm64 VC
+  runtime.
+  """
+  vs_runtime_dll_dirs = None
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  # When running on a non-Windows host, only do this if the SDK has explicitly
+  # been downloaded before (in which case json_data_file will exist).
+  if ((_HostIsWindows() or os.path.exists(json_data_file))
+      and depot_tools_win_toolchain):
+    if ShouldUpdateToolchain():
+      if len(sys.argv) > 1 and sys.argv[1] == 'update':
+        update_result = Update()
+      else:
+        update_result = Update(no_download=True)
+      if update_result != 0:
+        raise Exception('Failed to update, error code %d.' % update_result)
+    with open(json_data_file, 'r') as tempf:
+      toolchain_data = json.load(tempf)
+
+    toolchain = toolchain_data['path']
+    version = toolchain_data['version']
+    win_sdk = toolchain_data.get('win_sdk')
+    wdk = toolchain_data['wdk']
+    # TODO(scottmg): The order unfortunately matters in these. They should be
+    # split into separate keys for x64/x86/arm64. (See CopyDlls call below).
+    # http://crbug.com/345992
+    vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
+    # The number of runtime_dirs in the toolchain_data was two (x64/x86) but
+    # changed to three (x64/x86/arm64); this code needs to handle both
+    # possibilities, since the data can change independently of this code.
+    if len(vs_runtime_dll_dirs) == 2:
+      vs_runtime_dll_dirs.append('Arm64Unused')
+
+    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+
+    os.environ['WINDOWSSDKDIR'] = win_sdk
+    os.environ['WDK_DIR'] = wdk
+    # Include the VS runtime in the PATH in case it's not machine-installed.
+    runtime_path = os.path.pathsep.join(vs_runtime_dll_dirs)
+    os.environ['PATH'] = runtime_path + os.path.pathsep + os.environ['PATH']
+  elif sys.platform == 'win32' and not depot_tools_win_toolchain:
+    if not 'GYP_MSVS_OVERRIDE_PATH' in os.environ:
+      os.environ['GYP_MSVS_OVERRIDE_PATH'] = DetectVisualStudioPath()
+
+    # When using an installed toolchain these files aren't needed in the output
+    # directory in order to run binaries locally, but they are needed in order
+    # to create isolates or the mini_installer. Copying them to the output
+    # directory ensures that they are available when needed.
+    bitness = platform.architecture()[0]
+    # When running 64-bit python the x64 DLLs will be in System32.
+    # ARM64 binaries will not be available in the system directories because we
+    # don't build on ARM64 machines.
+    x64_path = 'System32' if bitness == '64bit' else 'Sysnative'
+    x64_path = os.path.join(os.path.expandvars('%windir%'), x64_path)
+    vs_runtime_dll_dirs = [x64_path,
+                           os.path.join(os.path.expandvars('%windir%'),
+                                        'SysWOW64'),
+                           'Arm64Unused']
+
+  return vs_runtime_dll_dirs
+
+
+def _RegistryGetValueUsingWinReg(key, value):
+  """Use the _winreg module to obtain the value of a registry key.
+
+  Args:
+    key: The registry key.
+    value: The particular registry value to read.
+  Return:
+    contents of the registry key's value, or None on failure.  Throws
+    ImportError if _winreg is unavailable.
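+
+    Illustrative example: _RegistryGetValueUsingWinReg(
+    'HKLM\\SOFTWARE\\Microsoft', 'Version') reads the 'Version' value under
+    HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft; only 'HKLM\\...' keys are
+    supported (see the assert below).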
+  """
+  import _winreg
+  try:
+    root, subkey = key.split('\\', 1)
+    assert root == 'HKLM'  # Only need HKLM for now.
+    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+      return _winreg.QueryValueEx(hkey, value)[0]
+  except WindowsError:
+    return None
+
+
+def _RegistryGetValue(key, value):
+  try:
+    return _RegistryGetValueUsingWinReg(key, value)
+  except ImportError:
+    raise Exception('The Python library _winreg was not found.')
+
+
+def GetVisualStudioVersion():
+  """Return best available version of Visual Studio.
+  """
+  supported_versions = list(MSVS_VERSIONS.keys())
+
+  # VS installed in depot_tools for Googlers
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))):
+    return supported_versions[0]
+
+  # VS installed in system for external developers
+  supported_versions_str = ', '.join('{} ({})'.format(v, k)
+                                     for k, v in MSVS_VERSIONS.items())
+  available_versions = []
+  for version in supported_versions:
+    # Checking vs%s_install environment variables.
+    # For example, vs2019_install could have the value
+    # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community".
+    # Only vs2017_install and vs2019_install are supported.
+    path = os.environ.get('vs%s_install' % version)
+    if path and os.path.exists(path):
+      available_versions.append(version)
+      break
+    # Detecting VS under possible paths.
+    path = os.path.expandvars('%ProgramFiles(x86)%' +
+                              '/Microsoft Visual Studio/%s' % version)
+    if path and any(
+        os.path.exists(os.path.join(path, edition))
+        for edition in ('Enterprise', 'Professional', 'Community', 'Preview',
+                        'BuildTools')):
+      available_versions.append(version)
+      break
+
+  if not available_versions:
+    raise Exception('No supported Visual Studio can be found.'
+                    ' Supported versions are: %s.' % supported_versions_str)
+  return available_versions[0]
+
+
+def DetectVisualStudioPath():
+  """Return path to the installed Visual Studio.
+  """
+
+  # Note that this code is used from
+  # build/toolchain/win/setup_toolchain.py as well.
+  version_as_year = GetVisualStudioVersion()
+
+  # The VC++ >=2017 install location needs to be located using COM instead of
+  # the registry. For details see:
+  # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/
+  # For now we use a hardcoded default with an environment variable override.
+  for path in (
+      os.environ.get('vs%s_install' % version_as_year),
+      os.path.expandvars('%ProgramFiles(x86)%' +
+                         '/Microsoft Visual Studio/%s/Enterprise' %
+                         version_as_year),
+      os.path.expandvars('%ProgramFiles(x86)%' +
+                         '/Microsoft Visual Studio/%s/Professional' %
+                         version_as_year),
+      os.path.expandvars('%ProgramFiles(x86)%' +
+                         '/Microsoft Visual Studio/%s/Community' %
+                         version_as_year),
+      os.path.expandvars('%ProgramFiles(x86)%' +
+                         '/Microsoft Visual Studio/%s/Preview' %
+                         version_as_year),
+      os.path.expandvars('%ProgramFiles(x86)%' +
+                         '/Microsoft Visual Studio/%s/BuildTools' %
+                         version_as_year)):
+    if path and os.path.exists(path):
+      return path
+
+  raise Exception('Visual Studio Version %s not found.' % version_as_year)
+
+
+def _CopyRuntimeImpl(target, source, verbose=True):
+  """Copy |source| to |target| if it doesn't already exist or if it needs to be
+  updated (comparing last modified time as an approximate float match as for
+  some reason the values tend to differ by ~1e-07 despite being copies of the
+  same file... https://crbug.com/603603).
+  """
+  if (os.path.isdir(os.path.dirname(target)) and
+      (not os.path.isfile(target) or
+       abs(os.stat(target).st_mtime - os.stat(source).st_mtime) >= 0.01)):
+    if verbose:
+      print('Copying %s to %s...' % (source, target))
+    if os.path.exists(target):
+      # Make the file writable so that we can delete it now, and keep it
+      # readable.
+      os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
+      os.unlink(target)
+    shutil.copy2(source, target)
+    # Make the file writable so that we can overwrite or delete it later,
+    # keep it readable.
+    os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
+
+def _SortByHighestVersionNumberFirst(list_of_str_versions):
+  """This sorts |list_of_str_versions| according to version number rules
+  so that version "1.12" is higher than version "1.9". Does not work
+  with non-numeric versions like 1.4.a8 which will be higher than
+  1.4.a12. It does handle the versions being embedded in file paths.
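+
+  Illustrative example: ['sdk/10.0.9', 'sdk/10.0.17763'] sorts to
+  ['sdk/10.0.17763', 'sdk/10.0.9'], since each part split on '\\', '/'
+  or '.' is compared as an integer when possible.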
+  """
+  def to_int_if_int(x):
+    try:
+      return int(x)
+    except ValueError:
+      return x
+
+  def to_number_sequence(x):
+    part_sequence = re.split(r'[\\/\.]', x)
+    return [to_int_if_int(x) for x in part_sequence]
+
+  list_of_str_versions.sort(key=to_number_sequence, reverse=True)
+
+
+def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix):
+  """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  if target_cpu == 'arm64':
+    # Windows ARM64 VCRuntime is located at {toolchain_root}/VC/Redist/MSVC/
+    # {x.y.z}/[debug_nonredist/]arm64/Microsoft.VC14x.CRT/.
+    # Select VC toolset directory based on Visual Studio version
+    vc_redist_root = FindVCRedistRoot()
+    if suffix.startswith('.'):
+      vc_toolset_dir = 'Microsoft.{}.CRT' \
+         .format(MSVC_TOOLSET_VERSION[GetVisualStudioVersion()])
+      source_dir = os.path.join(vc_redist_root,
+                                'arm64', vc_toolset_dir)
+    else:
+      vc_toolset_dir = 'Microsoft.{}.DebugCRT' \
+         .format(MSVC_TOOLSET_VERSION[GetVisualStudioVersion()])
+      source_dir = os.path.join(vc_redist_root, 'debug_nonredist',
+                                'arm64', vc_toolset_dir)
+  file_parts = ('msvcp140', 'vccorlib140', 'vcruntime140')
+  if target_cpu == 'x64' and GetVisualStudioVersion() != '2017':
+    file_parts = file_parts + ('vcruntime140_1', )
+  for file_part in file_parts:
+    dll = file_part + suffix
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
+  # Copy the UCRT files from the Windows SDK. This location includes the
+  # api-ms-win-crt-*.dll files that are not found in the Windows directory.
+  # These files are needed for component builds. If WINDOWSSDKDIR is not set
+  # use the default SDK path. This will be the case when
+  # DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+  win_sdk_dir = os.path.normpath(
+      os.environ.get('WINDOWSSDKDIR',
+                     os.path.expandvars('%ProgramFiles(x86)%'
+                                        '\\Windows Kits\\10')))
+  # ARM64 doesn't have a redist for the ucrt DLLs because they are always
+  # present in the OS.
+  if target_cpu != 'arm64':
+    # Starting with the 10.0.17763 SDK the ucrt files are in a version-named
+    # directory - this handles both cases.
+    redist_dir = os.path.join(win_sdk_dir, 'Redist')
+    version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
+    if len(version_dirs) > 0:
+      _SortByHighestVersionNumberFirst(version_dirs)
+      redist_dir = version_dirs[0]
+    ucrt_dll_dirs = os.path.join(redist_dir, 'ucrt', 'DLLs', target_cpu)
+    ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
+    assert len(ucrt_files) > 0
+    for ucrt_src_file in ucrt_files:
+      file_part = os.path.basename(ucrt_src_file)
+      ucrt_dst_file = os.path.join(target_dir, file_part)
+      _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
+  # We must copy ucrtbase.dll for x64/x86, and ucrtbased.dll for all CPU types.
+  if target_cpu != 'arm64' or not suffix.startswith('.'):
+    if not suffix.startswith('.'):
+      # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/
+      # ucrt/.
+      sdk_bin_root = os.path.join(win_sdk_dir, 'bin')
+      sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*'))
+      # Select the most recent SDK if there are multiple versions installed.
+      _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs)
+      for directory in sdk_bin_sub_dirs:
+        sdk_redist_root_version = os.path.join(sdk_bin_root, directory)
+        if not os.path.isdir(sdk_redist_root_version):
+          continue
+        source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt')
+        break
+    _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
+                     os.path.join(source_dir, 'ucrtbase' + suffix))
+
+
+def FindVCComponentRoot(component):
+  """Find the most recent Tools or Redist or other directory in an MSVC install.
+  Typical results are {toolchain_root}/VC/{component}/MSVC/{x.y.z}. The {x.y.z}
+  version number part changes frequently so the highest version number found is
+  used.
+  """
+
+  SetEnvironmentAndGetRuntimeDllDirs()
+  assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ)
+  vc_component_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      'VC', component, 'MSVC')
+  vc_component_msvc_contents = glob.glob(
+      os.path.join(vc_component_msvc_root, '14.*'))
+  # Select the most recent toolchain if there are several.
+  _SortByHighestVersionNumberFirst(vc_component_msvc_contents)
+  for directory in vc_component_msvc_contents:
+    if os.path.isdir(directory):
+      return directory
+  raise Exception('Unable to find the VC %s directory.' % component)
+
+
+def FindVCRedistRoot():
+  """In >=VS2017, Redist binaries are located in
+  {toolchain_root}/VC/Redist/MSVC/{x.y.z}/{target_cpu}/.
+
+  This returns the '{toolchain_root}/VC/Redist/MSVC/{x.y.z}/' path.
+  """
+  return FindVCComponentRoot('Redist')
+
+
+def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
+  """Copy the VS runtime DLLs, only if the target doesn't exist, but the target
+  directory does exist. Handles VS 2015, 2017 and 2019."""
+  suffix = 'd.dll' if debug else '.dll'
+  # VS 2015, 2017 and 2019 use the same CRT DLLs.
+  _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix)
+
+
+def CopyDlls(target_dir, configuration, target_cpu):
+  """Copy the VS runtime DLLs into the requested directory as needed.
+
+  configuration is one of 'Debug' or 'Release'.
+  target_cpu is one of 'x86', 'x64' or 'arm64'.
+
+  The debug configuration gets both the debug and release DLLs; the
+  release config only the latter.
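+
+  Illustrative invocation via main():
+  `python vs_toolchain.py copy_dlls <target_dir> Release x64`.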
+  """
+  vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  if not vs_runtime_dll_dirs:
+    return
+
+  x64_runtime, x86_runtime, arm64_runtime = vs_runtime_dll_dirs
+  if target_cpu == 'x64':
+    runtime_dir = x64_runtime
+  elif target_cpu == 'x86':
+    runtime_dir = x86_runtime
+  elif target_cpu == 'arm64':
+    runtime_dir = arm64_runtime
+  else:
+    raise Exception('Unknown target_cpu: ' + target_cpu)
+  _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
+  if configuration == 'Debug':
+    _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
+  _CopyDebugger(target_dir, target_cpu)
+
+
+def _CopyDebugger(target_dir, target_cpu):
+  """Copy dbghelp.dll and dbgcore.dll into the requested directory as needed.
+
+  target_cpu is one of 'x86', 'x64' or 'arm64'.
+
+  dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
+  from the SDK directory avoids using the system copy of dbghelp.dll which then
+  ensures compatibility with recent debug information formats, such as VS
+  2017 /debug:fastlink PDBs.
+
+  dbgcore.dll is needed when using some functions from dbghelp.dll (like
+  MinidumpWriteDump).
+  """
+  win_sdk_dir = SetEnvironmentAndGetSDKDir()
+  if not win_sdk_dir:
+    return
+
+  # List of debug files to copy; the first element of each tuple is the file
+  # name and the second indicates whether it's optional.
+  debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)]
+  # The UCRT is not a redistributable component on arm64.
+  if target_cpu != 'arm64':
+    debug_files.extend([('api-ms-win-downlevel-kernel32-l2-1-0.dll', False),
+                        ('api-ms-win-eventing-provider-l1-1-0.dll', False)])
+  for debug_file, is_optional in debug_files:
+    full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
+    if not os.path.exists(full_path):
+      if is_optional:
+        continue
+      else:
+        raise Exception('%s not found in "%s"\r\nYou must install '
+                        'Windows 10 SDK version 10.0.19041.0 including the '
+                        '"Debugging Tools for Windows" feature.' %
+                        (debug_file, full_path))
+    target_path = os.path.join(target_dir, debug_file)
+    _CopyRuntimeImpl(target_path, full_path)
+
+
+def _GetDesiredVsToolchainHashes():
+  """Load a list of SHA1s corresponding to the toolchains that we want installed
+  to build with."""
+  # Third parties that do not have access to the canonical toolchain can map
+  # canonical toolchain version to their own toolchain versions.
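+  # Illustrative example: setting GYP_MSVS_HASH_20d5f2553f=<your_hash> in the
+  # environment makes this return ['<your_hash>'] instead of TOOLCHAIN_HASH.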
+  toolchain_hash_mapping_key = 'GYP_MSVS_HASH_%s' % TOOLCHAIN_HASH
+  return [os.environ.get(toolchain_hash_mapping_key, TOOLCHAIN_HASH)]
+
+
+def ShouldUpdateToolchain():
+  """Check if the toolchain should be upgraded."""
+  if not os.path.exists(json_data_file):
+    return True
+  with open(json_data_file, 'r') as tempf:
+    toolchain_data = json.load(tempf)
+  version = toolchain_data['version']
+  env_version = GetVisualStudioVersion()
+  # If there's a mismatch between the version set in the environment and the one
+  # in the json file then the toolchain should be updated.
+  return version != env_version
+
+
+def Update(force=False, no_download=False):
+  """Requests an update of the toolchain to the specific hashes we have at
+  this revision. The update outputs a .json of the various configuration
+  information required to pass to gyp which we use in |GetToolchainDir()|.
+  If no_download is true then the toolchain will be configured if present but
+  will not be downloaded.
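+
+  Illustrative invocation via main(): `python vs_toolchain.py update` or
+  `python vs_toolchain.py update --force`.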
+  """
+  if force != False and force != '--force':
+    print('Unknown parameter "%s"' % force, file=sys.stderr)
+    return 1
+  if force == '--force' or os.path.exists(json_data_file):
+    force = True
+
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if (_HostIsWindows() or force) and depot_tools_win_toolchain:
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+
+    # On Linux, the file system is usually case-sensitive while the Windows
+    # SDK only works on case-insensitive file systems.  If it doesn't already
+    # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
+    # part of the file system.
+    toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
+    # For testing this block, unmount existing mounts with
+    # fusermount -u third_party/depot_tools/win_toolchain/vs_files
+    if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir):
+      import distutils.spawn
+      ciopfs = distutils.spawn.find_executable('ciopfs')
+      if not ciopfs:
+        # ciopfs not found in PATH; try the one downloaded from the DEPS hook.
+        ciopfs = os.path.join(script_dir, 'ciopfs')
+      if not os.path.isdir(toolchain_dir):
+        os.mkdir(toolchain_dir)
+      if not os.path.isdir(toolchain_dir + '.ciopfs'):
+        os.mkdir(toolchain_dir + '.ciopfs')
+      # Without use_ino, neither clang's #pragma once nor
+      # -Wnonportable-include-path works correctly; see
+      # https://llvm.org/PR34931. use_ino doesn't slow down builds, so there
+      # seems to be no drawback to always using it.
+      subprocess.check_call([
+          ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir])
+
+    get_toolchain_args = [
+        sys.executable,
+        os.path.join(depot_tools_path,
+                    'win_toolchain',
+                    'get_toolchain_if_necessary.py'),
+        '--output-json', json_data_file,
+      ] + _GetDesiredVsToolchainHashes()
+    if force:
+      get_toolchain_args.append('--force')
+    if no_download:
+      get_toolchain_args.append('--no-download')
+    subprocess.check_call(get_toolchain_args)
+
+  return 0
+
+
+def NormalizePath(path):
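+  # Strip trailing backslashes so the result embeds cleanly in GN strings;
+  # illustrative example: 'C:\\WinSdk\\' -> 'C:\\WinSdk'.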
+  while path.endswith('\\'):
+    path = path[:-1]
+  return path
+
+
+def SetEnvironmentAndGetSDKDir():
+  """Gets location information about the current sdk (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  SetEnvironmentAndGetRuntimeDllDirs()
+
+  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+  if not 'WINDOWSSDKDIR' in os.environ:
+    default_sdk_path = os.path.expandvars('%ProgramFiles(x86)%'
+                                          '\\Windows Kits\\10')
+    if os.path.isdir(default_sdk_path):
+      os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+  return NormalizePath(os.environ['WINDOWSSDKDIR'])
+
+
+def GetToolchainDir():
+  """Gets location information about the current toolchain (must have been
+  previously updated by 'update'). This is used for the GN build."""
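+  # The printed assignments are consumed by the GN build; illustrative shape:
+  # vs_path = "C:\\vs", sdk_path = "C:\\sdk", runtime_dirs = "dir1;dir2".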
+  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  win_sdk_dir = SetEnvironmentAndGetSDKDir()
+
+  print('''vs_path = %s
+sdk_path = %s
+vs_version = %s
+wdk_dir = %s
+runtime_dirs = %s
+''' % (ToGNString(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])),
+       ToGNString(win_sdk_dir), ToGNString(GetVisualStudioVersion()),
+       ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))),
+       ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None']))))
+
+
+def main():
+  commands = {
+      'update': Update,
+      'get_toolchain_dir': GetToolchainDir,
+      'copy_dlls': CopyDlls,
+  }
+  if len(sys.argv) < 2 or sys.argv[1] not in commands:
+    print('Expected one of: %s' % ', '.join(commands), file=sys.stderr)
+    return 1
+  return commands[sys.argv[1]](*sys.argv[2:])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/whitespace_file.txt b/src/build/whitespace_file.txt
new file mode 100644
index 0000000..c6768a3
--- /dev/null
+++ b/src/build/whitespace_file.txt
@@ -0,0 +1,190 @@
+Copyright 2014 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+======================================================================
+
+Let's make a story. Add zero+ sentences for every commit:
+
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the elements. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt," he snickered.
+
+The pause was filled with the sound of compile errors.
+
+CHAPTER 2:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it.". A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed so hard.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+adorning breakfast cereal with halibut -- shameful.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face.  Green? Probably.
+
+CHAPTER 5:
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+$CHAPTER6
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
+
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already ate our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+*
+Yay, another first commit! What a beautiful day!
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. As
+Domo-Kun looked around, it realized that some of the mirrors were actually but
+pale reflections of true reality.
+
+A herd of wild gits appears!  Time for CQ :D
+And one more for sizes.py...
+
+What's an overmarketed dietary supplement expressing sadness, relief,
+tiredness, or a similar feeling.?  Ah-Sigh-ee.
+
+It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, cronuts.
+
+If you stand on your head, you will get footprints in your hair.
+
+sigh
+sigher
+pick up cls
+
+In the BUILD we trust.
+^_^
+
+In the masters we don't.
+In the tryservers, we don't either.
+In the CQ sometimes.
+Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR)
+My sandwiches are like my children: I love them all.
+No, really, I couldn't eat another bit.
+When I hunger I think of you, and a pastrami sandwich.
+Do make a terrible mistake every once in a while.
+I just made two.
+Mistakes are the best sometimes.
+\o/
+This is groovy.
+
+SECRET ENDING: IT WAS _____ ALL ALONG!
+testing trailing line.
+
+So many books, so little time.
+
+Hi there, it's my first commit.
+!false isn't funny because it's true.
+
+Lalala
+
+Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+Vestibulum rhoncus neque sodales nibh lobortis, non fringilla odio aliquet.
+Praesent ultrices quam eu pretium ultrices.
+Quisque et consequat ex. Curabitur sed nunc neque.
+foo
diff --git a/src/build/win/BUILD.gn b/src/build/win/BUILD.gn
new file mode 100644
index 0000000..d449f59
--- /dev/null
+++ b/src/build/win/BUILD.gn
@@ -0,0 +1,175 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/manifest.gni")
+
+# Depending on this target causes Chrome's default Windows compatibility,
+# common-controls and elevation manifests to be applied to executables.
+windows_manifest("default_exe_manifest") {
+  sources = [
+    as_invoker_manifest,
+    common_controls_manifest,
+    default_compatibility_manifest,
+  ]
+}
+
+if (is_win) {
+  assert(host_os != "mac" || target_cpu != "x86",
+         "Windows cross-builds from Mac must be 64-bit.")
+
+  action("copy_cdb_to_output") {
+    script = "//build/win/copy_cdb_to_output.py"
+    inputs = [
+      script,
+      "//build/vs_toolchain.py",
+    ]
+    outputs = [
+      "$root_out_dir/cdb/cdb.exe",
+      "$root_out_dir/cdb/dbgeng.dll",
+      "$root_out_dir/cdb/dbghelp.dll",
+      "$root_out_dir/cdb/dbgmodel.dll",
+      "$root_out_dir/cdb/winext/ext.dll",
+      "$root_out_dir/cdb/winext/uext.dll",
+      "$root_out_dir/cdb/winxp/exts.dll",
+      "$root_out_dir/cdb/winxp/ntsdexts.dll",
+    ]
+    if (current_cpu != "arm64") {
+      # The UCRT files are not redistributable for ARM64 Win32.
+      outputs += [
+        "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
+        "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-downlevel-kernel32-l2-1-0.dll",
+        "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll",
+        "$root_out_dir/cdb/ucrtbase.dll",
+      ]
+    }
+    args = [
+      rebase_path("$root_out_dir/cdb", root_out_dir),
+      current_cpu,
+    ]
+  }
+
+  group("runtime_libs") {
+    data = []
+    if (is_component_build) {
+      # Copy the VS runtime DLLs into the isolate so that they don't have to be
+      # preinstalled on the target machine. The debug runtimes have a "d" at
+      # the end.
+      if (is_debug) {
+        vcrt_suffix = "d"
+      } else {
+        vcrt_suffix = ""
+      }
+
+      # These runtime files are copied to the output directory by the
+      # vs_toolchain script that runs as part of toolchain configuration.
+      data += [
+        "$root_out_dir/msvcp140${vcrt_suffix}.dll",
+        "$root_out_dir/vccorlib140${vcrt_suffix}.dll",
+        "$root_out_dir/vcruntime140${vcrt_suffix}.dll",
+      ]
+      if (current_cpu == "x64") {
+        data += [ "$root_out_dir/vcruntime140_1${vcrt_suffix}.dll" ]
+      }
+      if (is_debug) {
+        data += [ "$root_out_dir/ucrtbased.dll" ]
+      }
+      if (is_asan) {
+        assert(target_cpu == "x64",
+               "ASan is only supported in 64-bit builds on Windows.")
+        data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ]
+      }
+      if (current_cpu != "arm64") {
+        data += [
+          # Universal Windows 10 CRT files
+          "$root_out_dir/api-ms-win-core-console-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-datetime-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-debug-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-errorhandling-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-file-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-file-l1-2-0.dll",
+          "$root_out_dir/api-ms-win-core-file-l2-1-0.dll",
+          "$root_out_dir/api-ms-win-core-handle-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-heap-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-interlocked-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-libraryloader-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-localization-l1-2-0.dll",
+          "$root_out_dir/api-ms-win-core-memory-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-namedpipe-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-processenvironment-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-processthreads-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-processthreads-l1-1-1.dll",
+          "$root_out_dir/api-ms-win-core-profile-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-rtlsupport-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-string-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-synch-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-synch-l1-2-0.dll",
+          "$root_out_dir/api-ms-win-core-sysinfo-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-timezone-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-core-util-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-conio-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-convert-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-environment-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-filesystem-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-heap-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-locale-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-math-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-multibyte-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-private-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-process-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-runtime-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-stdio-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll",
+          "$root_out_dir/api-ms-win-downlevel-kernel32-l2-1-0.dll",
+          "$root_out_dir/api-ms-win-eventing-provider-l1-1-0.dll",
+        ]
+        if (!is_debug) {
+          data += [ "$root_out_dir/ucrtbase.dll" ]
+        }
+      }
+    }
+  }
+}
diff --git a/src/build/win/as_invoker.manifest b/src/build/win/as_invoker.manifest
new file mode 100644
index 0000000..df046fd
--- /dev/null
+++ b/src/build/win/as_invoker.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+    <requestedPrivileges>
+      <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
+    </requestedPrivileges>
+  </security>
+</trustInfo></assembly>
diff --git a/src/build/win/common_controls.manifest b/src/build/win/common_controls.manifest
new file mode 100644
index 0000000..1710196
--- /dev/null
+++ b/src/build/win/common_controls.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>
+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*' />
+    </dependentAssembly>
+  </dependency>
+</assembly>
diff --git a/src/build/win/compatibility.manifest b/src/build/win/compatibility.manifest
new file mode 100644
index 0000000..10d10da
--- /dev/null
+++ b/src/build/win/compatibility.manifest
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!--The ID below indicates application support for Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!--The ID below indicates application support for Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!--The ID below indicates application support for Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!--The ID below indicates application support for Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!--The ID below indicates application support for Windows 10 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+</assembly>
diff --git a/src/build/win/copy_cdb_to_output.py b/src/build/win/copy_cdb_to_output.py
new file mode 100755
index 0000000..a0b99bb
--- /dev/null
+++ b/src/build/win/copy_cdb_to_output.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import glob
+import hashlib
+import os
+import shutil
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
+sys.path.insert(0, src_build_dir)
+
+import vs_toolchain
+
+
+def _HexDigest(file_name):
+  """Return the SHA-256 hex digest of |file_name|, read in 64 KiB blocks."""
+  hasher = hashlib.sha256()
+  blocksize = 65536
+  with open(file_name, 'rb') as afile:
+    buf = afile.read(blocksize)
+    while buf:
+      hasher.update(buf)
+      buf = afile.read(blocksize)
+  return hasher.hexdigest()
+
+
+def _CopyImpl(file_name, target_dir, source_dir, verbose=False):
+  """Copy |source| to |target| if it doesn't already exist or if it
+  needs to be updated.
+  """
+  target = os.path.join(target_dir, file_name)
+  source = os.path.join(source_dir, file_name)
+  if (os.path.isdir(os.path.dirname(target)) and
+      ((not os.path.isfile(target)) or
+       _HexDigest(source) != _HexDigest(target))):
+    if verbose:
+      print('Copying %s to %s...' % (source, target))
+    if os.path.exists(target):
+      os.unlink(target)
+    shutil.copy(source, target)
+
+
+def _ConditionalMkdir(output_dir):
+  if not os.path.isdir(output_dir):
+    os.makedirs(output_dir)
+
+
+def _CopyCDBToOutput(output_dir, target_arch):
+  """Copies the Windows debugging executable cdb.exe to the output
+  directory, which is created if it does not exist. The output
+  directory, and target architecture that should be copied, are
+  passed. Supported values for the target architecture are the GYP
+  values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64".
+  """
+  _ConditionalMkdir(output_dir)
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+  # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
+  # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+  win_sdk_dir = os.path.normpath(
+      os.environ.get('WINDOWSSDKDIR',
+                     os.path.expandvars('%ProgramFiles(x86)%'
+                                        '\\Windows Kits\\10')))
+  if target_arch == 'ia32' or target_arch == 'x86':
+    src_arch = 'x86'
+  elif target_arch in ['x64', 'arm64']:
+    src_arch = target_arch
+  else:
+    print('copy_cdb_to_output.py: unknown target_arch %s' % target_arch)
+    sys.exit(1)
+  # We need to copy multiple files, so cache the computed source directory.
+  src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch)
+  # We need to copy some helper DLLs to get access to the !uniqstack
+  # command to dump all threads' stacks.
+  src_winext_dir = os.path.join(src_dir, 'winext')
+  dst_winext_dir = os.path.join(output_dir, 'winext')
+  src_winxp_dir = os.path.join(src_dir, 'winxp')
+  dst_winxp_dir = os.path.join(output_dir, 'winxp')
+  # Starting with the 10.0.17763 SDK the ucrt files are in a version-named
+  # directory - this handles both cases.
+  redist_dir = os.path.join(win_sdk_dir, 'Redist')
+  version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
+  if len(version_dirs) > 0:
+    version_dirs.sort(reverse=True)
+    redist_dir = version_dirs[0]
+  src_crt_dir = os.path.join(redist_dir, 'ucrt', 'DLLs', src_arch)
+  _ConditionalMkdir(dst_winext_dir)
+  _ConditionalMkdir(dst_winxp_dir)
+  # Note that the outputs from the "copy_cdb_to_output" target need to
+  # be kept in sync with this list.
+  _CopyImpl('cdb.exe', output_dir, src_dir)
+  _CopyImpl('dbgeng.dll', output_dir, src_dir)
+  _CopyImpl('dbghelp.dll', output_dir, src_dir)
+  _CopyImpl('dbgmodel.dll', output_dir, src_dir)
+  _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
+  _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
+  if src_arch in ['x64', 'x86']:
+    # Copy all UCRT files from the debuggers directory, for compatibility with
+    # the Windows 10 18362 SDK (one UCRT file) and later versions (two UCRT
+    # files). The new file is api-ms-win-downlevel-kernel32-l2-1-0.dll and
+    # should be added to the copy_cdb_to_output outputs when we require a newer
+    # SDK.
+    for file in glob.glob(os.path.join(src_dir, 'api-ms-win*.dll')):
+      _CopyImpl(os.path.split(file)[1], output_dir, src_dir)
+    _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir)
+  for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')):
+    _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir)
+  return 0
+
+
+def main():
+  if len(sys.argv) < 2:
+    print('Usage: copy_cdb_to_output.py <output_dir> <target_arch>',
+          file=sys.stderr)
+    return 1
+  return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/win/gn_meta_sln.py b/src/build/win/gn_meta_sln.py
new file mode 100644
index 0000000..862d278
--- /dev/null
+++ b/src/build/win/gn_meta_sln.py
@@ -0,0 +1,214 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# gn_meta_sln.py
+#   Helper utility to combine GN-generated Visual Studio projects into
+#   a single meta-solution.
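+#
+#   Illustrative usage (assumed workflow): generate one or more out dirs
+#   with `gn gen out/<config> --ide=vs`, then run this script from the
+#   source root; the combined solution is written to out/sln/.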
+
+from __future__ import print_function
+
+import os
+import glob
+import re
+import sys
+from shutil import copyfile
+
+# Helpers
+def EnsureExists(path):
+    try:
+        os.makedirs(path)
+    except OSError:
+        pass
+
+def WriteLinesToFile(lines, file_name):
+    EnsureExists(os.path.dirname(file_name))
+    with open(file_name, "w") as f:
+        f.writelines(lines)
+
+def ExtractIdg(proj_file_name):
+    result = []
+    with open(proj_file_name) as proj_file:
+        lines = iter(proj_file)
+        for p_line in lines:
+            if "<ItemDefinitionGroup" in p_line:
+                while not "</ItemDefinitionGroup" in p_line:
+                    result.append(p_line)
+                    p_line = next(lines)
+                result.append(p_line)
+                return result
+
+# [ (name, solution_name, vs_version), ... ]
+configs = []
+
+def GetVSVersion(solution_file):
+    with open(solution_file) as f:
+        f.readline()
+        comment = f.readline().strip()
+        return comment[-4:]
+
+# Find all directories that can be used as configs (and record if they have VS
+# files present)
+for root, dirs, files in os.walk("out"):
+    for out_dir in dirs:
+        gn_file = os.path.join("out", out_dir, "build.ninja.d")
+        if os.path.exists(gn_file):
+            solutions = glob.glob(os.path.join("out", out_dir, "*.sln"))
+            for solution in solutions:
+                vs_version = GetVSVersion(solution)
+                configs.append((out_dir, os.path.basename(solution),
+                                vs_version))
+    break
+
+# Every project has a GUID that encodes the type. We only care about C++.
+cpp_type_guid = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+
+# Work around MSBuild limitations by always using a fixed arch.
+hard_coded_arch = "x64"
+
+# name -> [ (config, pathToProject, GUID, arch), ... ]
+all_projects = {}
+project_pattern = (r'Project\("\{' + cpp_type_guid +
+                   r'\}"\) = "([^"]*)", "([^"]*)", "\{([^\}]*)\}"')
+
+# We need something to work with. Typically, this will fail if no GN folders
+# have IDE files.
+if len(configs) == 0:
+    print("ERROR: At least one GN directory must have been built with --ide=vs")
+    sys.exit(1)
+
+# Filter out configs which don't match the name and vs version of the first.
+name = configs[0][1]
+vs_version = configs[0][2]
+
+for config in configs:
+    if config[1] != name or config[2] != vs_version:
+        continue
+
+    sln_lines = iter(open(os.path.join("out", config[0], config[1])))
+    for sln_line in sln_lines:
+        match_obj = re.match(project_pattern, sln_line)
+        if match_obj:
+            proj_name = match_obj.group(1)
+            if proj_name not in all_projects:
+                all_projects[proj_name] = []
+            all_projects[proj_name].append((config[0], match_obj.group(2),
+                                            match_obj.group(3)))
+
+# We need something to work with. Typically, this will fail if no GN folders
+# have IDE files.
+if len(all_projects) == 0:
+    print("ERROR: At least one GN directory must have been built with --ide=vs")
+    sys.exit(1)
+
+# Create a new solution. We arbitrarily use the first config as the GUID source
+# (but we need to match that behavior later, when we copy/generate the project
+# files).
+new_sln_lines = []
+new_sln_lines.append(
+    'Microsoft Visual Studio Solution File, Format Version 12.00\n')
+new_sln_lines.append('# Visual Studio ' + vs_version + '\n')
+for proj_name, proj_configs in all_projects.items():
+    new_sln_lines.append('Project("{' + cpp_type_guid + '}") = "' + proj_name +
+                         '", "' + proj_configs[0][1] + '", "{' +
+                         proj_configs[0][2] + '}"\n')
+    new_sln_lines.append('EndProject\n')
+
+new_sln_lines.append('Global\n')
+new_sln_lines.append(
+    '\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
+for config in configs:
+    match = config[0] + '|' + hard_coded_arch
+    new_sln_lines.append('\t\t' + match + ' = ' + match + '\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append(
+    '\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
+for proj_name, proj_configs in all_projects.items():
+    proj_guid = proj_configs[0][2]
+    for config in configs:
+        match = config[0] + '|' + hard_coded_arch
+        new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
+                             '.ActiveCfg = ' + match + '\n')
+        new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
+                             '.Build.0 = ' + match + '\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(SolutionProperties) = preSolution\n')
+new_sln_lines.append('\t\tHideSolutionNode = FALSE\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(NestedProjects) = preSolution\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('EndGlobal\n')
+
+# Write solution file
+WriteLinesToFile(new_sln_lines, 'out/sln/' + name)
+
+idg_hdr = "<ItemDefinitionGroup Condition=\"'$(Configuration)|$(Platform)'=='"
+
+configuration_template = """    <ProjectConfiguration Include="{config}|{arch}">
+      <Configuration>{config}</Configuration>
+      <Platform>{arch}</Platform>
+    </ProjectConfiguration>
+"""
+
+def FormatProjectConfig(config):
+    return configuration_template.format(
+        config=config[0], arch=hard_coded_arch)
+
+# Now, bring over the project files
+for proj_name, proj_configs in all_projects.items():
+    # Paths to project and filter file in src and dst locations
+    src_proj_path = os.path.join("out", proj_configs[0][0], proj_configs[0][1])
+    dst_proj_path = os.path.join("out", "sln", proj_configs[0][1])
+    src_filter_path = src_proj_path + ".filters"
+    dst_filter_path = dst_proj_path + ".filters"
+
+    # Copy the filter file unmodified
+    EnsureExists(os.path.dirname(dst_proj_path))
+    copyfile(src_filter_path, dst_filter_path)
+
+    preferred_tool_arch = None
+    config_arch = {}
+
+    # Bring over the project file, modified with extra configs
+    with open(src_proj_path) as src_proj_file:
+        proj_lines = iter(src_proj_file)
+        new_proj_lines = []
+        for line in proj_lines:
+            if "<ItemDefinitionGroup" in line:
+                # This is a large group that contains many settings. We need to
+                # replicate it, with conditions so it varies per configuration.
+                idg_lines = []
+                while "</ItemDefinitionGroup" not in line:
+                    idg_lines.append(line)
+                    line = next(proj_lines)
+                idg_lines.append(line)
+                for proj_config in proj_configs:
+                    config_idg_lines = ExtractIdg(os.path.join("out",
+                                                               proj_config[0],
+                                                               proj_config[1]))
+                    match = proj_config[0] + '|' + hard_coded_arch
+                    new_proj_lines.append(idg_hdr + match + "'\">\n")
+                    for idg_line in config_idg_lines[1:]:
+                        new_proj_lines.append(idg_line)
+            elif "ProjectConfigurations" in line:
+                new_proj_lines.append(line)
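+                # Skip the original <ProjectConfiguration> element (four
+                # lines) and emit one entry per GN config instead.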
+                next(proj_lines)
+                next(proj_lines)
+                next(proj_lines)
+                next(proj_lines)
+                for config in configs:
+                    new_proj_lines.append(FormatProjectConfig(config))
+
+            elif "<OutDir" in line:
+                new_proj_lines.append(line.replace(proj_configs[0][0],
+                                                   "$(Configuration)"))
+            elif "<PreferredToolArchitecture" in line:
+                new_proj_lines.append("    <PreferredToolArchitecture>" +
+                                      hard_coded_arch +
+                                      "</PreferredToolArchitecture>\n")
+            else:
+                new_proj_lines.append(line)
+        with open(dst_proj_path, "w") as new_proj:
+            new_proj.writelines(new_proj_lines)
+
+print('Wrote meta solution to out/sln/' + name)
diff --git a/src/build/win/message_compiler.gni b/src/build/win/message_compiler.gni
new file mode 100644
index 0000000..0ddbc1f
--- /dev/null
+++ b/src/build/win/message_compiler.gni
@@ -0,0 +1,87 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win, "This only runs on Windows.")
+
+# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
+# placed in the target gen dir, and compiled.
+#
+# sources
+#   List of message files to process.
+#
+# user_mode_logging (optional bool)
+#   Generates user-mode logging code. Defaults to false (no logging code).
+#
+# compile_generated_code (optional, defaults to true)
+#   If unset or true, the generated code will be compiled and linked into
+#   targets that depend on it. If set to false, the .h and .rc files will only
+#   be generated.
+#
+# deps, public_deps, visibility
+#   Normal meaning.
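+#
+# A minimal usage sketch (target and file names are illustrative, not from
+# this repo):
+#
+#   message_compiler("chrome_messages") {
+#     sources = [ "chrome_messages.mc" ]
+#     user_mode_logging = true
+#   }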
+template("message_compiler") {
+  if (defined(invoker.compile_generated_code) &&
+      !invoker.compile_generated_code) {
+    compile_generated_code = false
+    action_name = target_name
+  } else {
+    compile_generated_code = true
+    action_name = "${target_name}_mc"
+    source_set_name = target_name
+  }
+
+  action_foreach(action_name) {
+    if (compile_generated_code) {
+      visibility = [ ":$source_set_name" ]
+    } else {
+      forward_variables_from(invoker, [ "visibility" ])
+    }
+
+    script = "//build/win/message_compiler.py"
+
+    outputs = [
+      "$target_gen_dir/{{source_name_part}}.h",
+      "$target_gen_dir/{{source_name_part}}.rc",
+    ]
+
+    args = [
+      # The first argument is the environment file saved to the build
+      # directory. This is required because the Windows toolchain setup saves
+      # the VC paths and such so that running "mc.exe" will work with the
+      # configured toolchain. This file is in the root build dir.
+      "environment.$current_cpu",
+
+      # Where to put the header.
+      "-h",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Where to put the .rc file.
+      "-r",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Input is Unicode.
+      "-u",
+    ]
+    if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) {
+      args += [ "-um" ]
+    }
+    args += [ "{{source}}" ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "sources",
+                           ])
+  }
+
+  if (compile_generated_code) {
+    # Compile the generated rc file.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = get_target_outputs(":$action_name")
+      deps = [ ":$action_name" ]
+    }
+  }
+}
diff --git a/src/build/win/message_compiler.py b/src/build/win/message_compiler.py
new file mode 100644
index 0000000..51de52f
--- /dev/null
+++ b/src/build/win/message_compiler.py
@@ -0,0 +1,148 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the Microsoft Message Compiler (mc.exe).
+#
+# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
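+#
+# Example invocation (paths illustrative), as generated by
+# message_compiler.gni:
+#   message_compiler.py environment.x64 -h gen/chrome -r gen/chrome -u foo.mc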
+
+from __future__ import print_function
+
+import difflib
+import distutils.dir_util
+import filecmp
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def main():
+  env_file, rest = sys.argv[1], sys.argv[2:]
+
+  # Parse some argument flags.
+  header_dir = None
+  resource_dir = None
+  input_file = None
+  for i, arg in enumerate(rest):
+    if arg == '-h' and len(rest) > i + 1:
+      assert header_dir is None
+      header_dir = rest[i + 1]
+    elif arg == '-r' and len(rest) > i + 1:
+      assert resource_dir is None
+      resource_dir = rest[i + 1]
+    elif arg.endswith('.mc') or arg.endswith('.man'):
+      assert input_file is None
+      input_file = arg
+
+  # Copy checked-in outputs to final location.
+  THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+  assert header_dir == resource_dir
+  source = os.path.join(THIS_DIR, "..", "..",
+      "third_party", "win_build_output",
+      re.sub(r'^(?:[^/]+/)?gen/', 'mc/', header_dir))
+  distutils.dir_util.copy_tree(source, header_dir, preserve_times=False)
+
+  # On non-Windows, that's all we can do.
+  if sys.platform != 'win32':
+    return
+
+  # On Windows, run mc.exe on the input and check that its outputs are
+  # identical to the checked-in outputs.
+
+  # Read the environment block from the file. This is stored in the format used
+  # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+  # trailing vs. separator.
+  env_pairs = open(env_file).read()[:-2].split('\0')
+  env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+  extension = os.path.splitext(input_file)[1]
+  if extension in ['.man', '.mc']:
+    # For .man files, mc's output changed significantly from Version 10.0.15063
+    # to Version 10.0.16299.  We should always have the output of the current
+    # default SDK checked in and compare to that. Early out if a different SDK
+    # is active. This also happens with .mc files.
+    # TODO(thakis): Check in new baselines and compare to 16299 instead once
+    # we use the 2017 Fall Creator's Update by default.
+    mc_help = subprocess.check_output(['mc.exe', '/?'], env=env_dict,
+                                      stderr=subprocess.STDOUT, shell=True)
+    version = re.search(br'Message Compiler\s+Version (\S+)', mc_help).group(1)
+    if version != b'10.0.15063':
+      return
+
+  # mc writes to stderr, so this explicitly redirects to stdout and eats it.
+  try:
+    tmp_dir = tempfile.mkdtemp()
+    delete_tmp_dir = True
+    if header_dir:
+      rest[rest.index('-h') + 1] = tmp_dir
+      header_dir = tmp_dir
+    if resource_dir:
+      rest[rest.index('-r') + 1] = tmp_dir
+      resource_dir = tmp_dir
+
+    # This needs shell=True to search the path in env_dict for the mc
+    # executable.
+    subprocess.check_output(['mc.exe'] + rest,
+                            env=env_dict,
+                            stderr=subprocess.STDOUT,
+                            shell=True)
+    # We require all source code (in particular, the header generated here) to
+    # be UTF-8. jinja can output the intermediate .mc file in UTF-8 or UTF-16LE.
+    # However, mc.exe only supports Unicode via the -u flag, and it assumes when
+    # that is specified that the input is UTF-16LE (and errors out on UTF-8
+# files, assuming they're ANSI). Even with -u specified and UTF-16LE input,
+    # it generates an ANSI header, and includes broken versions of the message
+    # text in the comment before the value. To work around this, for any invalid
+    # // comment lines, we simply drop the line in the header after building it.
+    # Also, mc.exe apparently doesn't always write #define lines in
+    # deterministic order, so manually sort each block of #defines.
+    if header_dir:
+      header_file = os.path.join(
+          header_dir, os.path.splitext(os.path.basename(input_file))[0] + '.h')
+      header_contents = []
+      with open(header_file, 'rb') as f:
+        define_block = []  # The current contiguous block of #defines.
+        for line in f.readlines():
+          if line.startswith(b'//') and b'?' in line:
+            continue
+          if line.startswith(b'#define '):
+            define_block.append(line)
+            continue
+          # On the first non-#define line, emit the sorted preceding #define
+          # block.
+          header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+          define_block = []
+          header_contents.append(line)
+        # If the .h file ends with a #define block, flush the final block.
+        header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+      with open(header_file, 'wb') as f:
+        f.write(b''.join(header_contents))
+
+    # mc.exe invocation and post-processing are complete, now compare the output
+    # in tmp_dir to the checked-in outputs.
+    diff = filecmp.dircmp(tmp_dir, source)
+    if diff.diff_files or set(diff.left_list) != set(diff.right_list):
+      print('mc.exe output different from files in %s, see %s' % (source,
+                                                                  tmp_dir))
+      diff.report()
+      for f in diff.diff_files:
+        if f.endswith('.bin'): continue
+        fromfile = os.path.join(source, f)
+        tofile = os.path.join(tmp_dir, f)
+        print(''.join(
+            difflib.unified_diff(
+                open(fromfile, 'U').readlines(),
+                open(tofile, 'U').readlines(), fromfile, tofile)))
+      delete_tmp_dir = False
+      sys.exit(1)
+  except subprocess.CalledProcessError as e:
+    print(e.output)
+    sys.exit(e.returncode)
+  finally:
+    if os.path.exists(tmp_dir) and delete_tmp_dir:
+      shutil.rmtree(tmp_dir)
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/win/reorder-imports.py b/src/build/win/reorder-imports.py
new file mode 100755
index 0000000..ee27ed1
--- /dev/null
+++ b/src/build/win/reorder-imports.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..',
+                                'third_party', 'pefile'))
+import pefile
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Swap chrome_elf.dll to be the first import of chrome.exe.
+  Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+  # TODO(thakis): See if there is a reliable way to write the
+  # correct executable in the first place, so that this script
+  # only needs to verify that and not write a whole new exe.
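+  #
+  # Example invocation (directories illustrative):
+  #   reorder-imports.py -i out/Release -o out/Release/reordered -a x64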
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
+  # pefile mmap()s the whole executable, and then parses parts of
+  # it into python data structures for ease of processing.
+  # To write the file again, only the mmap'd data is written back,
+  # so modifying the parsed python objects generally has no effect.
+  # However, parsed raw data ends up in pe.Structure instances,
+  # and these all get serialized back when the file gets written.
+  # So things that are in a Structure must have their data set
+  # through the Structure, while other data must be set through
+  # the set_bytes_*() methods.
+  pe = pefile.PE(input_image, fast_load=True)
+  if architecture == 'x64' or architecture == 'arm64':
+    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
+  else:
+    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE
+
+  pe.parse_data_directories(directories=[
+      pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
+
+  found_elf = False
+  for i, peimport in enumerate(pe.DIRECTORY_ENTRY_IMPORT):
+    if peimport.dll.lower() == 'chrome_elf.dll':
+      assert not found_elf, 'only one chrome_elf.dll import expected'
+      found_elf = True
+      if i > 0:
+        swap = pe.DIRECTORY_ENTRY_IMPORT[0]
+
+        # Morally we want to swap peimport.struct and swap.struct here,
+        # but the pe module doesn't expose a public method on Structure
+        # to get all data of a Structure without explicitly listing all
+        # field names.
+        # NB: OriginalFirstThunk and Characteristics are a union, both at
+        # offset 0, so handling just one of them is enough.
+        peimport.struct.OriginalFirstThunk, swap.struct.OriginalFirstThunk = \
+            swap.struct.OriginalFirstThunk, peimport.struct.OriginalFirstThunk
+        peimport.struct.TimeDateStamp, swap.struct.TimeDateStamp = \
+            swap.struct.TimeDateStamp, peimport.struct.TimeDateStamp
+        peimport.struct.ForwarderChain, swap.struct.ForwarderChain = \
+            swap.struct.ForwarderChain, peimport.struct.ForwarderChain
+        peimport.struct.Name, swap.struct.Name = \
+            swap.struct.Name, peimport.struct.Name
+        peimport.struct.FirstThunk, swap.struct.FirstThunk = \
+            swap.struct.FirstThunk, peimport.struct.FirstThunk
+  assert found_elf, 'chrome_elf.dll import not found'
+
+  pe.write(filename=output_image)
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder-imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+      metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+      metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+      default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and an output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/win/require_administrator.manifest b/src/build/win/require_administrator.manifest
new file mode 100644
index 0000000..4142e73
--- /dev/null
+++ b/src/build/win/require_administrator.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+    <requestedPrivileges>
+      <requestedExecutionLevel level="requireAdministrator" uiAccess="false"></requestedExecutionLevel>
+    </requestedPrivileges>
+  </security>
+</trustInfo></assembly>
diff --git a/src/build/win/segment_heap.manifest b/src/build/win/segment_heap.manifest
new file mode 100644
index 0000000..fc930a4
--- /dev/null
+++ b/src/build/win/segment_heap.manifest
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <application xmlns="urn:schemas-microsoft-com:asm.v3">
+    <windowsSettings>
+      <heapType xmlns="http://schemas.microsoft.com/SMI/2020/WindowsSettings">SegmentHeap</heapType>
+    </windowsSettings>
+  </application>
+</assembly>
diff --git a/src/build/win/set_appcontainer_acls.py b/src/build/win/set_appcontainer_acls.py
new file mode 100755
index 0000000..4a4e616
--- /dev/null
+++ b/src/build/win/set_appcontainer_acls.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Sets the app container ACLs on directory."""
+
+import os
+import argparse
+import sys
+
+SRC_DIR = os.path.dirname(
+    os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+sys.path.append(os.path.join(SRC_DIR, 'testing', 'scripts'))
+
+import common
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Sets App Container ACL on a directory.')
+  parser.add_argument('--stamp',
+                      required=False,
+                      help='Touch this stamp file on success.')
+  parser.add_argument('--dir', required=True, help='Set ACL on this directory.')
+  args = parser.parse_args()
+
+  common.set_lpac_acls(os.path.abspath(args.dir))
+  if args.stamp:
+    open(args.stamp, 'w').close()  # Update mtime on stamp file.
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/win/use_ansi_codes.py b/src/build/win/use_ansi_codes.py
new file mode 100755
index 0000000..5951c2a
--- /dev/null
+++ b/src/build/win/use_ansi_codes.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the the terminal is likely to understand ANSI codes."""
+
+from __future__ import print_function
+
+import os
+
+# Add more terminals here as needed.
+print('ANSICON' in os.environ)
diff --git a/src/build/write_build_date_header.py b/src/build/write_build_date_header.py
new file mode 100755
index 0000000..7738828
--- /dev/null
+++ b/src/build/write_build_date_header.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Takes a timestamp and writes it in as readable text to a .h file."""
+
+import argparse
+import datetime
+import os
+import sys
+
+
+def main():
+  argument_parser = argparse.ArgumentParser()
+  argument_parser.add_argument('output_file', help='The file to write to')
+  argument_parser.add_argument('timestamp')
+  args = argument_parser.parse_args()
+
+  date = datetime.datetime.utcfromtimestamp(int(args.timestamp))
+  output = ('// Generated by //build/write_build_date_header.py\n'
+           '#ifndef BUILD_DATE\n'
+           '#define BUILD_DATE "{:%b %d %Y %H:%M:%S}"\n'
+           '#endif // BUILD_DATE\n'.format(date))
+
+  current_contents = ''
+  if os.path.isfile(args.output_file):
+    with open(args.output_file, 'r') as current_file:
+      current_contents = current_file.read()
+
+  if current_contents != output:
+    with open(args.output_file, 'w') as output_file:
+      output_file.write(output)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/write_buildflag_header.py b/src/build/write_buildflag_header.py
new file mode 100755
index 0000000..47b9a03
--- /dev/null
+++ b/src/build/write_buildflag_header.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This writes headers for build flags. See buildflag_header.gni for usage of
+# this system as a whole.
+#
+# The parameters are passed in a response file so we don't have to worry
+# about command line lengths. The name of the response file is passed on the
+# command line.
+#
+# The format of the response file is:
+#    [--flags <list of one or more flag values>]
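+#
+# For example (flag names illustrative), a response file containing:
+#    --flags ENABLE_FOO=true BAR_LEVEL=2
+# produces a header that defines:
+#    BUILDFLAG_INTERNAL_ENABLE_FOO() (1)
+#    BUILDFLAG_INTERNAL_BAR_LEVEL() (2)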
+
+import optparse
+import os
+import shlex
+import sys
+
+
+class Options:
+  def __init__(self, output, rulename, header_guard, flags):
+    self.output = output
+    self.rulename = rulename
+    self.header_guard = header_guard
+    self.flags = flags
+
+
+def GetOptions():
+  parser = optparse.OptionParser()
+  parser.add_option('--output', help="Output header name inside --gen-dir.")
+  parser.add_option('--rulename',
+                    help="Helpful name of build rule for including in the " +
+                         "comment at the top of the file.")
+  parser.add_option('--gen-dir',
+                    help="Path to root of generated file directory tree.")
+  parser.add_option('--definitions',
+                    help="Name of the response file containing the flags.")
+  cmdline_options, cmdline_flags = parser.parse_args()
+
+  # Compute header guard by replacing some chars with _ and upper-casing.
+  header_guard = cmdline_options.output.upper()
+  header_guard = \
+      header_guard.replace('/', '_').replace('\\', '_').replace('.', '_')
+  header_guard += '_'
+
+  # The actual output file is inside the gen dir.
+  output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)
+
+  # Definition files in GYP are newline separated; in GN they are shell
+  # formatted. shlex can parse both of these.
+  with open(cmdline_options.definitions, 'r') as def_file:
+    defs = shlex.split(def_file.read())
+  flags_index = defs.index('--flags')
+
+  # Everything after --flags are flags. true/false are remapped to 1/0,
+  # everything else is passed through.
+  flags = []
+  for flag in defs[flags_index + 1:]:
+    equals_index = flag.index('=')
+    key = flag[:equals_index]
+    value = flag[equals_index + 1:]
+
+    # Canonicalize and validate the value.
+    if value == 'true':
+      value = '1'
+    elif value == 'false':
+      value = '0'
+    flags.append((key, str(value)))
+
+  return Options(output=output,
+                 rulename=cmdline_options.rulename,
+                 header_guard=header_guard,
+                 flags=flags)
+
+
+def WriteHeader(options):
+  with open(options.output, 'w') as output_file:
+    output_file.write("// Generated by build/write_buildflag_header.py\n")
+    if options.rulename:
+      output_file.write('// From "' + options.rulename + '"\n')
+
+    output_file.write('\n#ifndef %s\n' % options.header_guard)
+    output_file.write('#define %s\n\n' % options.header_guard)
+    output_file.write('#include "build/buildflag.h"\n\n')
+
+    for pair in options.flags:
+      output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
+
+    output_file.write('\n#endif  // %s\n' % options.header_guard)
+
+
+if os.name == 'nt':
+  major, minor, build, platform, service_pack = sys.getwindowsversion()
+  # Windows 10 will be 6.2 on Python 2 and 10.0 on Python 3. This check
+  # handles both.
+  if major < 6 or (major == 6 and minor < 2):
+    raise Exception(
+        'Unsupported OS. Building Chromium requires Windows 10. %s detected.' %
+        str(sys.getwindowsversion()))
+options = GetOptions()
+WriteHeader(options)
diff --git a/src/build/xcode_binaries.yaml b/src/build/xcode_binaries.yaml
new file mode 100644
index 0000000..779b3ac
--- /dev/null
+++ b/src/build/xcode_binaries.yaml
@@ -0,0 +1,57 @@
+# This yaml file is used to package binaries from Xcode.app.
+# To use this:
+#   1) Move Xcode.app to the same directory as this file,
+#      and rename Xcode.app to xcode_binaries. Or make a symlink:
+#      $ rm -rf build/xcode_binaries && \
+#        ln -s /Applications/Xcode.app build/xcode_binaries
+#   2) Call `cipd create --pkg-def build/xcode_binaries.yaml`
+# To deploy the newly created cipd package across the fleet, modify
+# mac_toolchain.py to point to the new cipd hash.
+#
+# Note that runhooks extracts the cipd file to build/mac_files/xcode_binaries
+# -- the build/xcode_binaries you're creating in step 1 above isn't used as
+# part of the Chromium build; build/mac_files/xcode_binaries is. So you need
+# to `runhooks` after updating the hash in mac_toolchain.py like everyone else
+# to get the new bits for your local build.
+#
+# The ACLs for this package are determined by the directory structure. The
+# nomenclature mirrors that of the hermetic toolchain to avoid ACL duplication.
+package: infra_internal/ios/xcode/xcode_binaries/mac-amd64
+description: A hermetic deployment of all Xcode binaries used to build Chromium.
+root: "xcode_binaries"
+data:
+  - dir: Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
+  - dir: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/share/bison
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/bison
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gm4
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gperf
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/libtool
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/lipo
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-nm
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-objdump
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-otool
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-size
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/mig
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/nm
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool-classic
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/size
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/size-classic
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/strip
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libLTO.dylib
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libcodedirectory.dylib
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libswiftDemangle.dylib
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libtapi.dylib
+  - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/libexec/migcom
+  - file: Contents/Developer/usr/bin/atos
+  - file: Contents/Resources/English.lproj/License.rtf
+  - file: Contents/Resources/LicenseInfo.plist
+  - dir: Contents/SharedFrameworks/CoreSymbolicationDT.framework
+  - dir: Contents/SharedFrameworks/DebugSymbolsDT.framework
+  - dir: Contents/SharedFrameworks/MallocStackLoggingDT.framework
+  - dir: Contents/SharedFrameworks/SymbolicationDT.framework
+  - file: Contents/version.plist
+# llvm-size and size are run on swarming; they are symlinked out of the
+# isolated tree and produce invalid symlinks if we don't use copy mode here.
+install_mode: copy
diff --git a/src/build/apk_test.gypi b/src/build_gyp/apk_test.gypi
similarity index 100%
rename from src/build/apk_test.gypi
rename to src/build_gyp/apk_test.gypi
diff --git a/src/build/common.gypi b/src/build_gyp/common.gypi
similarity index 100%
rename from src/build/common.gypi
rename to src/build_gyp/common.gypi
diff --git a/src/build/filename_rules.gypi b/src/build_gyp/filename_rules.gypi
similarity index 100%
rename from src/build/filename_rules.gypi
rename to src/build_gyp/filename_rules.gypi
diff --git a/src/build/protoc.gypi b/src/build_gyp/protoc.gypi
similarity index 100%
rename from src/build/protoc.gypi
rename to src/build_gyp/protoc.gypi
diff --git a/src/build/win_precompile.gypi b/src/build_gyp/win_precompile.gypi
similarity index 100%
rename from src/build/win_precompile.gypi
rename to src/build_gyp/win_precompile.gypi
diff --git a/src/build_overrides/build.gni b/src/build_overrides/build.gni
new file mode 100644
index 0000000..560cb57
--- /dev/null
+++ b/src/build_overrides/build.gni
@@ -0,0 +1,22 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_with_chromium = false
+
+declare_args() {
+  # Android 32-bit non-component, non-clang builds cannot have symbol_level=2
+  # due to 4GiB file size limit, see https://crbug.com/648948.
+  # Set this flag to true to skip the assertion.
+  ignore_elf32_limitations = false
+}
diff --git a/src/cobalt/CHANGELOG.md b/src/cobalt/CHANGELOG.md
index 48dcdc8..5cd14df 100644
--- a/src/cobalt/CHANGELOG.md
+++ b/src/cobalt/CHANGELOG.md
@@ -2,6 +2,33 @@
 
 This document records all notable changes made to Cobalt since the last release.
 
+## Version 22
+ - **C++14 is required to compile Cobalt 22.**
+   Cobalt code now requires C++14-compatible toolchains to compile. This
+   requirement helps us stay updated with C++ standards and integrate
+   third-party libraries more easily.
+
+ - **SpiderMonkey(mozjs-45) JavaScript Engine library is removed.**
+   As stated last year, V8 should be the choice of JavaScript engine on
+   every platform. SpiderMonkey is now completely removed.
+
+ - **V8 JavaScript Engine is rebased to version v8.8.**
+   We rebased V8 from v7.7 in Cobalt 21 to v8.8 in Cobalt 22. V8 8.8 provides
+   a new feature, pointer compression, that reduces JavaScript heap memory
+   usage by 60% on 64-bit platforms (arm64 and x64), saving about 5MB on
+   startup and more than 8MB in active sessions. This feature is turned on
+   automatically when a platform uses a 64-bit CPU architecture.
+
+ - **window.navigator.onLine property and its change events are added.**
+   To improve the user experience during network connect/disconnect
+   situations and to enable auto-reconnect, Cobalt added web APIs including
+   the Navigator.onLine property and its change events. To enable the
+   property and events on a platform, the platform's Starboard implementation
+   must provide these new Starboard APIs: SbSystemNetworkIsDisconnected(),
+   the kSbEventTypeOsNetworkDisconnected event, and the
+   kSbEventTypeOsNetworkConnected event.
+
 ## Version 21
 
  - **SpiderMonkey(mozjs-45) JavaScript Engine is no longer supported.**
@@ -142,6 +169,12 @@
    Platforms can provide javascript code caching by implementing
    CobaltExtensionJavaScriptCacheApi.
 
+
+ - **Added support for UrlFetcher observer.**
+
+   Platforms can provide a UrlFetcher observer for performance tracing by
+   implementing CobaltExtensionUrlFetcherObserverApi.
+
 ## Version 20
 
  - **Support for QUIC and SPDY is now enabled.**
diff --git a/src/cobalt/audio/audio_context.cc b/src/cobalt/audio/audio_context.cc
index 6c4808e..4ea6847 100644
--- a/src/cobalt/audio/audio_context.cc
+++ b/src/cobalt/audio/audio_context.cc
@@ -15,6 +15,7 @@
 #include "cobalt/audio/audio_context.h"
 
 #include <memory>
+#include <utility>
 
 #include "base/callback.h"
 #include "cobalt/base/polymorphic_downcast.h"
@@ -109,25 +110,23 @@
 }
 
 void AudioContext::DecodeAudioData(
-    script::EnvironmentSettings* settings,
     const script::Handle<script::ArrayBuffer>& audio_data,
     const DecodeSuccessCallbackArg& success_handler) {
   DCHECK(main_message_loop_->BelongsToCurrentThread());
 
   std::unique_ptr<DecodeCallbackInfo> info(
-      new DecodeCallbackInfo(settings, audio_data, this, success_handler));
+      new DecodeCallbackInfo(audio_data, this, success_handler));
   DecodeAudioDataInternal(std::move(info));
 }
 
 void AudioContext::DecodeAudioData(
-    script::EnvironmentSettings* settings,
     const script::Handle<script::ArrayBuffer>& audio_data,
     const DecodeSuccessCallbackArg& success_handler,
     const DecodeErrorCallbackArg& error_handler) {
   DCHECK(main_message_loop_->BelongsToCurrentThread());
 
-  std::unique_ptr<DecodeCallbackInfo> info(new DecodeCallbackInfo(
-      settings, audio_data, this, success_handler, error_handler));
+  std::unique_ptr<DecodeCallbackInfo> info(
+      new DecodeCallbackInfo(audio_data, this, success_handler, error_handler));
   DecodeAudioDataInternal(std::move(info));
 }
 
@@ -138,14 +137,13 @@
   const int callback_id = next_callback_id_++;
   CHECK(pending_decode_callbacks_.find(callback_id) ==
         pending_decode_callbacks_.end());
-  script::Handle<script::ArrayBuffer> audio_data =
-      script::Handle<script::ArrayBuffer>(info->audio_data_reference);
+  const std::string& audio_data = info->audio_data;
   pending_decode_callbacks_[callback_id] = info.release();
 
   AsyncAudioDecoder::DecodeFinishCallback decode_callback = base::Bind(
       &AudioContext::DecodeFinish, base::Unretained(this), callback_id);
-  audio_decoder_.AsyncDecode(static_cast<const uint8*>(audio_data->Data()),
-                             audio_data->ByteLength(), decode_callback);
+  audio_decoder_.AsyncDecode(reinterpret_cast<const uint8*>(audio_data.data()),
+                             audio_data.size(), decode_callback);
 }
 
 // Success callback and error callback should be scheduled to run on the main
diff --git a/src/cobalt/audio/audio_context.h b/src/cobalt/audio/audio_context.h
index 5245af8..01e7b6f 100644
--- a/src/cobalt/audio/audio_context.h
+++ b/src/cobalt/audio/audio_context.h
@@ -124,11 +124,9 @@
   // ArrayBuffer can, for example, be loaded from an XMLHttpRequest's response
   // attribute after setting the responseType to "arraybuffer". Audio file data
   // can be in any of the formats supported by the audio element.
-  void DecodeAudioData(script::EnvironmentSettings* settings,
-                       const script::Handle<script::ArrayBuffer>& audio_data,
+  void DecodeAudioData(const script::Handle<script::ArrayBuffer>& audio_data,
                        const DecodeSuccessCallbackArg& success_handler);
-  void DecodeAudioData(script::EnvironmentSettings* settings,
-                       const script::Handle<script::ArrayBuffer>& audio_data,
+  void DecodeAudioData(const script::Handle<script::ArrayBuffer>& audio_data,
                        const DecodeSuccessCallbackArg& success_handler,
                        const DecodeErrorCallbackArg& error_handler);
 
@@ -155,27 +153,26 @@
 
  private:
   struct DecodeCallbackInfo {
-    DecodeCallbackInfo(script::EnvironmentSettings* settings,
-                       const script::Handle<script::ArrayBuffer>& data_handle,
+    DecodeCallbackInfo(const script::Handle<script::ArrayBuffer>& data_handle,
                        AudioContext* const audio_context,
                        const DecodeSuccessCallbackArg& success_handler)
-        : env_settings(settings),
-          audio_data_reference(audio_context, data_handle),
+        : audio_data(reinterpret_cast<const char*>(data_handle->Data()),
+                     data_handle->ByteLength()),
           success_callback(audio_context, success_handler) {}
 
-    DecodeCallbackInfo(script::EnvironmentSettings* settings,
-                       const script::Handle<script::ArrayBuffer>& data_handle,
+    DecodeCallbackInfo(const script::Handle<script::ArrayBuffer>& data_handle,
                        AudioContext* const audio_context,
                        const DecodeSuccessCallbackArg& success_handler,
                        const DecodeErrorCallbackArg& error_handler)
-        : env_settings(settings),
-          audio_data_reference(audio_context, data_handle),
+        : audio_data(reinterpret_cast<const char*>(data_handle->Data()),
+                     data_handle->ByteLength()),
           success_callback(audio_context, success_handler) {
       error_callback.emplace(audio_context, error_handler);
     }
 
-    script::EnvironmentSettings* env_settings;
-    script::ScriptValue<script::ArrayBuffer>::Reference audio_data_reference;
+    DecodeCallbackInfo(const DecodeCallbackInfo&) = delete;
+
+    std::string audio_data;
     DecodeSuccessCallbackReference success_callback;
     base::Optional<DecodeErrorCallbackReference> error_callback;
   };
diff --git a/src/cobalt/audio/audio_context.idl b/src/cobalt/audio/audio_context.idl
index f62770a..4069974 100644
--- a/src/cobalt/audio/audio_context.idl
+++ b/src/cobalt/audio/audio_context.idl
@@ -23,9 +23,9 @@
   readonly attribute float sampleRate;
   readonly attribute double currentTime;
 
-  [CallWith=EnvironmentSettings] void decodeAudioData(
-      ArrayBuffer audioData, DecodeSuccessCallback successCallback,
-      optional DecodeErrorCallback errorCallback);
+  void decodeAudioData(ArrayBuffer audioData,
+                       DecodeSuccessCallback successCallback,
+                       optional DecodeErrorCallback errorCallback);
 
   // AudioNode creation
   [CallWith=EnvironmentSettings] AudioBufferSourceNode createBufferSource();
diff --git a/src/cobalt/audio/audio_file_reader_wav.cc b/src/cobalt/audio/audio_file_reader_wav.cc
index 6e75c56..4000dfe 100644
--- a/src/cobalt/audio/audio_file_reader_wav.cc
+++ b/src/cobalt/audio/audio_file_reader_wav.cc
@@ -199,9 +199,12 @@
 #if SB_IS(LITTLE_ENDIAN)
   if ((!is_src_sample_in_float && sample_type_ == kSampleTypeInt16) ||
       (is_src_sample_in_float && sample_type_ == kSampleTypeFloat32)) {
+    SB_LOG(INFO) << "Copying " << size << " bytes of wav data.";
     SbMemoryCopy(audio_bus_->interleaved_data(), data + offset, size);
   } else if (!is_src_sample_in_float && sample_type_ == kSampleTypeFloat32) {
     // Convert from int16 to float32
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " samples from int16 to float32.";
     const int16* src_samples = reinterpret_cast<const int16*>(data + offset);
     float* dest_samples =
         reinterpret_cast<float*>(audio_bus_->interleaved_data());
@@ -213,6 +216,8 @@
     }
   } else {
     // Convert from float32 to int16
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " samples from float32 to int16.";
     const float* src_samples = reinterpret_cast<const float*>(data + offset);
     int16* dest_samples =
         reinterpret_cast<int16*>(audio_bus_->interleaved_data());
@@ -225,6 +230,8 @@
   }
 #else   // SB_IS(LITTLE_ENDIAN)
   if (!is_src_sample_in_float && sample_type_ == kSampleTypeInt16) {
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " int16 samples from little endian to big endian.";
     const uint8_t* src_samples = data + offset;
     int16* dest_samples =
         reinterpret_cast<int16*>(audio_bus_->interleaved_data());
@@ -234,6 +241,8 @@
       ++dest_samples;
     }
   } else if (is_src_sample_in_float && sample_type_ == kSampleTypeFloat32) {
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " float32 samples from little endian to big endian.";
     const uint8_t* src_samples = data + offset;
     float* dest_samples =
         reinterpret_cast<float*>(audio_bus_->interleaved_data());
@@ -245,6 +254,8 @@
     }
   } else if (!is_src_sample_in_float && sample_type_ == kSampleTypeFloat32) {
     // Convert from int16 to float32
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " int16 samples in little endian to float32 in big endian.";
     const uint8_t* src_samples = data + offset;
     float* dest_samples =
         reinterpret_cast<float*>(audio_bus_->interleaved_data());
@@ -256,6 +267,8 @@
     }
   } else {
     // Convert from float32 to int16
+    SB_LOG(INFO) << "Converting " << number_of_frames_ * number_of_channels_
+                 << " float32 samples in little endian to int16 in big endian.";
     const uint8_t* src_samples = data + offset;
     int16* dest_samples =
         reinterpret_cast<int16*>(audio_bus_->interleaved_data());
diff --git a/src/cobalt/base/base.gyp b/src/cobalt/base/base.gyp
index 4e4962b..920a39c 100644
--- a/src/cobalt/base/base.gyp
+++ b/src/cobalt/base/base.gyp
@@ -42,6 +42,7 @@
         'c_val_collection_timer_stats.h',
         'c_val_time_interval_entry_stats.h',
         'c_val_time_interval_timer_stats.h',
+        'date_time_configuration_changed_event.h',
         'deep_link_event.h',
         'do_main.h',
         'do_main_starboard.h',
diff --git a/src/cobalt/base/date_time_configuration_changed_event.h b/src/cobalt/base/date_time_configuration_changed_event.h
new file mode 100644
index 0000000..35ca40f
--- /dev/null
+++ b/src/cobalt/base/date_time_configuration_changed_event.h
@@ -0,0 +1,35 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_BASE_DATE_TIME_CONFIGURATION_CHANGED_EVENT_H_
+#define COBALT_BASE_DATE_TIME_CONFIGURATION_CHANGED_EVENT_H_
+
+#include <string>
+
+#include "base/compiler_specific.h"
+#include "base/strings/string_util.h"
+#include "cobalt/base/event.h"
+
+namespace base {
+
+class DateTimeConfigurationChangedEvent : public Event {
+ public:
+  DateTimeConfigurationChangedEvent() {}
+
+  BASE_EVENT_SUBCLASS(DateTimeConfigurationChangedEvent);
+};
+
+}  // namespace base
+
+#endif  // COBALT_BASE_DATE_TIME_CONFIGURATION_CHANGED_EVENT_H_
diff --git a/src/cobalt/base/wrap_main_starboard.h b/src/cobalt/base/wrap_main_starboard.h
index 23a2d91..c02a6eb 100644
--- a/src/cobalt/base/wrap_main_starboard.h
+++ b/src/cobalt/base/wrap_main_starboard.h
@@ -146,6 +146,9 @@
     case kSbEventTypeOsNetworkDisconnected:
     case kSbEventTypeOsNetworkConnected:
 #endif
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+    case kSbEventDateTimeConfigurationChanged:
+#endif
       event_function(event);
       break;
   }
diff --git a/src/cobalt/black_box_tests/black_box_tests.py b/src/cobalt/black_box_tests/black_box_tests.py
index f1cde49..43cf1a7 100644
--- a/src/cobalt/black_box_tests/black_box_tests.py
+++ b/src/cobalt/black_box_tests/black_box_tests.py
@@ -61,7 +61,7 @@
     'allow_eval',
     'disable_eval_with_csp',
     'persistent_cookie',
-    # 'web_debugger',
+    'web_debugger',
     'web_platform_tests',
 ]
 # These tests can only be run on platforms whose app launcher can send deep
diff --git a/src/cobalt/browser/application.cc b/src/cobalt/browser/application.cc
index 7bcacc5..03f20f9 100644
--- a/src/cobalt/browser/application.cc
+++ b/src/cobalt/browser/application.cc
@@ -41,6 +41,7 @@
 #include "cobalt/base/accessibility_settings_changed_event.h"
 #include "cobalt/base/accessibility_text_to_speech_settings_changed_event.h"
 #include "cobalt/base/cobalt_paths.h"
+#include "cobalt/base/date_time_configuration_changed_event.h"
 #include "cobalt/base/deep_link_event.h"
 #include "cobalt/base/get_application_key.h"
 #include "cobalt/base/init_cobalt.h"
@@ -851,7 +852,7 @@
 
   app_status_ = (should_preload ? kConcealedAppStatus : kRunningAppStatus);
 
-// Register event callbacks.
+  // Register event callbacks.
   window_size_change_event_callback_ = base::Bind(
       &Application::OnWindowSizeChangedEvent, base::Unretained(this));
   event_dispatcher_.AddEventCallback(base::WindowSizeChangedEvent::TypeId(),
@@ -900,6 +901,15 @@
       base::Bind(&Application::OnWindowOnOfflineEvent, base::Unretained(this));
   event_dispatcher_.AddEventCallback(base::WindowOnOfflineEvent::TypeId(),
                                      on_window_on_offline_event_callback_);
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  on_date_time_configuration_changed_event_callback_ =
+      base::Bind(&Application::OnDateTimeConfigurationChangedEvent,
+                 base::Unretained(this));
+  event_dispatcher_.AddEventCallback(
+      base::DateTimeConfigurationChangedEvent::TypeId(),
+      on_date_time_configuration_changed_event_callback_);
+#endif
+
 #if defined(ENABLE_WEBDRIVER)
 #if defined(ENABLE_DEBUG_COMMAND_LINE_SWITCHES)
   bool create_webdriver_module =
@@ -958,7 +968,7 @@
   memory_tracker_tool_.reset(NULL);
 #endif  // defined(ENABLE_DEBUGGER) && defined(STARBOARD_ALLOWS_MEMORY_TRACKING)
 
-// Unregister event callbacks.
+  // Unregister event callbacks.
   event_dispatcher_.RemoveEventCallback(base::WindowSizeChangedEvent::TypeId(),
                                         window_size_change_event_callback_);
 #if SB_API_VERSION >= 12 || SB_HAS(ON_SCREEN_KEYBOARD)
@@ -984,6 +994,11 @@
       base::AccessibilityCaptionSettingsChangedEvent::TypeId(),
       on_caption_settings_changed_event_callback_);
 #endif  // SB_API_VERSION >= 12 || SB_HAS(CAPTIONS)
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  event_dispatcher_.RemoveEventCallback(
+      base::DateTimeConfigurationChangedEvent::TypeId(),
+      on_date_time_configuration_changed_event_callback_);
+#endif
 
   app_status_ = kShutDownAppStatus;
 }
@@ -1121,6 +1136,11 @@
       DispatchEventInternal(new base::WindowOnOnlineEvent());
       break;
 #endif
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+    case kSbEventDateTimeConfigurationChanged:
+      DispatchEventInternal(new base::DateTimeConfigurationChangedEvent());
+      break;
+#endif
     // Explicitly list unhandled cases here so that the compiler can give a
     // warning when a value is added, but not handled.
     case kSbEventTypeInput:
@@ -1275,6 +1295,9 @@
     case kSbEventTypeOsNetworkDisconnected:
     case kSbEventTypeOsNetworkConnected:
 #endif
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+    case kSbEventDateTimeConfigurationChanged:
+#endif
       NOTREACHED() << "Unexpected event type: " << event_type;
       return;
   }
@@ -1361,6 +1384,17 @@
       base::polymorphic_downcast<const base::WindowOnOfflineEvent*>(event));
 }
 
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+void Application::OnDateTimeConfigurationChangedEvent(
+    const base::Event* event) {
+  TRACE_EVENT0("cobalt::browser",
+               "Application::OnDateTimeConfigurationChangedEvent()");
+  browser_module_->OnDateTimeConfigurationChanged(
+      base::polymorphic_downcast<
+          const base::DateTimeConfigurationChangedEvent*>(event));
+}
+#endif
+
 void Application::WebModuleCreated() {
   TRACE_EVENT0("cobalt::browser", "Application::WebModuleCreated()");
   DispatchDeepLinkIfNotConsumed();
diff --git a/src/cobalt/browser/application.h b/src/cobalt/browser/application.h
index ec833fe..9570238 100644
--- a/src/cobalt/browser/application.h
+++ b/src/cobalt/browser/application.h
@@ -88,6 +88,10 @@
   void OnWindowOnOnlineEvent(const base::Event* event);
   void OnWindowOnOfflineEvent(const base::Event* event);
 
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  void OnDateTimeConfigurationChangedEvent(const base::Event* event);
+#endif
+
   // Called when a navigation occurs in the BrowserModule.
   void WebModuleCreated();
 
@@ -111,7 +115,7 @@
   // Main components of the Cobalt browser application.
   std::unique_ptr<BrowserModule> browser_module_;
 
-// Event callbacks.
+  // Event callbacks.
   base::EventCallback window_size_change_event_callback_;
 #if SB_API_VERSION >= 12 || SB_HAS(ON_SCREEN_KEYBOARD)
   base::EventCallback on_screen_keyboard_shown_event_callback_;
@@ -128,6 +132,9 @@
   base::EventCallback on_window_on_online_event_callback_;
   base::EventCallback on_window_on_offline_event_callback_;
 #endif
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  base::EventCallback on_date_time_configuration_changed_event_callback_;
+#endif
 
   // Thread checkers to ensure that callbacks for network and application events
   // always occur on the same thread.
diff --git a/src/cobalt/browser/browser.gyp b/src/cobalt/browser/browser.gyp
index dc3fb4d..1422e02 100644
--- a/src/cobalt/browser/browser.gyp
+++ b/src/cobalt/browser/browser.gyp
@@ -114,6 +114,7 @@
         '<(DEPTH)/cobalt/css_parser/css_parser.gyp:css_parser',
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
         '<(DEPTH)/cobalt/dom_parser/dom_parser.gyp:dom_parser',
+        '<(DEPTH)/cobalt/encoding/encoding.gyp:text_encoding',
         '<(DEPTH)/cobalt/fetch/fetch.gyp:fetch',
         '<(DEPTH)/cobalt/h5vcc/h5vcc.gyp:h5vcc',
         '<(DEPTH)/cobalt/input/input.gyp:input',
@@ -138,6 +139,7 @@
         '<(DEPTH)/cobalt/xhr/xhr.gyp:xhr',
         '<(DEPTH)/net/net.gyp:net',
         '<(DEPTH)/nb/nb.gyp:nb',
+        '<(DEPTH)/third_party/icu/icu.gyp:icui18n',
         '<(DEPTH)/url/url.gyp:url',
         'browser_bindings.gyp:bindings',
         '<(cobalt_webapi_extension_gyp_target)',
diff --git a/src/cobalt/browser/browser_bindings_gen.gyp b/src/cobalt/browser/browser_bindings_gen.gyp
index 6d48b70..2fdb5b3 100644
--- a/src/cobalt/browser/browser_bindings_gen.gyp
+++ b/src/cobalt/browser/browser_bindings_gen.gyp
@@ -126,6 +126,7 @@
         '../dom/mutation_record.idl',
         '../dom/named_node_map.idl',
         '../dom/navigator.idl',
+        '../dom/navigator_ua_data.idl',
         '../dom/node.idl',
         '../dom/node_list.idl',
         '../dom/on_error_event_listener.idl',
@@ -163,6 +164,9 @@
         '../dom/xml_document.idl',
         '../dom/xml_serializer.idl',
 
+        '../encoding/text_decoder.idl',
+        '../encoding/text_encoder.idl',
+
         '../fetch/fetch_internal.idl',
 
         '../h5vcc/dial/dial_http_request.idl',
@@ -250,6 +254,7 @@
         '../dom/captions/caption_font_size_percentage.idl',
         '../dom/captions/caption_opacity_percentage.idl',
         '../dom/captions/caption_state.idl',
+        '../dom/cobalt_ua_data_values.idl',
         '../dom/custom_event_init.idl',
         '../dom/device_orientation_event_init.idl',
         '../dom/document_ready_state.idl',
@@ -275,14 +280,20 @@
         '../dom/media_source_ready_state.idl',
         '../dom/mouse_event_init.idl',
         '../dom/mutation_observer_init.idl',
+        '../dom/navigator_ua_brand_version.idl',
         '../dom/performance_observer_callback_options.idl',
         '../dom/performance_observer_init.idl',
         '../dom/pointer_event_init.idl',
         '../dom/source_buffer_append_mode.idl',
         '../dom/track_default_type.idl',
+        '../dom/ua_data_values.idl',
+        '../dom/ua_low_entropy_json.idl',
         '../dom/ui_event_init.idl',
         '../dom/visibility_state.idl',
         '../dom/wheel_event_init.idl',
+        '../encoding/text_decode_options.idl',
+        '../encoding/text_decoder_options.idl',
+        '../encoding/text_encoder_encode_into_result.idl',
         '../h5vcc/h5vcc_crash_type.idl',
         '../media_capture/blob_event_init.idl',
         '../media_capture/media_device_kind.idl',
@@ -341,6 +352,7 @@
         '../dom/navigator_online.idl',
         '../dom/navigator_plugins.idl',
         '../dom/navigator_storage_utils.idl',
+        '../dom/navigator_ua.idl',
         '../dom/non_document_type_child_node.idl',
         '../dom/non_element_parent_node.idl',
         '../dom/parent_node.idl',
diff --git a/src/cobalt/browser/browser_module.cc b/src/cobalt/browser/browser_module.cc
index 688e916..50ae7bc 100644
--- a/src/cobalt/browser/browser_module.cc
+++ b/src/cobalt/browser/browser_module.cc
@@ -57,6 +57,7 @@
 #include "starboard/configuration.h"
 #include "starboard/system.h"
 #include "starboard/time.h"
+#include "third_party/icu/source/i18n/unicode/timezone.h"
 
 #if SB_HAS(CORE_DUMP_HANDLER_SUPPORT)
 #include "base/memory/ptr_util.h"
@@ -1027,6 +1028,16 @@
 }
 #endif  // SB_API_VERSION >= 12 || SB_HAS(CAPTIONS)
 
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+void BrowserModule::OnDateTimeConfigurationChanged(
+    const base::DateTimeConfigurationChangedEvent* event) {
+  icu::TimeZone::adoptDefault(icu::TimeZone::detectHostTimeZone());
+  if (web_module_) {
+    web_module_->UpdateDateTimeConfiguration();
+  }
+}
+#endif
+
 #if defined(ENABLE_DEBUGGER)
 void BrowserModule::OnFuzzerToggle(const std::string& message) {
   if (base::MessageLoop::current() != self_message_loop_) {
diff --git a/src/cobalt/browser/browser_module.h b/src/cobalt/browser/browser_module.h
index 87e676e..37993c3 100644
--- a/src/cobalt/browser/browser_module.h
+++ b/src/cobalt/browser/browser_module.h
@@ -28,6 +28,7 @@
 #include "cobalt/account/account_manager.h"
 #include "cobalt/base/accessibility_caption_settings_changed_event.h"
 #include "cobalt/base/application_state.h"
+#include "cobalt/base/date_time_configuration_changed_event.h"
 #include "cobalt/base/message_queue.h"
 #include "cobalt/base/on_screen_keyboard_blurred_event.h"
 #include "cobalt/base/on_screen_keyboard_focused_event.h"
@@ -215,6 +216,11 @@
   void OnWindowOnOnlineEvent(const base::Event* event);
   void OnWindowOnOfflineEvent(const base::Event* event);
 
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  void OnDateTimeConfigurationChanged(
+      const base::DateTimeConfigurationChangedEvent* event);
+#endif
+
   bool IsWebModuleLoaded() { return web_module_loaded_.IsSignaled(); }
 
   // Parses url and defines a mapping of parameter values of the form
diff --git a/src/cobalt/browser/switches.cc b/src/cobalt/browser/switches.cc
index 64653d5..17a88b4 100644
--- a/src/cobalt/browser/switches.cc
+++ b/src/cobalt/browser/switches.cc
@@ -110,7 +110,8 @@
 
 const char kMemoryTracker[] = "memory_tracker";
 const char kMemoryTrackerHelp[] =
-    "Enables memory tracking by installing the memory tracker on startup.";
+    "Enables memory tracking by installing the memory tracker on startup. Run "
+    "--memory_tracker=help for more info.";
 
 const char kMinCompatibilityVersion[] = "min_compatibility_version";
 const char kMinCompatibilityVersionHelp[] =
@@ -417,7 +418,7 @@
 
 std::string HelpMessage() {
   std::string help_message;
-  std::map<const char*, const char*> help_map {
+  std::map<std::string, const char*> help_map {
 #if defined(ENABLE_DEBUG_COMMAND_LINE_SWITCHES)
     {kDebugConsoleMode, kDebugConsoleModeHelp},
         {kDevServersListenIp, kDevServersListenIpHelp},
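
The switch of help_map's key type from const char* to std::string is a correctness fix, not a style one: std::map<const char*, ...> orders and looks up keys by pointer value, so two character arrays with identical contents but different addresses are different keys, iteration follows addresses rather than the alphabet, and whether two identical literals even share an address is unspecified. A minimal standalone illustration of the pitfall:

#include <cassert>
#include <map>
#include <string>

int main() {
  const char* key = "memory_tracker";
  std::map<const char*, const char*> by_pointer{{key, "help text"}};

  // An equal string living at a different address is not found, because the
  // map compares the pointers themselves, not the characters they point at.
  std::string copy = "memory_tracker";
  assert(by_pointer.find(copy.c_str()) == by_pointer.end());

  // With std::string keys the comparison is by contents, so this succeeds.
  std::map<std::string, const char*> by_value{{key, "help text"}};
  assert(by_value.find(copy) != by_value.end());
  return 0;
}
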
diff --git a/src/cobalt/browser/web_module.cc b/src/cobalt/browser/web_module.cc
index 8c0af39..a2a5343 100644
--- a/src/cobalt/browser/web_module.cc
+++ b/src/cobalt/browser/web_module.cc
@@ -182,6 +182,8 @@
   void InjectWindowOnOnlineEvent();
   void InjectWindowOnOfflineEvent();
 
+  void UpdateDateTimeConfiguration();
+
   // Executes JavaScript in this WebModule. Sets the |result| output parameter
   // and signals |got_result|.
   void ExecuteJavascript(const std::string& script_utf8,
@@ -910,6 +912,12 @@
   InjectInputEvent(element, wheel_event);
 }
 
+void WebModule::Impl::UpdateDateTimeConfiguration() {
+  if (javascript_engine_) {
+    javascript_engine_->UpdateDateTimeConfiguration();
+  }
+}
+
 void WebModule::Impl::ExecuteJavascript(
     const std::string& script_utf8, const base::SourceLocation& script_location,
     base::WaitableEvent* got_result, std::string* result, bool* out_succeeded) {
@@ -1532,6 +1540,13 @@
                             base::Unretained(impl_.get())));
 }
 
+void WebModule::UpdateDateTimeConfiguration() {
+  DCHECK(impl_);
+  message_loop()->task_runner()->PostTask(
+      FROM_HERE, base::Bind(&WebModule::Impl::UpdateDateTimeConfiguration,
+                            base::Unretained(impl_.get())));
+}
+
 std::string WebModule::ExecuteJavascript(
     const std::string& script_utf8, const base::SourceLocation& script_location,
     bool* out_succeeded) {
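
WebModule::UpdateDateTimeConfiguration() follows the same pattern as the neighboring methods such as ExecuteJavascript: the public call can arrive on any thread, so it posts the real work to the web module's own message loop, where Impl runs single-threaded (base::Unretained here relies on the loop being drained before impl_ is destroyed, as the neighboring methods already assume). The marshalling idea, sketched with a toy task runner rather than Chromium's actual base::SingleThreadTaskRunner:

#include <condition_variable>
#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

// Toy single-thread task runner; a stand-in for the message loop used above,
// showing only the "hop to the owning thread" mechanics.
class TaskRunner {
 public:
  TaskRunner() : thread_([this] { Run(); }) {}
  ~TaskRunner() {
    PostTask(nullptr);  // An empty task acts as the quit signal.
    thread_.join();
  }

  // Callable from any thread; the task runs on the runner's own thread.
  void PostTask(std::function<void()> task) {
    {
      std::lock_guard<std::mutex> lock(mu_);
      tasks_.push(std::move(task));
    }
    cv_.notify_one();
  }

 private:
  void Run() {
    for (;;) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mu_);
        cv_.wait(lock, [this] { return !tasks_.empty(); });
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      if (!task) return;  // Quit signal.
      task();  // Runs on the runner's thread, like Impl's methods above.
    }
  }

  std::mutex mu_;
  std::condition_variable cv_;
  std::queue<std::function<void()>> tasks_;
  std::thread thread_;
};

int main() {
  TaskRunner runner;
  runner.PostTask([] { std::cout << "update ran on the owning thread\n"; });
}  // The destructor posts the quit task after the update, then joins.
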
diff --git a/src/cobalt/browser/web_module.h b/src/cobalt/browser/web_module.h
index 5ad671f..4657840 100644
--- a/src/cobalt/browser/web_module.h
+++ b/src/cobalt/browser/web_module.h
@@ -186,7 +186,7 @@
     base::ThreadPriority loader_thread_priority =
         base::ThreadPriority::BACKGROUND;
 
-    // Specifies the priority tha the web module's animated image decoding
+    // Specifies the priority that the web module's animated image decoding
     // thread will be assigned. This thread is responsible for decoding,
     // blending and constructing individual frames from animated images. The
     // default value is base::ThreadPriority::BACKGROUND.
@@ -341,6 +341,9 @@
 
   void InjectCaptionSettingsChangedEvent();
 
+  // Update the date/time configuration of relevant web modules.
+  void UpdateDateTimeConfiguration();
+
   // Executes Javascript code in this web module.  The calling thread will
   // block until the JavaScript has executed and the output results are
   // available.
diff --git a/src/cobalt/build/all.gyp b/src/cobalt/build/all.gyp
index c178c7e..5a33f2a 100644
--- a/src/cobalt/build/all.gyp
+++ b/src/cobalt/build/all.gyp
@@ -46,6 +46,8 @@
         '<(DEPTH)/cobalt/dom/testing/dom_testing.gyp:*',
         '<(DEPTH)/cobalt/dom_parser/dom_parser.gyp:*',
         '<(DEPTH)/cobalt/dom_parser/dom_parser_test.gyp:*',
+        '<(DEPTH)/cobalt/encoding/encoding.gyp:*',
+        '<(DEPTH)/cobalt/encoding/encoding_test.gyp:*',
         '<(DEPTH)/cobalt/extension/extension.gyp:*',
         '<(DEPTH)/cobalt/h5vcc/h5vcc.gyp:*',
         '<(DEPTH)/cobalt/input/input.gyp:*',
diff --git a/src/cobalt/build/build.id b/src/cobalt/build/build.id
index 1b9651f..70b9659 100644
--- a/src/cobalt/build/build.id
+++ b/src/cobalt/build/build.id
@@ -1 +1 @@
-301323
\ No newline at end of file
+301702
\ No newline at end of file
diff --git a/src/cobalt/build/cobalt_configuration.py b/src/cobalt/build/cobalt_configuration.py
index 5592bda..5aece99 100644
--- a/src/cobalt/build/cobalt_configuration.py
+++ b/src/cobalt/build/cobalt_configuration.py
@@ -174,6 +174,7 @@
         'sql_unittests',
         'storage_test',
         'storage_upgrade_test',
+        'text_encoding_test',
         'web_animations_test',
         'webdriver_test',
         'websocket_test',
diff --git a/src/cobalt/build/gyp_cobalt b/src/cobalt/build/gyp_cobalt
index f73c7be..a7b7cdb 100755
--- a/src/cobalt/build/gyp_cobalt
+++ b/src/cobalt/build/gyp_cobalt
@@ -103,8 +103,10 @@
     logging.error('GYP_DEFINES environment variable is not supported.')
     return RETVAL_ERROR
 
-  if os.environ.get('BUILD_IN_DOCKER'):
-    options.build_number= 0
+  build_id_server_url = os.environ.get('BUILD_ID_SERVER_URL')
+  if build_id_server_url:
+    options.build_number = gyp_utils.GetBuildNumber(
+      version_server=build_id_server_url)
   else:
     options.build_number = gyp_utils.GetBuildNumber()
 
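
The gyp_cobalt change replaces the Docker-specific zero build number with a general override: when BUILD_ID_SERVER_URL is set, the build number is fetched from that server; otherwise the default source is used. The same env-var-override shape, sketched in C++ for consistency with the other examples here (FetchBuildNumberFrom and DefaultBuildNumber are hypothetical stand-ins for gyp_utils.GetBuildNumber, stubbed so the sketch links and runs):

#include <cstdlib>
#include <iostream>
#include <string>

// Hypothetical stand-ins for the real build-number sources.
int FetchBuildNumberFrom(const std::string& server_url) { return 301702; }
int DefaultBuildNumber() { return 0; }

int BuildNumber() {
  // An explicit BUILD_ID_SERVER_URL wins; otherwise fall back to the default.
  if (const char* url = std::getenv("BUILD_ID_SERVER_URL")) {
    return FetchBuildNumberFrom(url);
  }
  return DefaultBuildNumber();
}

int main() { std::cout << "build number: " << BuildNumber() << "\n"; }
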
diff --git a/src/cobalt/content/fonts/config/empty/fonts.xml b/src/cobalt/content/fonts/config/empty/fonts.xml
new file mode 100644
index 0000000..62a2891
--- /dev/null
+++ b/src/cobalt/content/fonts/config/empty/fonts.xml
@@ -0,0 +1,3 @@
+<?xml version="1.0" ?>
+<familyset version="1">
+</familyset>
diff --git a/src/cobalt/content/fonts/fonts.gyp b/src/cobalt/content/fonts/fonts.gyp
index a47c417..edf844e 100644
--- a/src/cobalt/content/fonts/fonts.gyp
+++ b/src/cobalt/content/fonts/fonts.gyp
@@ -85,6 +85,22 @@
             'package_fallback_symbols': 0,
           }],
 
+          [ 'cobalt_font_package == "empty"', {
+            'source_font_config_dir': 'config/empty',
+
+            'package_named_sans_serif': 0,
+            'package_named_serif': 0,
+            'package_named_fcc_fonts': 0,
+            'package_fallback_lang_non_cjk': 0,
+            'package_fallback_lang_cjk': 0,
+            'package_fallback_lang_cjk_low_quality': 0,
+            'package_fallback_historic': 0,
+            'package_fallback_color_emoji': 0,
+            'package_fallback_emoji': 0,
+            'package_fallback_symbols': 0,
+          }],
+
+
           [ 'cobalt_font_package == "android_system"', {
             # fonts.xml contains a superset of what we expect to find on Android
             # devices. The Android SbFile implementation falls back to system font
@@ -140,37 +156,47 @@
         ],
       },
 
-      'actions': [
-        {
-          'action_name': 'fonts_xml',
-          'inputs': [
-              'scripts/filter_fonts.py',
-              '<(source_font_config_dir)/fonts.xml',
-          ],
-          'outputs': [
-            '<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
-          ],
-          'action': [
-            'python2', 'scripts/filter_fonts.py',
-            '-i', '<(source_font_config_dir)/fonts.xml',
-            '-o', '<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
-            '<@(package_categories)',
-          ],
-        },
-      ],
       'conditions': [
-        [ 'copy_font_files == 0', {
+        [ 'cobalt_font_package == "empty"', {
           'copies': [{
-            # Copy at least the fallback Roboto Subsetted font.
-            'files': [ '<(source_font_files_dir)/Roboto-Regular-Subsetted.woff2' ],
+            'files': [ 'config/empty/fonts.xml' ],
             'destination': '<(sb_static_contents_output_data_dir)/fonts/',
           }],
         }, {
-          'copies': [{
-            # Late expansion so <@(package_categories) is resolved.
-            'files': [ '>!@pymod_do_main(cobalt.content.fonts.scripts.filter_fonts -i <(source_font_config_dir)/fonts.xml -f <(source_font_files_dir) <@(package_categories))' ],
-            'destination': '<(sb_static_contents_output_data_dir)/fonts/',
-          }],
+          'actions': [
+            {
+              'action_name': 'fonts_xml',
+              'inputs': [
+                  'scripts/filter_fonts.py',
+                  '<(source_font_config_dir)/fonts.xml',
+              ],
+              'outputs': [
+                '<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
+              ],
+              'action': [
+                'python2', 'scripts/filter_fonts.py',
+                '-i', '<(source_font_config_dir)/fonts.xml',
+                '-o', '<(sb_static_contents_output_data_dir)/fonts/fonts.xml',
+                '<@(package_categories)',
+              ],
+            },
+          ],
+
+          'conditions': [
+            [ 'copy_font_files == 0', {
+              'copies': [{
+                # Copy at least the fallback Roboto Subsetted font.
+                'files': [ '<(source_font_files_dir)/Roboto-Regular-Subsetted.woff2' ],
+                'destination': '<(sb_static_contents_output_data_dir)/fonts/',
+              }],
+            }, {
+              'copies': [{
+                # Late expansion so <@(package_categories) is resolved.
+                'files': [ '>!@pymod_do_main(cobalt.content.fonts.scripts.filter_fonts -i <(source_font_config_dir)/fonts.xml -f <(source_font_files_dir) <@(package_categories))' ],
+                'destination': '<(sb_static_contents_output_data_dir)/fonts/',
+              }],
+            }],
+          ],
         }],
       ],
 
diff --git a/src/cobalt/content/licenses/platform/evergreen/licenses_cobalt.txt b/src/cobalt/content/licenses/platform/evergreen/licenses_cobalt.txt
new file mode 100644
index 0000000..bf30a41
--- /dev/null
+++ b/src/cobalt/content/licenses/platform/evergreen/licenses_cobalt.txt
@@ -0,0 +1,5495 @@
+Where applicable, source code for modified versions of the libraries below is available at:
+https://cobalt.googlesource.com/cobalt.
+
+
+
+  Cobalt
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+   1. Definitions.
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+   END OF TERMS AND CONDITIONS
+   APPENDIX: How to apply the Apache License to your work.
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+   Copyright [yyyy] [name of copyright owner]
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+       http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+  Chromium
+
+  // Copyright 2015 The Chromium Authors. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  V8
+
+  This license applies to all parts of V8 that are not externally
+  maintained libraries.  The externally maintained libraries used by V8
+  are:
+
+    - PCRE test suite, located in
+      test/mjsunit/third_party/regexp-pcre/regexp-pcre.js.  This is based on the
+      test suite from PCRE-7.3, which is copyrighted by the University
+      of Cambridge and Google, Inc.  The copyright notice and license
+      are embedded in regexp-pcre.js.
+
+    - Layout tests, located in test/mjsunit/third_party/object-keys.  These are
+      based on layout tests from webkit.org which are copyrighted by
+      Apple Computer, Inc. and released under a 3-clause BSD license.
+
+    - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
+      assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
+      assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+      assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+      assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+      This code is copyrighted by Sun Microsystems Inc. and released
+      under a 3-clause BSD license.
+
+    - Valgrind client API header, located at src/third_party/valgrind/valgrind.h
+      This is released under the BSD license.
+
+    - The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}
+      This is released under the Apache license. The API's upstream prototype
+      implementation also formed the basis of V8's implementation in
+      src/wasm/c-api.cc.
+
+  These libraries have their own licenses; we recommend you read them,
+  as their terms may differ from the terms below.
+
+  Further license information can be found in LICENSE files located in
+  sub-directories.
+
+  Copyright 2014, the V8 project authors. All rights reserved.
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+      * Redistributions of source code must retain the above copyright
+        notice, this list of conditions and the following disclaimer.
+      * Redistributions in binary form must reproduce the above
+        copyright notice, this list of conditions and the following
+        disclaimer in the documentation and/or other materials provided
+        with the distribution.
+      * Neither the name of Google Inc. nor the names of its
+        contributors may be used to endorse or promote products derived
+        from this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  devtools
+
+
+  // Copyright 2014 The Chromium Authors. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+  boringssl
+
+
+  BoringSSL is a fork of OpenSSL. As such, large parts of it fall under OpenSSL
+  licensing. Files that are completely new have a Google copyright and an ISC
+  license. This license is reproduced at the bottom of this file.
+
+  Contributors to BoringSSL are required to follow the CLA rules for Chromium:
+  https://cla.developers.google.com/clas
+
+  Files in third_party/ have their own licenses, as described therein. The MIT
+  license, for third_party/fiat, which, unlike other third_party directories, is
+  compiled into non-test libraries, is included below.
+
+  The OpenSSL toolkit stays under a dual license, i.e. both the conditions of the
+  OpenSSL License and the original SSLeay license apply to the toolkit. See below
+  for the actual license texts. Actually both licenses are BSD-style Open Source
+  licenses. In case of any license issues related to OpenSSL please contact
+  openssl-core@openssl.org.
+
+  The following are Google-internal bug numbers where explicit permission from
+  some authors is recorded for use of their work. (This is purely for our own
+  record keeping.)
+    27287199
+    27287880
+    27287883
+
+    OpenSSL License
+    ---------------
+
+  /* ====================================================================
+   * Copyright (c) 1998-2011 The OpenSSL Project.  All rights reserved.
+   *
+   * Redistribution and use in source and binary forms, with or without
+   * modification, are permitted provided that the following conditions
+   * are met:
+   *
+   * 1. Redistributions of source code must retain the above copyright
+   *    notice, this list of conditions and the following disclaimer.
+   *
+   * 2. Redistributions in binary form must reproduce the above copyright
+   *    notice, this list of conditions and the following disclaimer in
+   *    the documentation and/or other materials provided with the
+   *    distribution.
+   *
+   * 3. All advertising materials mentioning features or use of this
+   *    software must display the following acknowledgment:
+   *    "This product includes software developed by the OpenSSL Project
+   *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+   *
+   * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+   *    endorse or promote products derived from this software without
+   *    prior written permission. For written permission, please contact
+   *    openssl-core@openssl.org.
+   *
+   * 5. Products derived from this software may not be called "OpenSSL"
+   *    nor may "OpenSSL" appear in their names without prior written
+   *    permission of the OpenSSL Project.
+   *
+   * 6. Redistributions of any form whatsoever must retain the following
+   *    acknowledgment:
+   *    "This product includes software developed by the OpenSSL Project
+   *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+   *
+   * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+   * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+   * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+   * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+   * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+   * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+   * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+   * OF THE POSSIBILITY OF SUCH DAMAGE.
+   * ====================================================================
+   *
+   * This product includes cryptographic software written by Eric Young
+   * (eay@cryptsoft.com).  This product includes software written by Tim
+   * Hudson (tjh@cryptsoft.com).
+   *
+   */
+
+   Original SSLeay License
+   -----------------------
+
+  /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+   * All rights reserved.
+   *
+   * This package is an SSL implementation written
+   * by Eric Young (eay@cryptsoft.com).
+   * The implementation was written so as to conform with Netscapes SSL.
+   *
+   * This library is free for commercial and non-commercial use as long as
+   * the following conditions are aheared to.  The following conditions
+   * apply to all code found in this distribution, be it the RC4, RSA,
+   * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
+   * included with this distribution is covered by the same copyright terms
+   * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+   *
+   * Copyright remains Eric Young's, and as such any Copyright notices in
+   * the code are not to be removed.
+   * If this package is used in a product, Eric Young should be given attribution
+   * as the author of the parts of the library used.
+   * This can be in the form of a textual message at program startup or
+   * in documentation (online or textual) provided with the package.
+   *
+   * Redistribution and use in source and binary forms, with or without
+   * modification, are permitted provided that the following conditions
+   * are met:
+   * 1. Redistributions of source code must retain the copyright
+   *    notice, this list of conditions and the following disclaimer.
+   * 2. Redistributions in binary form must reproduce the above copyright
+   *    notice, this list of conditions and the following disclaimer in the
+   *    documentation and/or other materials provided with the distribution.
+   * 3. All advertising materials mentioning features or use of this software
+   *    must display the following acknowledgement:
+   *    "This product includes cryptographic software written by
+   *     Eric Young (eay@cryptsoft.com)"
+   *    The word 'cryptographic' can be left out if the rouines from the library
+   *    being used are not cryptographic related :-).
+   * 4. If you include any Windows specific code (or a derivative thereof) from
+   *    the apps directory (application code) you must include an acknowledgement:
+   *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+   *
+   * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+   * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+   * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+   * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+   * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+   * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+   * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+   * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+   * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+   * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+   * SUCH DAMAGE.
+   *
+   * The licence and distribution terms for any publically available version or
+   * derivative of this code cannot be changed.  i.e. this code cannot simply be
+   * copied and put under another distribution licence
+   * [including the GNU Public Licence.]
+   */
+
+
+  ISC license used for completely new code in BoringSSL:
+
+  /* Copyright (c) 2015, Google Inc.
+   *
+   * Permission to use, copy, modify, and/or distribute this software for any
+   * purpose with or without fee is hereby granted, provided that the above
+   * copyright notice and this permission notice appear in all copies.
+   *
+   * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+   * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+   * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
+   * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+   * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
+   * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+   * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
+
+
+  The code in third_party/fiat carries the MIT license:
+
+  Copyright (c) 2015-2016 the fiat-crypto authors (see
+  https://github.com/mit-plv/fiat-crypto/blob/master/AUTHORS).
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in all
+  copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+
+
+  Licenses for support code
+  -------------------------
+
+  Parts of the TLS test suite are under the Go license. This code is not included
+  in BoringSSL (i.e. libcrypto and libssl) when compiled, however, so
+  distributing code linked against BoringSSL does not trigger this license:
+
+  Copyright (c) 2009 The Go Authors. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+     * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+     * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+     * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+  BoringSSL uses the Chromium test infrastructure to run a continuous build,
+  trybots etc. The scripts which manage this, and the script for generating build
+  metadata, are under the Chromium license. Distributing code linked against
+  BoringSSL does not trigger this license.
+
+  Copyright 2015 The Chromium Authors. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+     * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+     * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+     * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  dmg_fp
+
+
+  /****************************************************************
+   *
+   * The author of this software is David M. Gay.
+   *
+   * Copyright (c) 1991, 2000, 2001 by Lucent Technologies.
+   *
+   * Permission to use, copy, modify, and distribute this software for any
+   * purpose without fee is hereby granted, provided that this entire notice
+   * is included in all copies of any software which is or includes a copy
+   * or modification of this software and in all copies of the supporting
+   * documentation for such software.
+   *
+   * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
+   * WARRANTY.  IN PARTICULAR, NEITHER THE AUTHOR NOR LUCENT MAKES ANY
+   * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
+   * OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
+   *
+   ***************************************************************/
+
+
+
+  dynamic_annotations
+
+
+  /* Copyright (c) 2008-2009, Google Inc.
+   * All rights reserved.
+   *
+   * Redistribution and use in source and binary forms, with or without
+   * modification, are permitted provided that the following conditions are
+   * met:
+   *
+   *     * Redistributions of source code must retain the above copyright
+   * notice, this list of conditions and the following disclaimer.
+   *     * Neither the name of Google Inc. nor the names of its
+   * contributors may be used to endorse or promote products derived from
+   * this software without specific prior written permission.
+   *
+   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+   *
+   * ---
+   * Author: Kostya Serebryany
+   */
+
+
+
+  icu(base/third_party/icu)
+
+
+  COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later)
+
+  Copyright © 1991-2017 Unicode, Inc. All rights reserved.
+  Distributed under the Terms of Use in http://www.unicode.org/copyright.html
+
+  Permission is hereby granted, free of charge, to any person obtaining
+  a copy of the Unicode data files and any associated documentation
+  (the "Data Files") or Unicode software and any associated documentation
+  (the "Software") to deal in the Data Files or Software
+  without restriction, including without limitation the rights to use,
+  copy, modify, merge, publish, distribute, and/or sell copies of
+  the Data Files or Software, and to permit persons to whom the Data Files
+  or Software are furnished to do so, provided that either
+  (a) this copyright and permission notice appear with all copies
+  of the Data Files or Software, or
+  (b) this copyright and permission notice appear in associated
+  Documentation.
+
+  THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
+  ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+  WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+  NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
+  NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
+  DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+  DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+  TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+  PERFORMANCE OF THE DATA FILES OR SOFTWARE.
+
+  Except as contained in this notice, the name of a copyright holder
+  shall not be used in advertising or otherwise to promote the sale,
+  use or other dealings in these Data Files or Software without prior
+  written authorization of the copyright holder.
+
+  ---------------------
+
+  Third-Party Software Licenses
+
+  This section contains third-party software notices and/or additional
+  terms for licensed third-party software components included within ICU
+  libraries.
+
+  1. ICU License - ICU 1.8.1 to ICU 57.1
+
+  COPYRIGHT AND PERMISSION NOTICE
+
+  Copyright (c) 1995-2016 International Business Machines Corporation and others
+  All rights reserved.
+
+  Permission is hereby granted, free of charge, to any person obtaining
+  a copy of this software and associated documentation files (the
+  "Software"), to deal in the Software without restriction, including
+  without limitation the rights to use, copy, modify, merge, publish,
+  distribute, and/or sell copies of the Software, and to permit persons
+  to whom the Software is furnished to do so, provided that the above
+  copyright notice(s) and this permission notice appear in all copies of
+  the Software and that both the above copyright notice(s) and this
+  permission notice appear in supporting documentation.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+  OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+  HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY
+  SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER
+  RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+  CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+  CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+  Except as contained in this notice, the name of a copyright holder
+  shall not be used in advertising or otherwise to promote the sale, use
+  or other dealings in this Software without prior written authorization
+  of the copyright holder.
+
+  All trademarks and registered trademarks mentioned herein are the
+  property of their respective owners.
+
+
+
+  libxml
+
+
+  LibXml Ruby Project
+    Copyright (c) 2008-2013 Charlie Savage and contributors
+    Copyright (c) 2002-2007 Sean Chittenden and contributors
+    Copyright (c) 2001 Wai-Sun "Squidster" Chia
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy of
+  this software and associated documentation files (the "Software"), to deal in
+  the Software without restriction, including without limitation the rights to
+  use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+  of the Software, and to permit persons to whom the Software is furnished to do
+  so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in all
+  copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+
+
+
+  Netscape Portable Runtime (NSPR)
+
+
+  /* ***** BEGIN LICENSE BLOCK *****
+   * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+   *
+   * The contents of this file are subject to the Mozilla Public License Version
+   * 1.1 (the "License"); you may not use this file except in compliance with
+   * the License. You may obtain a copy of the License at
+   * http://www.mozilla.org/MPL/
+   *
+   * Software distributed under the License is distributed on an "AS IS" basis,
+   * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+   * for the specific language governing rights and limitations under the
+   * License.
+   *
+   * The Original Code is the Netscape Portable Runtime (NSPR).
+   *
+   * The Initial Developer of the Original Code is
+   * Netscape Communications Corporation.
+   * Portions created by the Initial Developer are Copyright (C) 1998-2000
+   * the Initial Developer. All Rights Reserved.
+   *
+   * Contributor(s):
+   *
+   * Alternatively, the contents of this file may be used under the terms of
+   * either the GNU General Public License Version 2 or later (the "GPL"), or
+   * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+   * in which case the provisions of the GPL or the LGPL are applicable instead
+   * of those above. If you wish to allow use of your version of this file only
+   * under the terms of either the GPL or the LGPL, and not to allow others to
+   * use your version of this file under the terms of the MPL, indicate your
+   * decision by deleting the provisions above and replace them with the notice
+   * and other provisions required by the GPL or the LGPL. If you do not delete
+   * the provisions above, a recipient may use your version of this file under
+   * the terms of any one of the MPL, the GPL or the LGPL.
+   *
+   * ***** END LICENSE BLOCK ***** */
+
+
+  symbolize
+
+
+  // Copyright (c) 2006, Google Inc.
+  // All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //     * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //     * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //     * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+  valgrind
+
+
+   Notice that the following BSD-style license applies to the Valgrind header
+   files used by Chromium (valgrind.h and memcheck.h). However, the rest of
+   Valgrind is licensed under the terms of the GNU General Public License,
+   version 2, unless otherwise indicated.
+
+   ----------------------------------------------------------------
+
+   Copyright (C) 2000-2008 Julian Seward.  All rights reserved.
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   1. Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+   2. The origin of this software must not be misrepresented; you must
+      not claim that you wrote the original software.  If you use this
+      software in a product, an acknowledgment in the product
+      documentation would be appreciated but is not required.
+
+   3. Altered source versions must be plainly marked as such, and must
+      not be misrepresented as being the original software.
+
+   4. The name of the author may not be used to endorse or promote
+      products derived from this software without specific prior written
+      permission.
+
+   THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
+   OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+   WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+   ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
+   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+  xdg_mime
+
+
+  --------------------------------------------------------------------------------
+  Academic Free License v. 2.0
+  --------------------------------------------------------------------------------
+
+  This Academic Free License (the "License") applies to any original work of
+  authorship (the "Original Work") whose owner (the "Licensor") has placed the
+  following notice immediately following the copyright notice for the Original
+  Work:
+
+  Licensed under the Academic Free License version 2.0
+  1) Grant of Copyright License. Licensor hereby grants You a world-wide,
+  royalty-free, non-exclusive, perpetual, sublicenseable license to do the
+  following:
+
+  a) to reproduce the Original Work in copies;
+  b) to prepare derivative works ("Derivative Works") based upon the Original
+     Work;
+  c) to distribute copies of the Original Work and Derivative Works to the
+     public;
+  d) to perform the Original Work publicly; and
+  e) to display the Original Work publicly.
+
+  2) Grant of Patent License. Licensor hereby grants You a world-wide,
+  royalty-free, non-exclusive, perpetual, sublicenseable license, under patent
+  claims owned or controlled by the Licensor that are embodied in the Original
+  Work as furnished by the Licensor, to make, use, sell and offer for sale the
+  Original Work and Derivative Works.
+
+  3) Grant of Source Code License. The term "Source Code" means the preferred
+  form of the Original Work for making modifications to it and all available
+  documentation describing how to modify the Original Work. Licensor hereby
+  agrees to provide a machine-readable copy of the Source Code of the Original
+  Work along with each copy of the Original Work that Licensor distributes.
+  Licensor reserves the right to satisfy this obligation by placing a
+  machine-readable copy of the Source Code in an information repository
+  reasonably calculated to permit inexpensive and convenient access by You for as
+  long as Licensor continues to distribute the Original Work, and by publishing
+  the address of that information repository in a notice immediately following
+  the copyright notice that applies to the Original Work.
+
+  4) Exclusions From License Grant. Neither the names of Licensor, nor the names
+  of any contributors to the Original Work, nor any of their trademarks or
+  service marks, may be used to endorse or promote products derived from this
+  Original Work without express prior written permission of the Licensor. Nothing
+  in this License shall be deemed to grant any rights to trademarks, copyrights,
+  patents, trade secrets or any other intellectual property of Licensor except as
+  expressly stated herein. No patent license is granted to make, use, sell or
+  offer to sell embodiments of any patent claims other than the licensed claims
+  defined in Section 2. No right is granted to the trademarks of Licensor even if
+  such marks are included in the Original Work. Nothing in this License shall be
+  interpreted to prohibit Licensor from licensing under different terms from this
+  License any Original Work that Licensor otherwise would have a right to
+  license.
+
+  5) This section intentionally omitted.
+
+  6) Attribution Rights. You must retain, in the Source Code of any Derivative
+  Works that You create, all copyright, patent or trademark notices from the
+  Source Code of the Original Work, as well as any notices of licensing and any
+  descriptive text identified therein as an "Attribution Notice." You must cause
+  the Source Code for any Derivative Works that You create to carry a prominent
+  Attribution Notice reasonably calculated to inform recipients that You have
+  modified the Original Work.
+
+  7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that
+  the copyright in and to the Original Work and the patent rights granted herein
+  by Licensor are owned by the Licensor or are sublicensed to You under the terms
+  of this License with the permission of the contributor(s) of those copyrights
+  and patent rights. Except as expressly stated in the immediately proceeding
+  sentence, the Original Work is provided under this License on an "AS IS" BASIS
+  and WITHOUT WARRANTY, either express or implied, including, without limitation,
+  the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU.
+  This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No
+  license to Original Work is granted hereunder except under this disclaimer.
+
+  8) Limitation of Liability. Under no circumstances and under no legal theory,
+  whether in tort (including negligence), contract, or otherwise, shall the
+  Licensor be liable to any person for any direct, indirect, special, incidental,
+  or consequential damages of any character arising as a result of this License
+  or the use of the Original Work including, without limitation, damages for loss
+  of goodwill, work stoppage, computer failure or malfunction, or any and all
+  other commercial damages or losses. This limitation of liability shall not
+  apply to liability for death or personal injury resulting from Licensor's
+  negligence to the extent applicable law prohibits such limitation. Some
+  jurisdictions do not allow the exclusion or limitation of incidental or
+  consequential damages, so this exclusion and limitation may not apply to You.
+
+  9) Acceptance and Termination. If You distribute copies of the Original Work or
+  a Derivative Work, You must make a reasonable effort under the circumstances to
+  obtain the express assent of recipients to the terms of this License. Nothing
+  else but this License (or another written agreement between Licensor and You)
+  grants You permission to create Derivative Works based upon the Original Work
+  or to exercise any of the rights granted in Section 1 herein, and any attempt
+  to do so except under the terms of this License (or another written agreement
+  between Licensor and You) is expressly prohibited by U.S. copyright law, the
+  equivalent laws of other countries, and by international treaty. Therefore, by
+  exercising any of the rights granted to You in Section 1 herein, You indicate
+  Your acceptance of this License and all of its terms and conditions.
+
+  10) Termination for Patent Action. This License shall terminate automatically
+  and You may no longer exercise any of the rights granted to You by this License
+  as of the date You commence an action, including a cross-claim or counterclaim,
+  for patent infringement (i) against Licensor with respect to a patent
+  applicable to software or (ii) against any entity with respect to a patent
+  applicable to the Original Work (but excluding combinations of the Original
+  Work with other software or hardware).
+
+  11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this
+  License may be brought only in the courts of a jurisdiction wherein the
+  Licensor resides or in which Licensor conducts its primary business, and under
+  the laws of that jurisdiction excluding its conflict-of-law provisions. The
+  application of the United Nations Convention on Contracts for the International
+  Sale of Goods is expressly excluded. Any use of the Original Work outside the
+  scope of this License or after its termination shall be subject to the
+  requirements and penalties of the U.S. Copyright Act, 17 U.S.C. 101 et seq.,
+  the equivalent laws of other countries, and international treaty. This section
+  shall survive the termination of this License.
+
+  12) Attorneys Fees. In any action to enforce the terms of this License or
+  seeking damages relating thereto, the prevailing party shall be entitled to
+  recover its costs and expenses, including, without limitation, reasonable
+  attorneys' fees and costs incurred in connection with such action, including
+  any appeal of such action. This section shall survive the termination of this
+  License.
+
+  13) Miscellaneous. This License represents the complete agreement concerning
+  the subject matter hereof. If any provision of this License is held to be
+  unenforceable, such provision shall be reformed only to the extent necessary to
+  make it enforceable.
+
+  14) Definition of "You" in This License. "You" throughout this License, whether
+  in upper or lower case, means an individual or a legal entity exercising rights
+  under, and complying with all of the terms of, this License. For legal
+  entities, "You" includes any entity that controls, is controlled by, or is
+  under common control with you. For purposes of this definition, "control" means
+  (i) the power, direct or indirect, to cause the direction or management of such
+  entity, whether by contract or otherwise, or (ii) ownership of fifty percent
+  (50%) or more of the outstanding shares, or (iii) beneficial ownership of such
+  entity.
+
+  15) Right to Use. You may use the Original Work in all ways not otherwise
+  restricted or conditioned by this License or by law, and Licensor promises not
+  to interfere with or be responsible for such uses by You.
+
+  This license is Copyright (C) 2003 Lawrence E. Rosen. All rights reserved.
+  Permission is hereby granted to copy and distribute this license without
+  modification. This license may not be modified without the express written
+  permission of its copyright owner.
+
+
+  xdg_user_dirs
+
+
+  Copyright (c) 2007 Red Hat, inc
+
+  Permission is hereby granted, free of charge, to any person
+  obtaining a copy of this software and associated documentation files
+  (the "Software"), to deal in the Software without restriction,
+  including without limitation the rights to use, copy, modify, merge,
+  publish, distribute, sublicense, and/or sell copies of the Software,
+  and to permit persons to whom the Software is furnished to do so,
+  subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be
+  included in all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+  BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+  ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+  CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+
+
+  uri_template
+
+
+                                   Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+  nss
+
+
+    NSS is available under the Mozilla Public License, version 2, a copy of which
+  is below.
+
+  Note on GPL Compatibility
+  -------------------------
+
+  The MPL 2, section 3.3, permits you to combine NSS with code under the GNU
+  General Public License (GPL) version 2, or any later version of that
+  license, to make a Larger Work, and distribute the result under the GPL.
+  The only condition is that you must also make NSS, and any changes you
+  have made to it, available to recipients under the terms of the MPL 2 also.
+
+  Anyone who receives the combined code from you does not have to continue
+  to dual licence in this way, and may, if they wish, distribute under the
+  terms of either of the two licences - either the MPL alone or the GPL
+  alone. However, we discourage people from distributing copies of NSS under
+  the GPL alone, because it means that any improvements they make cannot be
+  reincorporated into the main version of NSS. There is never a need to do
+  this for license compatibility reasons.
+
+  Note on LGPL Compatibility
+  --------------------------
+
+  The above also applies to combining MPLed code in a single library with
+  code under the GNU Lesser General Public License (LGPL) version 2.1, or
+  any later version of that license. If the LGPLed code and the MPLed code
+  are not in the same library, then the copyleft coverage of the two
+  licences does not overlap, so no issues arise.
+
+
+  Mozilla Public License Version 2.0
+  ==================================
+
+  1. Definitions
+  --------------
+
+  1.1. "Contributor"
+      means each individual or legal entity that creates, contributes to
+      the creation of, or owns Covered Software.
+
+  1.2. "Contributor Version"
+      means the combination of the Contributions of others (if any) used
+      by a Contributor and that particular Contributor's Contribution.
+
+  1.3. "Contribution"
+      means Covered Software of a particular Contributor.
+
+  1.4. "Covered Software"
+      means Source Code Form to which the initial Contributor has attached
+      the notice in Exhibit A, the Executable Form of such Source Code
+      Form, and Modifications of such Source Code Form, in each case
+      including portions thereof.
+
+  1.5. "Incompatible With Secondary Licenses"
+      means
+
+      (a) that the initial Contributor has attached the notice described
+          in Exhibit B to the Covered Software; or
+
+      (b) that the Covered Software was made available under the terms of
+          version 1.1 or earlier of the License, but not also under the
+          terms of a Secondary License.
+
+  1.6. "Executable Form"
+      means any form of the work other than Source Code Form.
+
+  1.7. "Larger Work"
+      means a work that combines Covered Software with other material, in
+      a separate file or files, that is not Covered Software.
+
+  1.8. "License"
+      means this document.
+
+  1.9. "Licensable"
+      means having the right to grant, to the maximum extent possible,
+      whether at the time of the initial grant or subsequently, any and
+      all of the rights conveyed by this License.
+
+  1.10. "Modifications"
+      means any of the following:
+
+      (a) any file in Source Code Form that results from an addition to,
+          deletion from, or modification of the contents of Covered
+          Software; or
+
+      (b) any new file in Source Code Form that contains any Covered
+          Software.
+
+  1.11. "Patent Claims" of a Contributor
+      means any patent claim(s), including without limitation, method,
+      process, and apparatus claims, in any patent Licensable by such
+      Contributor that would be infringed, but for the grant of the
+      License, by the making, using, selling, offering for sale, having
+      made, import, or transfer of either its Contributions or its
+      Contributor Version.
+
+  1.12. "Secondary License"
+      means either the GNU General Public License, Version 2.0, the GNU
+      Lesser General Public License, Version 2.1, the GNU Affero General
+      Public License, Version 3.0, or any later versions of those
+      licenses.
+
+  1.13. "Source Code Form"
+      means the form of the work preferred for making modifications.
+
+  1.14. "You" (or "Your")
+      means an individual or a legal entity exercising rights under this
+      License. For legal entities, "You" includes any entity that
+      controls, is controlled by, or is under common control with You. For
+      purposes of this definition, "control" means (a) the power, direct
+      or indirect, to cause the direction or management of such entity,
+      whether by contract or otherwise, or (b) ownership of more than
+      fifty percent (50%) of the outstanding shares or beneficial
+      ownership of such entity.
+
+  2. License Grants and Conditions
+  --------------------------------
+
+  2.1. Grants
+
+  Each Contributor hereby grants You a world-wide, royalty-free,
+  non-exclusive license:
+
+  (a) under intellectual property rights (other than patent or trademark)
+      Licensable by such Contributor to use, reproduce, make available,
+      modify, display, perform, distribute, and otherwise exploit its
+      Contributions, either on an unmodified basis, with Modifications, or
+      as part of a Larger Work; and
+
+  (b) under Patent Claims of such Contributor to make, use, sell, offer
+      for sale, have made, import, and otherwise transfer either its
+      Contributions or its Contributor Version.
+
+  2.2. Effective Date
+
+  The licenses granted in Section 2.1 with respect to any Contribution
+  become effective for each Contribution on the date the Contributor first
+  distributes such Contribution.
+
+  2.3. Limitations on Grant Scope
+
+  The licenses granted in this Section 2 are the only rights granted under
+  this License. No additional rights or licenses will be implied from the
+  distribution or licensing of Covered Software under this License.
+  Notwithstanding Section 2.1(b) above, no patent license is granted by a
+  Contributor:
+
+  (a) for any code that a Contributor has removed from Covered Software;
+      or
+
+  (b) for infringements caused by: (i) Your and any other third party's
+      modifications of Covered Software, or (ii) the combination of its
+      Contributions with other software (except as part of its Contributor
+      Version); or
+
+  (c) under Patent Claims infringed by Covered Software in the absence of
+      its Contributions.
+
+  This License does not grant any rights in the trademarks, service marks,
+  or logos of any Contributor (except as may be necessary to comply with
+  the notice requirements in Section 3.4).
+
+  2.4. Subsequent Licenses
+
+  No Contributor makes additional grants as a result of Your choice to
+  distribute the Covered Software under a subsequent version of this
+  License (see Section 10.2) or under the terms of a Secondary License (if
+  permitted under the terms of Section 3.3).
+
+  2.5. Representation
+
+  Each Contributor represents that the Contributor believes its
+  Contributions are its original creation(s) or it has sufficient rights
+  to grant the rights to its Contributions conveyed by this License.
+
+  2.6. Fair Use
+
+  This License is not intended to limit any rights You have under
+  applicable copyright doctrines of fair use, fair dealing, or other
+  equivalents.
+
+  2.7. Conditions
+
+  Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+  in Section 2.1.
+
+  3. Responsibilities
+  -------------------
+
+  3.1. Distribution of Source Form
+
+  All distribution of Covered Software in Source Code Form, including any
+  Modifications that You create or to which You contribute, must be under
+  the terms of this License. You must inform recipients that the Source
+  Code Form of the Covered Software is governed by the terms of this
+  License, and how they can obtain a copy of this License. You may not
+  attempt to alter or restrict the recipients' rights in the Source Code
+  Form.
+
+  3.2. Distribution of Executable Form
+
+  If You distribute Covered Software in Executable Form then:
+
+  (a) such Covered Software must also be made available in Source Code
+      Form, as described in Section 3.1, and You must inform recipients of
+      the Executable Form how they can obtain a copy of such Source Code
+      Form by reasonable means in a timely manner, at a charge no more
+      than the cost of distribution to the recipient; and
+
+  (b) You may distribute such Executable Form under the terms of this
+      License, or sublicense it under different terms, provided that the
+      license for the Executable Form does not attempt to limit or alter
+      the recipients' rights in the Source Code Form under this License.
+
+  3.3. Distribution of a Larger Work
+
+  You may create and distribute a Larger Work under terms of Your choice,
+  provided that You also comply with the requirements of this License for
+  the Covered Software. If the Larger Work is a combination of Covered
+  Software with a work governed by one or more Secondary Licenses, and the
+  Covered Software is not Incompatible With Secondary Licenses, this
+  License permits You to additionally distribute such Covered Software
+  under the terms of such Secondary License(s), so that the recipient of
+  the Larger Work may, at their option, further distribute the Covered
+  Software under the terms of either this License or such Secondary
+  License(s).
+
+  3.4. Notices
+
+  You may not remove or alter the substance of any license notices
+  (including copyright notices, patent notices, disclaimers of warranty,
+  or limitations of liability) contained within the Source Code Form of
+  the Covered Software, except that You may alter any license notices to
+  the extent required to remedy known factual inaccuracies.
+
+  3.5. Application of Additional Terms
+
+  You may choose to offer, and to charge a fee for, warranty, support,
+  indemnity or liability obligations to one or more recipients of Covered
+  Software. However, You may do so only on Your own behalf, and not on
+  behalf of any Contributor. You must make it absolutely clear that any
+  such warranty, support, indemnity, or liability obligation is offered by
+  You alone, and You hereby agree to indemnify every Contributor for any
+  liability incurred by such Contributor as a result of warranty, support,
+  indemnity or liability terms You offer. You may include additional
+  disclaimers of warranty and limitations of liability specific to any
+  jurisdiction.
+
+  4. Inability to Comply Due to Statute or Regulation
+  ---------------------------------------------------
+
+  If it is impossible for You to comply with any of the terms of this
+  License with respect to some or all of the Covered Software due to
+  statute, judicial order, or regulation then You must: (a) comply with
+  the terms of this License to the maximum extent possible; and (b)
+  describe the limitations and the code they affect. Such description must
+  be placed in a text file included with all distributions of the Covered
+  Software under this License. Except to the extent prohibited by statute
+  or regulation, such description must be sufficiently detailed for a
+  recipient of ordinary skill to be able to understand it.
+
+  5. Termination
+  --------------
+
+  5.1. The rights granted under this License will terminate automatically
+  if You fail to comply with any of its terms. However, if You become
+  compliant, then the rights granted under this License from a particular
+  Contributor are reinstated (a) provisionally, unless and until such
+  Contributor explicitly and finally terminates Your grants, and (b) on an
+  ongoing basis, if such Contributor fails to notify You of the
+  non-compliance by some reasonable means prior to 60 days after You have
+  come back into compliance. Moreover, Your grants from a particular
+  Contributor are reinstated on an ongoing basis if such Contributor
+  notifies You of the non-compliance by some reasonable means, this is the
+  first time You have received notice of non-compliance with this License
+  from such Contributor, and You become compliant prior to 30 days after
+  Your receipt of the notice.
+
+  5.2. If You initiate litigation against any entity by asserting a patent
+  infringement claim (excluding declaratory judgment actions,
+  counter-claims, and cross-claims) alleging that a Contributor Version
+  directly or indirectly infringes any patent, then the rights granted to
+  You by any and all Contributors for the Covered Software under Section
+  2.1 of this License shall terminate.
+
+  5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+  end user license agreements (excluding distributors and resellers) which
+  have been validly granted by You or Your distributors under this License
+  prior to termination shall survive termination.
+
+  ************************************************************************
+  *                                                                      *
+  *  6. Disclaimer of Warranty                                           *
+  *  -------------------------                                           *
+  *                                                                      *
+  *  Covered Software is provided under this License on an "as is"       *
+  *  basis, without warranty of any kind, either expressed, implied, or  *
+  *  statutory, including, without limitation, warranties that the       *
+  *  Covered Software is free of defects, merchantable, fit for a        *
+  *  particular purpose or non-infringing. The entire risk as to the     *
+  *  quality and performance of the Covered Software is with You.        *
+  *  Should any Covered Software prove defective in any respect, You     *
+  *  (not any Contributor) assume the cost of any necessary servicing,   *
+  *  repair, or correction. This disclaimer of warranty constitutes an   *
+  *  essential part of this License. No use of any Covered Software is   *
+  *  authorized under this License except under this disclaimer.         *
+  *                                                                      *
+  ************************************************************************
+
+  ************************************************************************
+  *                                                                      *
+  *  7. Limitation of Liability                                          *
+  *  --------------------------                                          *
+  *                                                                      *
+  *  Under no circumstances and under no legal theory, whether tort      *
+  *  (including negligence), contract, or otherwise, shall any           *
+  *  Contributor, or anyone who distributes Covered Software as          *
+  *  permitted above, be liable to You for any direct, indirect,         *
+  *  special, incidental, or consequential damages of any character      *
+  *  including, without limitation, damages for lost profits, loss of    *
+  *  goodwill, work stoppage, computer failure or malfunction, or any    *
+  *  and all other commercial damages or losses, even if such party      *
+  *  shall have been informed of the possibility of such damages. This   *
+  *  limitation of liability shall not apply to liability for death or   *
+  *  personal injury resulting from such party's negligence to the       *
+  *  extent applicable law prohibits such limitation. Some               *
+  *  jurisdictions do not allow the exclusion or limitation of           *
+  *  incidental or consequential damages, so this exclusion and          *
+  *  limitation may not apply to You.                                    *
+  *                                                                      *
+  ************************************************************************
+
+  8. Litigation
+  -------------
+
+  Any litigation relating to this License may be brought only in the
+  courts of a jurisdiction where the defendant maintains its principal
+  place of business and such litigation shall be governed by laws of that
+  jurisdiction, without reference to its conflict-of-law provisions.
+  Nothing in this Section shall prevent a party's ability to bring
+  cross-claims or counter-claims.
+
+  9. Miscellaneous
+  ----------------
+
+  This License represents the complete agreement concerning the subject
+  matter hereof. If any provision of this License is held to be
+  unenforceable, such provision shall be reformed only to the extent
+  necessary to make it enforceable. Any law or regulation which provides
+  that the language of a contract shall be construed against the drafter
+  shall not be used to construe this License against a Contributor.
+
+  10. Versions of the License
+  ---------------------------
+
+  10.1. New Versions
+
+  Mozilla Foundation is the license steward. Except as provided in Section
+  10.3, no one other than the license steward has the right to modify or
+  publish new versions of this License. Each version will be given a
+  distinguishing version number.
+
+  10.2. Effect of New Versions
+
+  You may distribute the Covered Software under the terms of the version
+  of the License under which You originally received the Covered Software,
+  or under the terms of any subsequent version published by the license
+  steward.
+
+  10.3. Modified Versions
+
+  If you create software not governed by this License, and you want to
+  create a new license for such software, you may create and use a
+  modified version of this License if you rename the license and remove
+  any references to the name of the license steward (except to note that
+  such modified license differs from this License).
+
+  10.4. Distributing Source Code Form that is Incompatible With Secondary
+  Licenses
+
+  If You choose to distribute Source Code Form that is Incompatible With
+  Secondary Licenses under the terms of this version of the License, the
+  notice described in Exhibit B of this License must be attached.
+
+  Exhibit A - Source Code Form License Notice
+  -------------------------------------------
+
+    This Source Code Form is subject to the terms of the Mozilla Public
+    License, v. 2.0. If a copy of the MPL was not distributed with this
+    file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+  If it is not possible or desirable to put the notice in a particular
+  file, then You may include the notice in a location (such as a LICENSE
+  file in a relevant directory) where a recipient would be likely to look
+  for such a notice.
+
+  You may add additional accurate notices of copyright ownership.
+
+  Exhibit B - "Incompatible With Secondary Licenses" Notice
+  ---------------------------------------------------------
+
+    This Source Code Form is "Incompatible With Secondary Licenses", as
+    defined by the Mozilla Public License, v. 2.0.
+
+
+
+  mozilla_security_manager
+
+
+  /* ***** BEGIN LICENSE BLOCK *****
+   * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+   *
+   * The contents of this file are subject to the Mozilla Public License Version
+   * 1.1 (the "License"); you may not use this file except in compliance with
+   * the License. You may obtain a copy of the License at
+   * http://www.mozilla.org/MPL/
+   *
+   * Software distributed under the License is distributed on an "AS IS" basis,
+   * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+   * for the specific language governing rights and limitations under the
+   * License.
+   *
+   * The Original Code is mozilla.org code.
+   *
+   * The Initial Developer of the Original Code is
+   * Netscape Communications Corporation.
+   * Portions created by the Initial Developer are Copyright (C) 2001
+   * the Initial Developer. All Rights Reserved.
+   *
+   * Contributor(s):
+   *
+   * Alternatively, the contents of this file may be used under the terms of
+   * either the GNU General Public License Version 2 or later (the "GPL"), or
+   * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+   * in which case the provisions of the GPL or the LGPL are applicable instead
+   * of those above. If you wish to allow use of your version of this file only
+   * under the terms of either the GPL or the LGPL, and not to allow others to
+   * use your version of this file under the terms of the MPL, indicate your
+   * decision by deleting the provisions above and replace them with the notice
+   * and other provisions required by the GPL or the LGPL. If you do not delete
+   * the provisions above, a recipient may use your version of this file under
+   * the terms of any one of the MPL, the GPL or the LGPL.
+   *
+   * ***** END LICENSE BLOCK ***** */
+
+
+  mozilla(url/third_party/mozilla)
+
+
+  Copyright 2007, Google Inc.
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+      * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+      * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+      * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+  -------------------------------------------------------------------------------
+
+  The file url_parse.cc is based on nsURLParsers.cc from Mozilla. This file is
+  licensed separately as follows:
+
+  The contents of this file are subject to the Mozilla Public License Version
+  1.1 (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+  http://www.mozilla.org/MPL/
+
+  Software distributed under the License is distributed on an "AS IS" basis,
+  WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+  for the specific language governing rights and limitations under the
+  License.
+
+  The Original Code is mozilla.org code.
+
+  The Initial Developer of the Original Code is
+  Netscape Communications Corporation.
+  Portions created by the Initial Developer are Copyright (C) 1998
+  the Initial Developer. All Rights Reserved.
+
+  Contributor(s):
+    Darin Fisher (original author)
+
+  Alternatively, the contents of this file may be used under the terms of
+  either the GNU General Public License Version 2 or later (the "GPL"), or
+  the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+  in which case the provisions of the GPL or the LGPL are applicable instead
+  of those above. If you wish to allow use of your version of this file only
+  under the terms of either the GPL or the LGPL, and not to allow others to
+  use your version of this file under the terms of the MPL, indicate your
+  decision by deleting the provisions above and replace them with the notice
+  and other provisions required by the GPL or the LGPL. If you do not delete
+  the provisions above, a recipient may use your version of this file under
+  the terms of any one of the MPL, the GPL or the LGPL.
+
+
+  nist-pkits
+
+  Name: NIST Public Key Interoperability Test Suite
+  Short Name: NIST PKITS
+  URL: http://csrc.nist.gov/groups/ST/crypto_apps_infra/pki/pkitesting.html
+  Version: 1.0.1
+  Date: April 14, 2011
+  License: Public Domain: United States Government Work under 17 U.S.C. 105
+  License File: NOT_SHIPPED
+
+  Description:
+  The Public Key Interoperability Test Suite (PKITS) is a comprehensive X.509
+  path validation test suite that was developed by NIST in conjunction with BAE
+  Systems and NSA.  The PKITS path validation test suite is designed to cover
+  most of the features specified in X.509 and RFC 3280.
+
+  Local Modifications:
+  Only the certs/ and crls/ directories were extracted from PKITS_data.zip.
+
+  pkits_testcases-inl.h is generated from the test descriptions in PKITS.pdf
+  using generate_tests.py.
+
+
+
+  FreeType
+
+
+                      The FreeType Project LICENSE
+                      ----------------------------
+
+                              2006-Jan-27
+
+                      Copyright 1996-2002, 2006 by
+            David Turner, Robert Wilhelm, and Werner Lemberg
+
+
+
+  Introduction
+  ============
+
+    The FreeType  Project is distributed in  several archive packages;
+    some of them may contain, in addition to the FreeType font engine,
+    various tools and  contributions which rely on, or  relate to, the
+    FreeType Project.
+
+    This  license applies  to all  files found  in such  packages, and
+    which do not  fall under their own explicit  license.  The license
+    affects  thus  the  FreeType   font  engine,  the  test  programs,
+    documentation and makefiles, at the very least.
+
+    This  license   was  inspired  by  the  BSD,   Artistic,  and  IJG
+    (Independent JPEG  Group) licenses, which  all encourage inclusion
+    and  use of  free  software in  commercial  and freeware  products
+    alike.  As a consequence, its main points are that:
+
+      o We don't promise that this software works. However, we will be
+        interested in any kind of bug reports. (`as is' distribution)
+
+      o You can  use this software for whatever you  want, in parts or
+        full form, without having to pay us. (`royalty-free' usage)
+
+      o You may not pretend that  you wrote this software.  If you use
+        it, or  only parts of it,  in a program,  you must acknowledge
+        somewhere  in  your  documentation  that  you  have  used  the
+        FreeType code. (`credits')
+
+    We  specifically  permit  and  encourage  the  inclusion  of  this
+    software, with  or without modifications,  in commercial products.
+    We  disclaim  all warranties  covering  The  FreeType Project  and
+    assume no liability related to The FreeType Project.
+
+
+    Finally,  many  people  asked  us  for  a  preferred  form  for  a
+    credit/disclaimer to use in compliance with this license.  We thus
+    encourage you to use the following text:
+
+     """
+      Portions of this software are copyright © <year> The FreeType
+      Project (www.freetype.org).  All rights reserved.
+     """
+
+    Please replace <year> with the value from the FreeType version you
+    actually use.
+
+
+  Legal Terms
+  ===========
+
+  0. Definitions
+  --------------
+
+    Throughout this license,  the terms `package', `FreeType Project',
+    and  `FreeType  archive' refer  to  the  set  of files  originally
+    distributed  by the  authors  (David Turner,  Robert Wilhelm,  and
+    Werner Lemberg) as the `FreeType Project', be they named as alpha,
+    beta or final release.
+
+    `You' refers to  the licensee, or person using  the project, where
+    `using' is a generic term including compiling the project's source
+    code as  well as linking it  to form a  `program' or `executable'.
+    This  program is  referred to  as  `a program  using the  FreeType
+    engine'.
+
+    This  license applies  to all  files distributed  in  the original
+    FreeType  Project,   including  all  source   code,  binaries  and
+    documentation,  unless  otherwise  stated   in  the  file  in  its
+    original, unmodified form as  distributed in the original archive.
+    If you are  unsure whether or not a particular  file is covered by
+    this license, you must contact us to verify this.
+
+    The FreeType  Project is copyright (C) 1996-2000  by David Turner,
+    Robert Wilhelm, and Werner Lemberg.  All rights reserved except as
+    specified below.
+
+  1. No Warranty
+  --------------
+
+    THE FREETYPE PROJECT  IS PROVIDED `AS IS' WITHOUT  WARRANTY OF ANY
+    KIND, EITHER  EXPRESS OR IMPLIED,  INCLUDING, BUT NOT  LIMITED TO,
+    WARRANTIES  OF  MERCHANTABILITY   AND  FITNESS  FOR  A  PARTICULAR
+    PURPOSE.  IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS
+    BE LIABLE  FOR ANY DAMAGES CAUSED  BY THE USE OR  THE INABILITY TO
+    USE, OF THE FREETYPE PROJECT.
+
+  2. Redistribution
+  -----------------
+
+    This  license  grants  a  worldwide, royalty-free,  perpetual  and
+    irrevocable right  and license to use,  execute, perform, compile,
+    display,  copy,   create  derivative  works   of,  distribute  and
+    sublicense the  FreeType Project (in  both source and  object code
+    forms)  and  derivative works  thereof  for  any  purpose; and  to
+    authorize others  to exercise  some or all  of the  rights granted
+    herein, subject to the following conditions:
+
+      o Redistribution of  source code  must retain this  license file
+        (`FTL.TXT') unaltered; any  additions, deletions or changes to
+        the original  files must be clearly  indicated in accompanying
+        documentation.   The  copyright   notices  of  the  unaltered,
+        original  files must  be  preserved in  all  copies of  source
+        files.
+
+      o Redistribution in binary form must provide a  disclaimer  that
+        states  that  the software is based in part of the work of the
+        FreeType Team,  in  the  distribution  documentation.  We also
+        encourage you to put an URL to the FreeType web page  in  your
+        documentation, though this isn't mandatory.
+
+    These conditions  apply to any  software derived from or  based on
+    the FreeType Project,  not just the unmodified files.   If you use
+    our work, you  must acknowledge us.  However, no  fee need be paid
+    to us.
+
+  3. Advertising
+  --------------
+
+    Neither the  FreeType authors and  contributors nor you  shall use
+    the name of the  other for commercial, advertising, or promotional
+    purposes without specific prior written permission.
+
+    We suggest,  but do not require, that  you use one or  more of the
+    following phrases to refer  to this software in your documentation
+    or advertising  materials: `FreeType Project',  `FreeType Engine',
+    `FreeType library', or `FreeType Distribution'.
+
+    As  you have  not signed  this license,  you are  not  required to
+    accept  it.   However,  as  the FreeType  Project  is  copyrighted
+    material, only  this license, or  another one contracted  with the
+    authors, grants you  the right to use, distribute,  and modify it.
+    Therefore,  by  using,  distributing,  or modifying  the  FreeType
+    Project, you indicate that you understand and accept all the terms
+    of this license.
+
+  4. Contacts
+  -----------
+
+    There are two mailing lists related to FreeType:
+
+      o freetype@nongnu.org
+
+        Discusses general use and applications of FreeType, as well as
+        future and  wanted additions to the  library and distribution.
+        If  you are looking  for support,  start in  this list  if you
+        haven't found anything to help you in the documentation.
+
+      o freetype-devel@nongnu.org
+
+        Discusses bugs,  as well  as engine internals,  design issues,
+        specific licenses, porting, etc.
+
+    Our home page can be found at
+
+      http://www.freetype.org
+
+
+  --- end of FTL.TXT ---
+
+
+
+  harfbuzz-ng
+
+  HarfBuzz is licensed under the so-called "Old MIT" license.  Details follow.
+  For parts of HarfBuzz that are licensed under different licenses see individual
+  files names COPYING in subdirectories where applicable.
+
+  Copyright © 2010,2011,2012  Google, Inc.
+  Copyright © 2012  Mozilla Foundation
+  Copyright © 2011  Codethink Limited
+  Copyright © 2008,2010  Nokia Corporation and/or its subsidiary(-ies)
+  Copyright © 2009  Keith Stribley
+  Copyright © 2009  Martin Hosken and SIL International
+  Copyright © 2007  Chris Wilson
+  Copyright © 2006  Behdad Esfahbod
+  Copyright © 2005  David Turner
+  Copyright © 2004,2007,2008,2009,2010  Red Hat, Inc.
+  Copyright © 1998-2004  David Turner and Werner Lemberg
+
+  For full copyright notices consult the individual files in the package.
+
+
+  Permission is hereby granted, without written agreement and without
+  license or royalty fees, to use, copy, modify, and distribute this
+  software and its documentation for any purpose, provided that the
+  above copyright notice and the following two paragraphs appear in
+  all copies of this software.
+
+  IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+  DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+  ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+  IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+  DAMAGE.
+
+  THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+  BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+  FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
+  ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+  PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+
+
+  icu
+
+
+  ICU License - ICU 1.8.1 and later
+
+  COPYRIGHT AND PERMISSION NOTICE
+
+  Copyright (c) 1995-2010 International Business Machines Corporation and others
+
+  All rights reserved.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"),
+  to deal in the Software without restriction, including without limitation
+  the rights to use, copy, modify, merge, publish, distribute, and/or sell
+  copies of the Software, and to permit persons
+  to whom the Software is furnished to do so, provided that the above
+  copyright notice(s) and this permission notice appear in all copies
+  of the Software and that both the above copyright notice(s) and this
+  permission notice appear in supporting documentation.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+  FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT
+  SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY
+  CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES
+  WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+  CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+  WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+  Except as contained in this notice, the name of a copyright holder shall not be
+  used in advertising or otherwise to promote the sale, use or other dealings in
+  this Software without prior written authorization of the copyright holder.
+
+  All trademarks and registered trademarks mentioned herein are the property of
+  their respective owners.
+
+
+
+  libevent
+
+  Copyright 2000-2007 Niels Provos <provos@citi.umich.edu>
+  Copyright 2007-2009 Niels Provos and Nick Mathewson
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+  1. Redistributions of source code must retain the above copyright
+     notice, this list of conditions and the following disclaimer.
+  2. Redistributions in binary form must reproduce the above copyright
+     notice, this list of conditions and the following disclaimer in the
+     documentation and/or other materials provided with the distribution.
+  3. The name of the author may not be used to endorse or promote products
+     derived from this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+  IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+  OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+  THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  libjpeg
+
+
+  (Copied from the README.)
+
+  --------------------------------------------------------------------------------
+
+  The authors make NO WARRANTY or representation, either express or implied,
+  with respect to this software, its quality, accuracy, merchantability, or
+  fitness for a particular purpose.  This software is provided "AS IS", and you,
+  its user, assume the entire risk as to its quality and accuracy.
+
+  This software is copyright (C) 1991-1998, Thomas G. Lane.
+  All Rights Reserved except as specified below.
+
+  Permission is hereby granted to use, copy, modify, and distribute this
+  software (or portions thereof) for any purpose, without fee, subject to these
+  conditions:
+  (1) If any part of the source code for this software is distributed, then this
+  README file must be included, with this copyright and no-warranty notice
+  unaltered; and any additions, deletions, or changes to the original files
+  must be clearly indicated in accompanying documentation.
+  (2) If only executable code is distributed, then the accompanying
+  documentation must state that "this software is based in part on the work of
+  the Independent JPEG Group".
+  (3) Permission for use of this software is granted only if the user accepts
+  full responsibility for any undesirable consequences; the authors accept
+  NO LIABILITY for damages of any kind.
+
+  These conditions apply to any software derived from or based on the IJG code,
+  not just to the unmodified library.  If you use our work, you ought to
+  acknowledge us.
+
+  Permission is NOT granted for the use of any IJG author's name or company name
+  in advertising or publicity relating to this software or products derived from
+  it.  This software may be referred to only as "the Independent JPEG Group's
+  software".
+
+  We specifically permit and encourage the use of this software as the basis of
+  commercial products, provided that all warranty or liability claims are
+  assumed by the product vendor.
+
+
+  ansi2knr.c is included in this distribution by permission of L. Peter Deutsch,
+  sole proprietor of its copyright holder, Aladdin Enterprises of Menlo Park, CA.
+  ansi2knr.c is NOT covered by the above copyright and conditions, but instead
+  by the usual distribution terms of the Free Software Foundation; principally,
+  that you must include source code if you redistribute it.  (See the file
+  ansi2knr.c for full details.)  However, since ansi2knr.c is not needed as part
+  of any program generated from the IJG code, this does not limit you more than
+  the foregoing paragraphs do.
+
+  The Unix configuration script "configure" was produced with GNU Autoconf.
+  It is copyright by the Free Software Foundation but is freely distributable.
+  The same holds for its supporting scripts (config.guess, config.sub,
+  ltconfig, ltmain.sh).  Another support script, install-sh, is copyright
+  by M.I.T. but is also freely distributable.
+
+  It appears that the arithmetic coding option of the JPEG spec is covered by
+  patents owned by IBM, AT&T, and Mitsubishi.  Hence arithmetic coding cannot
+  legally be used without obtaining one or more licenses.  For this reason,
+  support for arithmetic coding has been removed from the free JPEG software.
+  (Since arithmetic coding provides only a marginal gain over the unpatented
+  Huffman mode, it is unlikely that very many implementations will support it.)
+  So far as we are aware, there are no patent restrictions on the remaining
+  code.
+
+  The IJG distribution formerly included code to read and write GIF files.
+  To avoid entanglement with the Unisys LZW patent, GIF reading support has
+  been removed altogether, and the GIF writer has been simplified to produce
+  "uncompressed GIFs".  This technique does not use the LZW algorithm; the
+  resulting GIF files are larger than usual, but are readable by all standard
+  GIF decoders.
+
+  We are required to state that
+      "The Graphics Interchange Format(c) is the Copyright property of
+      CompuServe Incorporated.  GIF(sm) is a Service Mark property of
+      CompuServe Incorporated."
+
+
+
+  libpng
+
+
+  This copy of the libpng notices is provided for your convenience.  In case of
+  any discrepancy between this copy and the notices in the file png.h that is
+  included in the libpng distribution, the latter shall prevail.
+
+  COPYRIGHT NOTICE, DISCLAIMER, and LICENSE:
+
+  If you modify libpng you may insert additional notices immediately following
+  this sentence.
+
+  This code is released under the libpng license.
+
+  libpng versions 1.2.6, August 15, 2004, through 1.2.45, July 7, 2011, are
+  Copyright (c) 2004, 2006-2009 Glenn Randers-Pehrson, and are
+  distributed according to the same disclaimer and license as libpng-1.2.5
+  with the following individual added to the list of Contributing Authors
+
+     Cosmin Truta
+
+  libpng versions 1.0.7, July 1, 2000, through 1.2.5 - October 3, 2002, are
+  Copyright (c) 2000-2002 Glenn Randers-Pehrson, and are
+  distributed according to the same disclaimer and license as libpng-1.0.6
+  with the following individuals added to the list of Contributing Authors
+
+     Simon-Pierre Cadieux
+     Eric S. Raymond
+     Gilles Vollant
+
+  and with the following additions to the disclaimer:
+
+     There is no warranty against interference with your enjoyment of the
+     library or against infringement.  There is no warranty that our
+     efforts or the library will fulfill any of your particular purposes
+     or needs.  This library is provided with all faults, and the entire
+     risk of satisfactory quality, performance, accuracy, and effort is with
+     the user.
+
+  libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are
+  Copyright (c) 1998, 1999 Glenn Randers-Pehrson, and are
+  distributed according to the same disclaimer and license as libpng-0.96,
+  with the following individuals added to the list of Contributing Authors:
+
+     Tom Lane
+     Glenn Randers-Pehrson
+     Willem van Schaik
+
+  libpng versions 0.89, June 1996, through 0.96, May 1997, are
+  Copyright (c) 1996, 1997 Andreas Dilger
+  Distributed according to the same disclaimer and license as libpng-0.88,
+  with the following individuals added to the list of Contributing Authors:
+
+     John Bowler
+     Kevin Bracey
+     Sam Bushell
+     Magnus Holmgren
+     Greg Roelofs
+     Tom Tanner
+
+  libpng versions 0.5, May 1995, through 0.88, January 1996, are
+  Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.
+
+  For the purposes of this copyright and license, "Contributing Authors"
+  is defined as the following set of individuals:
+
+     Andreas Dilger
+     Dave Martindale
+     Guy Eric Schalnat
+     Paul Schmidt
+     Tim Wegner
+
+  The PNG Reference Library is supplied "AS IS".  The Contributing Authors
+  and Group 42, Inc. disclaim all warranties, expressed or implied,
+  including, without limitation, the warranties of merchantability and of
+  fitness for any purpose.  The Contributing Authors and Group 42, Inc.
+  assume no liability for direct, indirect, incidental, special, exemplary,
+  or consequential damages, which may result from the use of the PNG
+  Reference Library, even if advised of the possibility of such damage.
+
+  Permission is hereby granted to use, copy, modify, and distribute this
+  source code, or portions hereof, for any purpose, without fee, subject
+  to the following restrictions:
+
+  1. The origin of this source code must not be misrepresented.
+
+  2. Altered versions must be plainly marked as such and must not
+     be misrepresented as being the original source.
+
+  3. This Copyright notice may not be removed or altered from any
+     source or altered source distribution.
+
+  The Contributing Authors and Group 42, Inc. specifically permit, without
+  fee, and encourage the use of this source code as a component to
+  supporting the PNG file format in commercial products.  If you use this
+  source code in a product, acknowledgment is not required but would be
+  appreciated.
+
+
+  A "png_get_copyright" function is available, for convenient use in "about"
+  boxes and the like:
+
+     printf("%s",png_get_copyright(NULL));
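+
+  For instance, a minimal standalone C program using this function (a sketch
+  only; it assumes the libpng headers are available and the program is linked
+  against libpng) could be:
+
+     /* Print the bundled libpng copyright string, e.g. in an "about" box. */
+     #include <stdio.h>
+     #include <png.h>
+
+     int main(void) {
+       /* png_get_copyright() accepts NULL; no png_struct is needed. */
+       printf("%s\n", png_get_copyright(NULL));
+       return 0;
+     }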
+
+  Also, the PNG logo (in PNG format, of course) is supplied in the
+  files "pngbar.png" and "pngbar.jpg (88x31) and "pngnow.png" (98x31).
+
+  Libpng is OSI Certified Open Source Software.  OSI Certified Open Source is a
+  certification mark of the Open Source Initiative.
+
+  Glenn Randers-Pehrson
+  glennrp at users.sourceforge.net
+  July 7, 2011
+
+
+
+
+
+  WebP image encoder/decoder
+
+
+  Copyright (c) 2010, Google Inc. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in
+      the documentation and/or other materials provided with the
+      distribution.
+
+    * Neither the name of Google nor the names of its contributors may
+      be used to endorse or promote products derived from this software
+      without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+  Additional IP Rights Grant (Patents)
+
+  "This implementation" means the copyrightable works distributed by
+  Google as part of the WebM Project.
+
+  Google hereby grants to you a perpetual, worldwide, non-exclusive,
+  no-charge, royalty-free, irrevocable (except as stated in this section)
+  patent license to make, have made, use, offer to sell, sell, import,
+  transfer, and otherwise run, modify and propagate the contents of this
+  implementation of VP8, where such license applies only to those patent
+  claims, both currently owned by Google and acquired in the future,
+  licensable by Google that are necessarily infringed by this
+  implementation of VP8. This grant does not include claims that would be
+  infringed only as a consequence of further modification of this
+  implementation. If you or your agent or exclusive licensee institute or
+  order or agree to the institution of patent litigation against any
+  entity (including a cross-claim or counterclaim in a lawsuit) alleging
+  that this implementation of VP8 or any code incorporated within this
+  implementation of VP8 constitutes direct or contributory patent
+  infringement, or inducement of patent infringement, then any patent
+  rights granted to you under this License for this implementation of VP8
+  shall terminate as of the date such litigation is filed.
+
+
+
+  libxml
+
+
+  Except where otherwise noted in the source code (e.g. the files hash.c,
+  list.c and the trio files, which are covered by a similar licence but
+  with different Copyright notices) all the files are:
+
+   Copyright (C) 1998-2003 Daniel Veillard.  All Rights Reserved.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is fur-
+  nished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FIT-
+  NESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+  DANIEL VEILLARD BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+  IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CON-
+  NECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+  Except as contained in this notice, the name of Daniel Veillard shall not
+  be used in advertising or otherwise to promote the sale, use or other deal-
+  ings in this Software without prior written authorization from him.
+
+
+
+  modp base64 decoder
+
+
+   * MODP_B64 - High performance base64 encoder/decoder
+   * Version 1.3 -- 17-Mar-2006
+   * http://modp.com/release/base64
+   *
+   * Copyright (c) 2005, 2006  Nick Galbreath -- nickg [at] modp [dot] com
+   * All rights reserved.
+   *
+   * Redistribution and use in source and binary forms, with or without
+   * modification, are permitted provided that the following conditions are
+   * met:
+   *
+   *   Redistributions of source code must retain the above copyright
+   *   notice, this list of conditions and the following disclaimer.
+   *
+   *   Redistributions in binary form must reproduce the above copyright
+   *   notice, this list of conditions and the following disclaimer in the
+   *   documentation and/or other materials provided with the distribution.
+   *
+   *   Neither the name of the modp.com nor the names of its
+   *   contributors may be used to endorse or promote products derived from
+   *   this software without specific prior written permission.
+   *
+   * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  OTS (OpenType Sanitizer)
+
+
+  // Copyright (c) 2009 The Chromium Authors. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  protobuf
+
+  This license applies to all parts of Protocol Buffers except the following:
+
+    - Atomicops support for generic gcc, located in
+      src/google/protobuf/stubs/atomicops_internals_generic_gcc.h.
+      This file is copyrighted by Red Hat Inc.
+
+    - Atomicops support for AIX/POWER, located in
+      src/google/protobuf/stubs/atomicops_internals_power.h.
+      This file is copyrighted by Bloomberg Finance LP.
+
+  Copyright 2014, Google Inc.  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+      * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+      * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+      * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+  Code generated by the Protocol Buffer compiler is owned by the owner
+  of the input file used when generating it.  This code is not
+  standalone and requires a support library to be linked with it.  This
+  support library is itself covered by the above license.
+
+
+
+  Skia
+
+
+  // Copyright (c) 2011 Google Inc. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+  sqlite
+
+
+  The author disclaims copyright to this source code.  In place of
+  a legal notice, here is a blessing:
+
+     May you do good and not evil.
+     May you find forgiveness for yourself and forgive others.
+     May you share freely, never taking more than you give.
+
+
+
+  zlib
+
+
+  /* zlib.h -- interface of the 'zlib' general purpose compression library
+    version 1.2.4, March 14th, 2010
+
+    Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler
+
+    This software is provided 'as-is', without any express or implied
+    warranty.  In no event will the authors be held liable for any damages
+    arising from the use of this software.
+
+    Permission is granted to anyone to use this software for any purpose,
+    including commercial applications, and to alter it and redistribute it
+    freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+       claim that you wrote the original software. If you use this software
+       in a product, an acknowledgment in the product documentation would be
+       appreciated but is not required.
+    2. Altered source versions must be plainly marked as such, and must not be
+       misrepresented as being the original software.
+    3. This notice may not be removed or altered from any source distribution.
+
+    Jean-loup Gailly
+    Mark Adler
+
+  */
+
+
+
+  WebKit
+
+  Copyright (C) 2006, 2007, 2008, 2009 Apple Inc.  All rights reserved.
+  Copyright (C) 2007-2009 Torch Mobile, Inc.
+  Copyright (C) Research In Motion Limited 2010. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+  1. Redistributions of source code must retain the above copyright
+     notice, this list of conditions and the following disclaimer.
+  2. Redistributions in binary form must reproduce the above copyright
+     notice, this list of conditions and the following disclaimer in the
+     documentation and/or other materials provided with the distribution.
+
+  THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+  EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+  PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+  CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+  OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  SuperFastHash
+
+  Paul Hsieh OLD BSD license
+
+  Copyright (c) 2010, Paul Hsieh
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without modification,
+  are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice, this
+    list of conditions and the following disclaimer.
+  * Redistributions in binary form must reproduce the above copyright notice, this
+    list of conditions and the following disclaimer in the documentation and/or
+    other materials provided with the distribution.
+  * Neither my name, Paul Hsieh, nor the names of any other contributors to the
+    code use may not be used to endorse or promote products derived from this
+    software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  musl
+
+  ----------------------------------------------------------------------
+  Copyright © 2005-2014 Rich Felker, et al.
+
+  Permission is hereby granted, free of charge, to any person obtaining
+  a copy of this software and associated documentation files (the
+  "Software"), to deal in the Software without restriction, including
+  without limitation the rights to use, copy, modify, merge, publish,
+  distribute, sublicense, and/or sell copies of the Software, and to
+  permit persons to whom the Software is furnished to do so, subject to
+  the following conditions:
+
+  The above copyright notice and this permission notice shall be
+  included in all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+  CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+  TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+  SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+  ----------------------------------------------------------------------
+
+
+
+  brotli
+
+  Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+
+
+
+  woff2
+
+  Copyright (c) 2013-2017 by the WOFF2 Authors.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+
+
+
+  jsmn (indirect usage in ce_cdm)
+
+  Copyright (c) 2010 Serge A. Zaitsev
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+
+
+
+  libvpx
+
+
+  Copyright (c) 2010, The WebM Project authors. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in
+      the documentation and/or other materials provided with the
+      distribution.
+
+    * Neither the name of Google, nor the WebM Project, nor the names
+      of its contributors may be used to endorse or promote products
+      derived from this software without specific prior written
+      permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  ots
+
+
+  Copyright (c) 2009-2017 The OTS Authors. All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+     * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+     * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+     * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  quiche
+
+  // Copyright 2015 The Chromium Authors. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  jinja2
+
+  Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+  Some rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+      * Redistributions of source code must retain the above copyright
+        notice, this list of conditions and the following disclaimer.
+
+      * Redistributions in binary form must reproduce the above
+        copyright notice, this list of conditions and the following
+        disclaimer in the documentation and/or other materials provided
+        with the distribution.
+
+      * The names of the contributors may not be used to endorse or
+        promote products derived from this software without specific
+        prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  opus
+
+
+  Contributions to the collaboration shall not be considered confidential.
+
+  Each contributor represents and warrants that it has the right and
+  authority to license copyright in its contributions to the collaboration.
+
+  Each contributor agrees to license the copyright in the contributions
+  under the Modified (2-clause or 3-clause) BSD License or the Clear BSD License.
+
+  Please see the IPR statements submitted to the IETF for the complete
+  patent licensing details:
+
+  Xiph.Org Foundation:
+  https://datatracker.ietf.org/ipr/1524/
+
+  Microsoft Corporation:
+  https://datatracker.ietf.org/ipr/1914/
+
+  Skype Limited:
+  https://datatracker.ietf.org/ipr/1602/
+
+  Broadcom Corporation:
+  https://datatracker.ietf.org/ipr/1526/
+
+
+
+  LLVM
+
+
+  ==============================================================================
+  LLVM Release License
+  ==============================================================================
+  University of Illinois/NCSA
+  Open Source License
+
+  Copyright (c) 2003-2018 University of Illinois at Urbana-Champaign.
+  All rights reserved.
+
+  Developed by:
+
+      LLVM Team
+
+      University of Illinois at Urbana-Champaign
+
+      http://llvm.org
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy of
+  this software and associated documentation files (the "Software"), to deal with
+  the Software without restriction, including without limitation the rights to
+  use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+  of the Software, and to permit persons to whom the Software is furnished to do
+  so, subject to the following conditions:
+
+      * Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimers.
+
+      * Redistributions in binary form must reproduce the above copyright notice,
+        this list of conditions and the following disclaimers in the
+        documentation and/or other materials provided with the distribution.
+
+      * Neither the names of the LLVM Team, University of Illinois at
+        Urbana-Champaign, nor the names of its contributors may be used to
+        endorse or promote products derived from this Software without specific
+        prior written permission.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+  FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+  CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
+  SOFTWARE.
+
+  ==============================================================================
+  Copyrights and Licenses for Third Party Software Distributed with LLVM:
+  ==============================================================================
+  The LLVM software contains code written by third parties.  Such software will
+  have its own individual LICENSE.TXT file in the directory in which it appears.
+  This file will describe the copyrights, license, and restrictions which apply
+  to that code.
+
+  The disclaimer of warranty in the University of Illinois Open Source License
+  applies to all code in the LLVM Distribution, and nothing in any of the
+  other licenses gives permission to use the names of the LLVM Team or the
+  University of Illinois to endorse or promote products derived from this
+  Software.
+
+  The following pieces of software have additional or alternate copyrights,
+  licenses, and/or restrictions:
+
+  Program             Directory
+  -------             ---------
+  Google Test         llvm/utils/unittest/googletest
+  OpenBSD regex       llvm/lib/Support/{reg*, COPYRIGHT.regex}
+  pyyaml tests        llvm/test/YAMLParser/{*.data, LICENSE.TXT}
+  ARM contributions   llvm/lib/Target/ARM/LICENSE.TXT
+  md5 contributions   llvm/lib/Support/MD5.cpp llvm/include/llvm/Support/MD5.h
+
+
+
+  libde265 (headers only)
+
+                       GNU LESSER GENERAL PUBLIC LICENSE
+                         Version 3, 29 June 2007
+
+   Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+   Everyone is permitted to copy and distribute verbatim copies
+   of this license document, but changing it is not allowed.
+
+
+    This version of the GNU Lesser General Public License incorporates
+  the terms and conditions of version 3 of the GNU General Public
+  License, supplemented by the additional permissions listed below.
+
+    0. Additional Definitions.
+
+    As used herein, "this License" refers to version 3 of the GNU Lesser
+  General Public License, and the "GNU GPL" refers to version 3 of the GNU
+  General Public License.
+
+    "The Library" refers to a covered work governed by this License,
+  other than an Application or a Combined Work as defined below.
+
+    An "Application" is any work that makes use of an interface provided
+  by the Library, but which is not otherwise based on the Library.
+  Defining a subclass of a class defined by the Library is deemed a mode
+  of using an interface provided by the Library.
+
+    A "Combined Work" is a work produced by combining or linking an
+  Application with the Library.  The particular version of the Library
+  with which the Combined Work was made is also called the "Linked
+  Version".
+
+    The "Minimal Corresponding Source" for a Combined Work means the
+  Corresponding Source for the Combined Work, excluding any source code
+  for portions of the Combined Work that, considered in isolation, are
+  based on the Application, and not on the Linked Version.
+
+    The "Corresponding Application Code" for a Combined Work means the
+  object code and/or source code for the Application, including any data
+  and utility programs needed for reproducing the Combined Work from the
+  Application, but excluding the System Libraries of the Combined Work.
+
+    1. Exception to Section 3 of the GNU GPL.
+
+    You may convey a covered work under sections 3 and 4 of this License
+  without being bound by section 3 of the GNU GPL.
+
+    2. Conveying Modified Versions.
+
+    If you modify a copy of the Library, and, in your modifications, a
+  facility refers to a function or data to be supplied by an Application
+  that uses the facility (other than as an argument passed when the
+  facility is invoked), then you may convey a copy of the modified
+  version:
+
+     a) under this License, provided that you make a good faith effort to
+     ensure that, in the event an Application does not supply the
+     function or data, the facility still operates, and performs
+     whatever part of its purpose remains meaningful, or
+
+     b) under the GNU GPL, with none of the additional permissions of
+     this License applicable to that copy.
+
+    3. Object Code Incorporating Material from Library Header Files.
+
+    The object code form of an Application may incorporate material from
+  a header file that is part of the Library.  You may convey such object
+  code under terms of your choice, provided that, if the incorporated
+  material is not limited to numerical parameters, data structure
+  layouts and accessors, or small macros, inline functions and templates
+  (ten or fewer lines in length), you do both of the following:
+
+     a) Give prominent notice with each copy of the object code that the
+     Library is used in it and that the Library and its use are
+     covered by this License.
+
+     b) Accompany the object code with a copy of the GNU GPL and this license
+     document.
+
+    4. Combined Works.
+
+    You may convey a Combined Work under terms of your choice that,
+  taken together, effectively do not restrict modification of the
+  portions of the Library contained in the Combined Work and reverse
+  engineering for debugging such modifications, if you also do each of
+  the following:
+
+     a) Give prominent notice with each copy of the Combined Work that
+     the Library is used in it and that the Library and its use are
+     covered by this License.
+
+     b) Accompany the Combined Work with a copy of the GNU GPL and this license
+     document.
+
+     c) For a Combined Work that displays copyright notices during
+     execution, include the copyright notice for the Library among
+     these notices, as well as a reference directing the user to the
+     copies of the GNU GPL and this license document.
+
+     d) Do one of the following:
+
+         0) Convey the Minimal Corresponding Source under the terms of this
+         License, and the Corresponding Application Code in a form
+         suitable for, and under terms that permit, the user to
+         recombine or relink the Application with a modified version of
+         the Linked Version to produce a modified Combined Work, in the
+         manner specified by section 6 of the GNU GPL for conveying
+         Corresponding Source.
+
+         1) Use a suitable shared library mechanism for linking with the
+         Library.  A suitable mechanism is one that (a) uses at run time
+         a copy of the Library already present on the user's computer
+         system, and (b) will operate properly with a modified version
+         of the Library that is interface-compatible with the Linked
+         Version.
+
+     e) Provide Installation Information, but only if you would otherwise
+     be required to provide such information under section 6 of the
+     GNU GPL, and only to the extent that such information is
+     necessary to install and execute a modified version of the
+     Combined Work produced by recombining or relinking the
+     Application with a modified version of the Linked Version. (If
+     you use option 4d0, the Installation Information must accompany
+     the Minimal Corresponding Source and Corresponding Application
+     Code. If you use option 4d1, you must provide the Installation
+     Information in the manner specified by section 6 of the GNU GPL
+     for conveying Corresponding Source.)
+
+    5. Combined Libraries.
+
+    You may place library facilities that are a work based on the
+  Library side by side in a single library together with other library
+  facilities that are not Applications and are not covered by this
+  License, and convey such a combined library under terms of your
+  choice, if you do both of the following:
+
+     a) Accompany the combined library with a copy of the same work based
+     on the Library, uncombined with any other library facilities,
+     conveyed under the terms of this License.
+
+     b) Give prominent notice with the combined library that part of it
+     is a work based on the Library, and explaining where to find the
+     accompanying uncombined form of the same work.
+
+    6. Revised Versions of the GNU Lesser General Public License.
+
+    The Free Software Foundation may publish revised and/or new versions
+  of the GNU Lesser General Public License from time to time. Such new
+  versions will be similar in spirit to the present version, but may
+  differ in detail to address new problems or concerns.
+
+    Each version is given a distinguishing version number. If the
+  Library as you received it specifies that a certain numbered version
+  of the GNU Lesser General Public License "or any later version"
+  applies to it, you have the option of following the terms and
+  conditions either of that published version or of any later version
+  published by the Free Software Foundation. If the Library as you
+  received it does not specify a version number of the GNU Lesser
+  General Public License, you may choose any version of the GNU Lesser
+  General Public License ever published by the Free Software Foundation.
+
+    If the Library as you received it specifies that a proxy can decide
+  whether future versions of the GNU Lesser General Public License shall
+  apply, that proxy's public statement of acceptance of any version is
+  permanent authorization for you to choose that version for the
+  Library.
+
+  ----------------------------------------------------------------------
+
+                      GNU GENERAL PUBLIC LICENSE
+                         Version 3, 29 June 2007
+
+   Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+   Everyone is permitted to copy and distribute verbatim copies
+   of this license document, but changing it is not allowed.
+
+                              Preamble
+
+    The GNU General Public License is a free, copyleft license for
+  software and other kinds of works.
+
+    The licenses for most software and other practical works are designed
+  to take away your freedom to share and change the works.  By contrast,
+  the GNU General Public License is intended to guarantee your freedom to
+  share and change all versions of a program--to make sure it remains free
+  software for all its users.  We, the Free Software Foundation, use the
+  GNU General Public License for most of our software; it applies also to
+  any other work released this way by its authors.  You can apply it to
+  your programs, too.
+
+    When we speak of free software, we are referring to freedom, not
+  price.  Our General Public Licenses are designed to make sure that you
+  have the freedom to distribute copies of free software (and charge for
+  them if you wish), that you receive source code or can get it if you
+  want it, that you can change the software or use pieces of it in new
+  free programs, and that you know you can do these things.
+
+    To protect your rights, we need to prevent others from denying you
+  these rights or asking you to surrender the rights.  Therefore, you have
+  certain responsibilities if you distribute copies of the software, or if
+  you modify it: responsibilities to respect the freedom of others.
+
+    For example, if you distribute copies of such a program, whether
+  gratis or for a fee, you must pass on to the recipients the same
+  freedoms that you received.  You must make sure that they, too, receive
+  or can get the source code.  And you must show them these terms so they
+  know their rights.
+
+    Developers that use the GNU GPL protect your rights with two steps:
+  (1) assert copyright on the software, and (2) offer you this License
+  giving you legal permission to copy, distribute and/or modify it.
+
+    For the developers' and authors' protection, the GPL clearly explains
+  that there is no warranty for this free software.  For both users' and
+  authors' sake, the GPL requires that modified versions be marked as
+  changed, so that their problems will not be attributed erroneously to
+  authors of previous versions.
+
+    Some devices are designed to deny users access to install or run
+  modified versions of the software inside them, although the manufacturer
+  can do so.  This is fundamentally incompatible with the aim of
+  protecting users' freedom to change the software.  The systematic
+  pattern of such abuse occurs in the area of products for individuals to
+  use, which is precisely where it is most unacceptable.  Therefore, we
+  have designed this version of the GPL to prohibit the practice for those
+  products.  If such problems arise substantially in other domains, we
+  stand ready to extend this provision to those domains in future versions
+  of the GPL, as needed to protect the freedom of users.
+
+    Finally, every program is threatened constantly by software patents.
+  States should not allow patents to restrict development and use of
+  software on general-purpose computers, but in those that do, we wish to
+  avoid the special danger that patents applied to a free program could
+  make it effectively proprietary.  To prevent this, the GPL assures that
+  patents cannot be used to render the program non-free.
+
+    The precise terms and conditions for copying, distribution and
+  modification follow.
+
+                         TERMS AND CONDITIONS
+
+    0. Definitions.
+
+    "This License" refers to version 3 of the GNU General Public License.
+
+    "Copyright" also means copyright-like laws that apply to other kinds of
+  works, such as semiconductor masks.
+
+    "The Program" refers to any copyrightable work licensed under this
+  License.  Each licensee is addressed as "you".  "Licensees" and
+  "recipients" may be individuals or organizations.
+
+    To "modify" a work means to copy from or adapt all or part of the work
+  in a fashion requiring copyright permission, other than the making of an
+  exact copy.  The resulting work is called a "modified version" of the
+  earlier work or a work "based on" the earlier work.
+
+    A "covered work" means either the unmodified Program or a work based
+  on the Program.
+
+    To "propagate" a work means to do anything with it that, without
+  permission, would make you directly or secondarily liable for
+  infringement under applicable copyright law, except executing it on a
+  computer or modifying a private copy.  Propagation includes copying,
+  distribution (with or without modification), making available to the
+  public, and in some countries other activities as well.
+
+    To "convey" a work means any kind of propagation that enables other
+  parties to make or receive copies.  Mere interaction with a user through
+  a computer network, with no transfer of a copy, is not conveying.
+
+    An interactive user interface displays "Appropriate Legal Notices"
+  to the extent that it includes a convenient and prominently visible
+  feature that (1) displays an appropriate copyright notice, and (2)
+  tells the user that there is no warranty for the work (except to the
+  extent that warranties are provided), that licensees may convey the
+  work under this License, and how to view a copy of this License.  If
+  the interface presents a list of user commands or options, such as a
+  menu, a prominent item in the list meets this criterion.
+
+    1. Source Code.
+
+    The "source code" for a work means the preferred form of the work
+  for making modifications to it.  "Object code" means any non-source
+  form of a work.
+
+    A "Standard Interface" means an interface that either is an official
+  standard defined by a recognized standards body, or, in the case of
+  interfaces specified for a particular programming language, one that
+  is widely used among developers working in that language.
+
+    The "System Libraries" of an executable work include anything, other
+  than the work as a whole, that (a) is included in the normal form of
+  packaging a Major Component, but which is not part of that Major
+  Component, and (b) serves only to enable use of the work with that
+  Major Component, or to implement a Standard Interface for which an
+  implementation is available to the public in source code form.  A
+  "Major Component", in this context, means a major essential component
+  (kernel, window system, and so on) of the specific operating system
+  (if any) on which the executable work runs, or a compiler used to
+  produce the work, or an object code interpreter used to run it.
+
+    The "Corresponding Source" for a work in object code form means all
+  the source code needed to generate, install, and (for an executable
+  work) run the object code and to modify the work, including scripts to
+  control those activities.  However, it does not include the work's
+  System Libraries, or general-purpose tools or generally available free
+  programs which are used unmodified in performing those activities but
+  which are not part of the work.  For example, Corresponding Source
+  includes interface definition files associated with source files for
+  the work, and the source code for shared libraries and dynamically
+  linked subprograms that the work is specifically designed to require,
+  such as by intimate data communication or control flow between those
+  subprograms and other parts of the work.
+
+    The Corresponding Source need not include anything that users
+  can regenerate automatically from other parts of the Corresponding
+  Source.
+
+    The Corresponding Source for a work in source code form is that
+  same work.
+
+    2. Basic Permissions.
+
+    All rights granted under this License are granted for the term of
+  copyright on the Program, and are irrevocable provided the stated
+  conditions are met.  This License explicitly affirms your unlimited
+  permission to run the unmodified Program.  The output from running a
+  covered work is covered by this License only if the output, given its
+  content, constitutes a covered work.  This License acknowledges your
+  rights of fair use or other equivalent, as provided by copyright law.
+
+    You may make, run and propagate covered works that you do not
+  convey, without conditions so long as your license otherwise remains
+  in force.  You may convey covered works to others for the sole purpose
+  of having them make modifications exclusively for you, or provide you
+  with facilities for running those works, provided that you comply with
+  the terms of this License in conveying all material for which you do
+  not control copyright.  Those thus making or running the covered works
+  for you must do so exclusively on your behalf, under your direction
+  and control, on terms that prohibit them from making any copies of
+  your copyrighted material outside their relationship with you.
+
+    Conveying under any other circumstances is permitted solely under
+  the conditions stated below.  Sublicensing is not allowed; section 10
+  makes it unnecessary.
+
+    3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+    No covered work shall be deemed part of an effective technological
+  measure under any applicable law fulfilling obligations under article
+  11 of the WIPO copyright treaty adopted on 20 December 1996, or
+  similar laws prohibiting or restricting circumvention of such
+  measures.
+
+    When you convey a covered work, you waive any legal power to forbid
+  circumvention of technological measures to the extent such circumvention
+  is effected by exercising rights under this License with respect to
+  the covered work, and you disclaim any intention to limit operation or
+  modification of the work as a means of enforcing, against the work's
+  users, your or third parties' legal rights to forbid circumvention of
+  technological measures.
+
+    4. Conveying Verbatim Copies.
+
+    You may convey verbatim copies of the Program's source code as you
+  receive it, in any medium, provided that you conspicuously and
+  appropriately publish on each copy an appropriate copyright notice;
+  keep intact all notices stating that this License and any
+  non-permissive terms added in accord with section 7 apply to the code;
+  keep intact all notices of the absence of any warranty; and give all
+  recipients a copy of this License along with the Program.
+
+    You may charge any price or no price for each copy that you convey,
+  and you may offer support or warranty protection for a fee.
+
+    5. Conveying Modified Source Versions.
+
+    You may convey a work based on the Program, or the modifications to
+  produce it from the Program, in the form of source code under the
+  terms of section 4, provided that you also meet all of these conditions:
+
+      a) The work must carry prominent notices stating that you modified
+      it, and giving a relevant date.
+
+      b) The work must carry prominent notices stating that it is
+      released under this License and any conditions added under section
+      7.  This requirement modifies the requirement in section 4 to
+      "keep intact all notices".
+
+      c) You must license the entire work, as a whole, under this
+      License to anyone who comes into possession of a copy.  This
+      License will therefore apply, along with any applicable section 7
+      additional terms, to the whole of the work, and all its parts,
+      regardless of how they are packaged.  This License gives no
+      permission to license the work in any other way, but it does not
+      invalidate such permission if you have separately received it.
+
+      d) If the work has interactive user interfaces, each must display
+      Appropriate Legal Notices; however, if the Program has interactive
+      interfaces that do not display Appropriate Legal Notices, your
+      work need not make them do so.
+
+    A compilation of a covered work with other separate and independent
+  works, which are not by their nature extensions of the covered work,
+  and which are not combined with it such as to form a larger program,
+  in or on a volume of a storage or distribution medium, is called an
+  "aggregate" if the compilation and its resulting copyright are not
+  used to limit the access or legal rights of the compilation's users
+  beyond what the individual works permit.  Inclusion of a covered work
+  in an aggregate does not cause this License to apply to the other
+  parts of the aggregate.
+
+    6. Conveying Non-Source Forms.
+
+    You may convey a covered work in object code form under the terms
+  of sections 4 and 5, provided that you also convey the
+  machine-readable Corresponding Source under the terms of this License,
+  in one of these ways:
+
+      a) Convey the object code in, or embodied in, a physical product
+      (including a physical distribution medium), accompanied by the
+      Corresponding Source fixed on a durable physical medium
+      customarily used for software interchange.
+
+      b) Convey the object code in, or embodied in, a physical product
+      (including a physical distribution medium), accompanied by a
+      written offer, valid for at least three years and valid for as
+      long as you offer spare parts or customer support for that product
+      model, to give anyone who possesses the object code either (1) a
+      copy of the Corresponding Source for all the software in the
+      product that is covered by this License, on a durable physical
+      medium customarily used for software interchange, for a price no
+      more than your reasonable cost of physically performing this
+      conveying of source, or (2) access to copy the
+      Corresponding Source from a network server at no charge.
+
+      c) Convey individual copies of the object code with a copy of the
+      written offer to provide the Corresponding Source.  This
+      alternative is allowed only occasionally and noncommercially, and
+      only if you received the object code with such an offer, in accord
+      with subsection 6b.
+
+      d) Convey the object code by offering access from a designated
+      place (gratis or for a charge), and offer equivalent access to the
+      Corresponding Source in the same way through the same place at no
+      further charge.  You need not require recipients to copy the
+      Corresponding Source along with the object code.  If the place to
+      copy the object code is a network server, the Corresponding Source
+      may be on a different server (operated by you or a third party)
+      that supports equivalent copying facilities, provided you maintain
+      clear directions next to the object code saying where to find the
+      Corresponding Source.  Regardless of what server hosts the
+      Corresponding Source, you remain obligated to ensure that it is
+      available for as long as needed to satisfy these requirements.
+
+      e) Convey the object code using peer-to-peer transmission, provided
+      you inform other peers where the object code and Corresponding
+      Source of the work are being offered to the general public at no
+      charge under subsection 6d.
+
+    A separable portion of the object code, whose source code is excluded
+  from the Corresponding Source as a System Library, need not be
+  included in conveying the object code work.
+
+    A "User Product" is either (1) a "consumer product", which means any
+  tangible personal property which is normally used for personal, family,
+  or household purposes, or (2) anything designed or sold for incorporation
+  into a dwelling.  In determining whether a product is a consumer product,
+  doubtful cases shall be resolved in favor of coverage.  For a particular
+  product received by a particular user, "normally used" refers to a
+  typical or common use of that class of product, regardless of the status
+  of the particular user or of the way in which the particular user
+  actually uses, or expects or is expected to use, the product.  A product
+  is a consumer product regardless of whether the product has substantial
+  commercial, industrial or non-consumer uses, unless such uses represent
+  the only significant mode of use of the product.
+
+    "Installation Information" for a User Product means any methods,
+  procedures, authorization keys, or other information required to install
+  and execute modified versions of a covered work in that User Product from
+  a modified version of its Corresponding Source.  The information must
+  suffice to ensure that the continued functioning of the modified object
+  code is in no case prevented or interfered with solely because
+  modification has been made.
+
+    If you convey an object code work under this section in, or with, or
+  specifically for use in, a User Product, and the conveying occurs as
+  part of a transaction in which the right of possession and use of the
+  User Product is transferred to the recipient in perpetuity or for a
+  fixed term (regardless of how the transaction is characterized), the
+  Corresponding Source conveyed under this section must be accompanied
+  by the Installation Information.  But this requirement does not apply
+  if neither you nor any third party retains the ability to install
+  modified object code on the User Product (for example, the work has
+  been installed in ROM).
+
+    The requirement to provide Installation Information does not include a
+  requirement to continue to provide support service, warranty, or updates
+  for a work that has been modified or installed by the recipient, or for
+  the User Product in which it has been modified or installed.  Access to a
+  network may be denied when the modification itself materially and
+  adversely affects the operation of the network or violates the rules and
+  protocols for communication across the network.
+
+    Corresponding Source conveyed, and Installation Information provided,
+  in accord with this section must be in a format that is publicly
+  documented (and with an implementation available to the public in
+  source code form), and must require no special password or key for
+  unpacking, reading or copying.
+
+    7. Additional Terms.
+
+    "Additional permissions" are terms that supplement the terms of this
+  License by making exceptions from one or more of its conditions.
+  Additional permissions that are applicable to the entire Program shall
+  be treated as though they were included in this License, to the extent
+  that they are valid under applicable law.  If additional permissions
+  apply only to part of the Program, that part may be used separately
+  under those permissions, but the entire Program remains governed by
+  this License without regard to the additional permissions.
+
+    When you convey a copy of a covered work, you may at your option
+  remove any additional permissions from that copy, or from any part of
+  it.  (Additional permissions may be written to require their own
+  removal in certain cases when you modify the work.)  You may place
+  additional permissions on material, added by you to a covered work,
+  for which you have or can give appropriate copyright permission.
+
+    Notwithstanding any other provision of this License, for material you
+  add to a covered work, you may (if authorized by the copyright holders of
+  that material) supplement the terms of this License with terms:
+
+      a) Disclaiming warranty or limiting liability differently from the
+      terms of sections 15 and 16 of this License; or
+
+      b) Requiring preservation of specified reasonable legal notices or
+      author attributions in that material or in the Appropriate Legal
+      Notices displayed by works containing it; or
+
+      c) Prohibiting misrepresentation of the origin of that material, or
+      requiring that modified versions of such material be marked in
+      reasonable ways as different from the original version; or
+
+      d) Limiting the use for publicity purposes of names of licensors or
+      authors of the material; or
+
+      e) Declining to grant rights under trademark law for use of some
+      trade names, trademarks, or service marks; or
+
+      f) Requiring indemnification of licensors and authors of that
+      material by anyone who conveys the material (or modified versions of
+      it) with contractual assumptions of liability to the recipient, for
+      any liability that these contractual assumptions directly impose on
+      those licensors and authors.
+
+    All other non-permissive additional terms are considered "further
+  restrictions" within the meaning of section 10.  If the Program as you
+  received it, or any part of it, contains a notice stating that it is
+  governed by this License along with a term that is a further
+  restriction, you may remove that term.  If a license document contains
+  a further restriction but permits relicensing or conveying under this
+  License, you may add to a covered work material governed by the terms
+  of that license document, provided that the further restriction does
+  not survive such relicensing or conveying.
+
+    If you add terms to a covered work in accord with this section, you
+  must place, in the relevant source files, a statement of the
+  additional terms that apply to those files, or a notice indicating
+  where to find the applicable terms.
+
+    Additional terms, permissive or non-permissive, may be stated in the
+  form of a separately written license, or stated as exceptions;
+  the above requirements apply either way.
+
+    8. Termination.
+
+    You may not propagate or modify a covered work except as expressly
+  provided under this License.  Any attempt otherwise to propagate or
+  modify it is void, and will automatically terminate your rights under
+  this License (including any patent licenses granted under the third
+  paragraph of section 11).
+
+    However, if you cease all violation of this License, then your
+  license from a particular copyright holder is reinstated (a)
+  provisionally, unless and until the copyright holder explicitly and
+  finally terminates your license, and (b) permanently, if the copyright
+  holder fails to notify you of the violation by some reasonable means
+  prior to 60 days after the cessation.
+
+    Moreover, your license from a particular copyright holder is
+  reinstated permanently if the copyright holder notifies you of the
+  violation by some reasonable means, this is the first time you have
+  received notice of violation of this License (for any work) from that
+  copyright holder, and you cure the violation prior to 30 days after
+  your receipt of the notice.
+
+    Termination of your rights under this section does not terminate the
+  licenses of parties who have received copies or rights from you under
+  this License.  If your rights have been terminated and not permanently
+  reinstated, you do not qualify to receive new licenses for the same
+  material under section 10.
+
+    9. Acceptance Not Required for Having Copies.
+
+    You are not required to accept this License in order to receive or
+  run a copy of the Program.  Ancillary propagation of a covered work
+  occurring solely as a consequence of using peer-to-peer transmission
+  to receive a copy likewise does not require acceptance.  However,
+  nothing other than this License grants you permission to propagate or
+  modify any covered work.  These actions infringe copyright if you do
+  not accept this License.  Therefore, by modifying or propagating a
+  covered work, you indicate your acceptance of this License to do so.
+
+    10. Automatic Licensing of Downstream Recipients.
+
+    Each time you convey a covered work, the recipient automatically
+  receives a license from the original licensors, to run, modify and
+  propagate that work, subject to this License.  You are not responsible
+  for enforcing compliance by third parties with this License.
+
+    An "entity transaction" is a transaction transferring control of an
+  organization, or substantially all assets of one, or subdividing an
+  organization, or merging organizations.  If propagation of a covered
+  work results from an entity transaction, each party to that
+  transaction who receives a copy of the work also receives whatever
+  licenses to the work the party's predecessor in interest had or could
+  give under the previous paragraph, plus a right to possession of the
+  Corresponding Source of the work from the predecessor in interest, if
+  the predecessor has it or can get it with reasonable efforts.
+
+    You may not impose any further restrictions on the exercise of the
+  rights granted or affirmed under this License.  For example, you may
+  not impose a license fee, royalty, or other charge for exercise of
+  rights granted under this License, and you may not initiate litigation
+  (including a cross-claim or counterclaim in a lawsuit) alleging that
+  any patent claim is infringed by making, using, selling, offering for
+  sale, or importing the Program or any portion of it.
+
+    11. Patents.
+
+    A "contributor" is a copyright holder who authorizes use under this
+  License of the Program or a work on which the Program is based.  The
+  work thus licensed is called the contributor's "contributor version".
+
+    A contributor's "essential patent claims" are all patent claims
+  owned or controlled by the contributor, whether already acquired or
+  hereafter acquired, that would be infringed by some manner, permitted
+  by this License, of making, using, or selling its contributor version,
+  but do not include claims that would be infringed only as a
+  consequence of further modification of the contributor version.  For
+  purposes of this definition, "control" includes the right to grant
+  patent sublicenses in a manner consistent with the requirements of
+  this License.
+
+    Each contributor grants you a non-exclusive, worldwide, royalty-free
+  patent license under the contributor's essential patent claims, to
+  make, use, sell, offer for sale, import and otherwise run, modify and
+  propagate the contents of its contributor version.
+
+    In the following three paragraphs, a "patent license" is any express
+  agreement or commitment, however denominated, not to enforce a patent
+  (such as an express permission to practice a patent or covenant not to
+  sue for patent infringement).  To "grant" such a patent license to a
+  party means to make such an agreement or commitment not to enforce a
+  patent against the party.
+
+    If you convey a covered work, knowingly relying on a patent license,
+  and the Corresponding Source of the work is not available for anyone
+  to copy, free of charge and under the terms of this License, through a
+  publicly available network server or other readily accessible means,
+  then you must either (1) cause the Corresponding Source to be so
+  available, or (2) arrange to deprive yourself of the benefit of the
+  patent license for this particular work, or (3) arrange, in a manner
+  consistent with the requirements of this License, to extend the patent
+  license to downstream recipients.  "Knowingly relying" means you have
+  actual knowledge that, but for the patent license, your conveying the
+  covered work in a country, or your recipient's use of the covered work
+  in a country, would infringe one or more identifiable patents in that
+  country that you have reason to believe are valid.
+
+    If, pursuant to or in connection with a single transaction or
+  arrangement, you convey, or propagate by procuring conveyance of, a
+  covered work, and grant a patent license to some of the parties
+  receiving the covered work authorizing them to use, propagate, modify
+  or convey a specific copy of the covered work, then the patent license
+  you grant is automatically extended to all recipients of the covered
+  work and works based on it.
+
+    A patent license is "discriminatory" if it does not include within
+  the scope of its coverage, prohibits the exercise of, or is
+  conditioned on the non-exercise of one or more of the rights that are
+  specifically granted under this License.  You may not convey a covered
+  work if you are a party to an arrangement with a third party that is
+  in the business of distributing software, under which you make payment
+  to the third party based on the extent of your activity of conveying
+  the work, and under which the third party grants, to any of the
+  parties who would receive the covered work from you, a discriminatory
+  patent license (a) in connection with copies of the covered work
+  conveyed by you (or copies made from those copies), or (b) primarily
+  for and in connection with specific products or compilations that
+  contain the covered work, unless you entered into that arrangement,
+  or that patent license was granted, prior to 28 March 2007.
+
+    Nothing in this License shall be construed as excluding or limiting
+  any implied license or other defenses to infringement that may
+  otherwise be available to you under applicable patent law.
+
+    12. No Surrender of Others' Freedom.
+
+    If conditions are imposed on you (whether by court order, agreement or
+  otherwise) that contradict the conditions of this License, they do not
+  excuse you from the conditions of this License.  If you cannot convey a
+  covered work so as to satisfy simultaneously your obligations under this
+  License and any other pertinent obligations, then as a consequence you may
+  not convey it at all.  For example, if you agree to terms that obligate you
+  to collect a royalty for further conveying from those to whom you convey
+  the Program, the only way you could satisfy both those terms and this
+  License would be to refrain entirely from conveying the Program.
+
+    13. Use with the GNU Affero General Public License.
+
+    Notwithstanding any other provision of this License, you have
+  permission to link or combine any covered work with a work licensed
+  under version 3 of the GNU Affero General Public License into a single
+  combined work, and to convey the resulting work.  The terms of this
+  License will continue to apply to the part which is the covered work,
+  but the special requirements of the GNU Affero General Public License,
+  section 13, concerning interaction through a network will apply to the
+  combination as such.
+
+    14. Revised Versions of this License.
+
+    The Free Software Foundation may publish revised and/or new versions of
+  the GNU General Public License from time to time.  Such new versions will
+  be similar in spirit to the present version, but may differ in detail to
+  address new problems or concerns.
+
+    Each version is given a distinguishing version number.  If the
+  Program specifies that a certain numbered version of the GNU General
+  Public License "or any later version" applies to it, you have the
+  option of following the terms and conditions either of that numbered
+  version or of any later version published by the Free Software
+  Foundation.  If the Program does not specify a version number of the
+  GNU General Public License, you may choose any version ever published
+  by the Free Software Foundation.
+
+    If the Program specifies that a proxy can decide which future
+  versions of the GNU General Public License can be used, that proxy's
+  public statement of acceptance of a version permanently authorizes you
+  to choose that version for the Program.
+
+    Later license versions may give you additional or different
+  permissions.  However, no additional obligations are imposed on any
+  author or copyright holder as a result of your choosing to follow a
+  later version.
+
+    15. Disclaimer of Warranty.
+
+    THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+  APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+  HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+  OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+  THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+  PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+  IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+  ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+    16. Limitation of Liability.
+
+    IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+  WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+  THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+  GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+  USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+  DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+  PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+  EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+  SUCH DAMAGES.
+
+    17. Interpretation of Sections 15 and 16.
+
+    If the disclaimer of warranty and limitation of liability provided
+  above cannot be given local legal effect according to their terms,
+  reviewing courts shall apply local law that most closely approximates
+  an absolute waiver of all civil liability in connection with the
+  Program, unless a warranty or assumption of liability accompanies a
+  copy of the Program in return for a fee.
+
+                       END OF TERMS AND CONDITIONS
+
+              How to Apply These Terms to Your New Programs
+
+    If you develop a new program, and you want it to be of the greatest
+  possible use to the public, the best way to achieve this is to make it
+  free software which everyone can redistribute and change under these terms.
+
+    To do so, attach the following notices to the program.  It is safest
+  to attach them to the start of each source file to most effectively
+  state the exclusion of warranty; and each file should have at least
+  the "copyright" line and a pointer to where the full notice is found.
+
+      <one line to give the program's name and a brief idea of what it does.>
+      Copyright (C) <year>  <name of author>
+
+      This program is free software: you can redistribute it and/or modify
+      it under the terms of the GNU General Public License as published by
+      the Free Software Foundation, either version 3 of the License, or
+      (at your option) any later version.
+
+      This program is distributed in the hope that it will be useful,
+      but WITHOUT ANY WARRANTY; without even the implied warranty of
+      MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+      GNU General Public License for more details.
+
+      You should have received a copy of the GNU General Public License
+      along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+  Also add information on how to contact you by electronic and paper mail.
+
+    If the program does terminal interaction, make it output a short
+  notice like this when it starts in an interactive mode:
+
+      <program>  Copyright (C) <year>  <name of author>
+      This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+      This is free software, and you are welcome to redistribute it
+      under certain conditions; type `show c' for details.
+
+  The hypothetical commands `show w' and `show c' should show the appropriate
+  parts of the General Public License.  Of course, your program's commands
+  might be different; for a GUI interface, you would use an "about box".
+
+    You should also get your employer (if you work as a programmer) or school,
+  if any, to sign a "copyright disclaimer" for the program, if necessary.
+  For more information on this, and how to apply and follow the GNU GPL, see
+  <http://www.gnu.org/licenses/>.
+
+    The GNU General Public License does not permit incorporating your program
+  into proprietary programs.  If your program is a subroutine library, you
+  may consider it more useful to permit linking proprietary applications with
+  the library.  If this is what you want to do, use the GNU Lesser General
+  Public License instead of this License.  But first, please read
+  <http://www.gnu.org/philosophy/why-not-lgpl.html>.
+
+  ----------------------------------------------------------------------
+
+                               MIT License
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in all
+  copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+
+
+
+  pulseaudio(headers only)
+
+
+  All PulseAudio source files, except as noted below, are licensed under the GNU
+  Lesser General Public License. (see file LGPL for details)
+
+  However, the server side has optional GPL dependencies.  These include the
+  libsamplerate and gdbm (core libraries), LIRC (lirc module) and FFTW (equalizer
+  module), although others may also be included in the future.  If PulseAudio is
+  compiled with these optional components, this effectively downgrades the
+  license of the server part to GPL (see the file GPL for details), exercising
+  section 3 of the LGPL.  In such circumstances, you should treat the client
+  library (libpulse) of PulseAudio as being LGPL licensed and the server part
+  (libpulsecore) as being GPL licensed.  Since the PulseAudio daemon, tests,
+  various utilities/helpers and the modules link to libpulsecore and/or the
+  aforementioned optional GPL dependencies, they are of course also GPL licensed
+  in this scenario.
+
+  In addition to this, if D-Bus support is enabled, the PulseAudio client library
+  (libpulse) MAY need to be licensed under the GPL, depending on the license
+  adopted for libdbus. libdbus is licensed under either the Academic Free
+  License 2.1 or GPL 2.0 or above. Which of these applies is your choice, and the
+  result affects the licensing of libpulse and thus, potentially, all programs
+  that link to libpulse.
+
+  Andre Adrian's echo cancellation implementation is licensed under a less
+  restrictive license - see src/modules/echo-cancel/adrian-license.txt for
+  details.
+
+  Some other files pulled into PA source (i.e. reference implementations that are
+  considered too small and stable to be treated as an external library) use the
+  more permissive MIT license. These include the device reservation DBus protocol
+  and realtime kit implementations.
+
+  A more permissive BSD-style license is used for LFE filters, see
+  src/pulsecore/filter/LICENSE.WEBKIT for details.
+
+  Additionally, a more permissive Sun license is used for code that performs
+  u-law, A-law and linear PCM conversions.
+
+  While we attempt to provide a summary here, it is the ultimate responsibility of
+  the packager to ensure the components they use in their build of PulseAudio
+  meet their license requirements.
+
+
+
+  libaom(headers only)
+
+
+  Copyright (c) 2016, Alliance for Open Media. All rights reserved.
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+  1. Redistributions of source code must retain the above copyright
+     notice, this list of conditions and the following disclaimer.
+  2. Redistributions in binary form must reproduce the above copyright
+     notice, this list of conditions and the following disclaimer in
+     the documentation and/or other materials provided with the
+     distribution.
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+  COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+  POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  FFmpeg(headers only)
+
+
+                      GNU LESSER GENERAL PUBLIC LICENSE
+                         Version 2.1, February 1999
+
+   Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+   Everyone is permitted to copy and distribute verbatim copies
+   of this license document, but changing it is not allowed.
+
+  [This is the first released version of the Lesser GPL.  It also counts
+   as the successor of the GNU Library Public License, version 2, hence
+   the version number 2.1.]
+
+                              Preamble
+
+    The licenses for most software are designed to take away your
+  freedom to share and change it.  By contrast, the GNU General Public
+  Licenses are intended to guarantee your freedom to share and change
+  free software--to make sure the software is free for all its users.
+
+    This license, the Lesser General Public License, applies to some
+  specially designated software packages--typically libraries--of the
+  Free Software Foundation and other authors who decide to use it.  You
+  can use it too, but we suggest you first think carefully about whether
+  this license or the ordinary General Public License is the better
+  strategy to use in any particular case, based on the explanations below.
+
+    When we speak of free software, we are referring to freedom of use,
+  not price.  Our General Public Licenses are designed to make sure that
+  you have the freedom to distribute copies of free software (and charge
+  for this service if you wish); that you receive source code or can get
+  it if you want it; that you can change the software and use pieces of
+  it in new free programs; and that you are informed that you can do
+  these things.
+
+    To protect your rights, we need to make restrictions that forbid
+  distributors to deny you these rights or to ask you to surrender these
+  rights.  These restrictions translate to certain responsibilities for
+  you if you distribute copies of the library or if you modify it.
+
+    For example, if you distribute copies of the library, whether gratis
+  or for a fee, you must give the recipients all the rights that we gave
+  you.  You must make sure that they, too, receive or can get the source
+  code.  If you link other code with the library, you must provide
+  complete object files to the recipients, so that they can relink them
+  with the library after making changes to the library and recompiling
+  it.  And you must show them these terms so they know their rights.
+
+    We protect your rights with a two-step method: (1) we copyright the
+  library, and (2) we offer you this license, which gives you legal
+  permission to copy, distribute and/or modify the library.
+
+    To protect each distributor, we want to make it very clear that
+  there is no warranty for the free library.  Also, if the library is
+  modified by someone else and passed on, the recipients should know
+  that what they have is not the original version, so that the original
+  author's reputation will not be affected by problems that might be
+  introduced by others.
+
+    Finally, software patents pose a constant threat to the existence of
+  any free program.  We wish to make sure that a company cannot
+  effectively restrict the users of a free program by obtaining a
+  restrictive license from a patent holder.  Therefore, we insist that
+  any patent license obtained for a version of the library must be
+  consistent with the full freedom of use specified in this license.
+
+    Most GNU software, including some libraries, is covered by the
+  ordinary GNU General Public License.  This license, the GNU Lesser
+  General Public License, applies to certain designated libraries, and
+  is quite different from the ordinary General Public License.  We use
+  this license for certain libraries in order to permit linking those
+  libraries into non-free programs.
+
+    When a program is linked with a library, whether statically or using
+  a shared library, the combination of the two is legally speaking a
+  combined work, a derivative of the original library.  The ordinary
+  General Public License therefore permits such linking only if the
+  entire combination fits its criteria of freedom.  The Lesser General
+  Public License permits more lax criteria for linking other code with
+  the library.
+
+    We call this license the "Lesser" General Public License because it
+  does Less to protect the user's freedom than the ordinary General
+  Public License.  It also provides other free software developers Less
+  of an advantage over competing non-free programs.  These disadvantages
+  are the reason we use the ordinary General Public License for many
+  libraries.  However, the Lesser license provides advantages in certain
+  special circumstances.
+
+    For example, on rare occasions, there may be a special need to
+  encourage the widest possible use of a certain library, so that it becomes
+  a de-facto standard.  To achieve this, non-free programs must be
+  allowed to use the library.  A more frequent case is that a free
+  library does the same job as widely used non-free libraries.  In this
+  case, there is little to gain by limiting the free library to free
+  software only, so we use the Lesser General Public License.
+
+    In other cases, permission to use a particular library in non-free
+  programs enables a greater number of people to use a large body of
+  free software.  For example, permission to use the GNU C Library in
+  non-free programs enables many more people to use the whole GNU
+  operating system, as well as its variant, the GNU/Linux operating
+  system.
+
+    Although the Lesser General Public License is Less protective of the
+  users' freedom, it does ensure that the user of a program that is
+  linked with the Library has the freedom and the wherewithal to run
+  that program using a modified version of the Library.
+
+    The precise terms and conditions for copying, distribution and
+  modification follow.  Pay close attention to the difference between a
+  "work based on the library" and a "work that uses the library".  The
+  former contains code derived from the library, whereas the latter must
+  be combined with the library in order to run.
+
+                    GNU LESSER GENERAL PUBLIC LICENSE
+     TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+    0. This License Agreement applies to any software library or other
+  program which contains a notice placed by the copyright holder or
+  other authorized party saying it may be distributed under the terms of
+  this Lesser General Public License (also called "this License").
+  Each licensee is addressed as "you".
+
+    A "library" means a collection of software functions and/or data
+  prepared so as to be conveniently linked with application programs
+  (which use some of those functions and data) to form executables.
+
+    The "Library", below, refers to any such software library or work
+  which has been distributed under these terms.  A "work based on the
+  Library" means either the Library or any derivative work under
+  copyright law: that is to say, a work containing the Library or a
+  portion of it, either verbatim or with modifications and/or translated
+  straightforwardly into another language.  (Hereinafter, translation is
+  included without limitation in the term "modification".)
+
+    "Source code" for a work means the preferred form of the work for
+  making modifications to it.  For a library, complete source code means
+  all the source code for all modules it contains, plus any associated
+  interface definition files, plus the scripts used to control compilation
+  and installation of the library.
+
+    Activities other than copying, distribution and modification are not
+  covered by this License; they are outside its scope.  The act of
+  running a program using the Library is not restricted, and output from
+  such a program is covered only if its contents constitute a work based
+  on the Library (independent of the use of the Library in a tool for
+  writing it).  Whether that is true depends on what the Library does
+  and what the program that uses the Library does.
+
+    1. You may copy and distribute verbatim copies of the Library's
+  complete source code as you receive it, in any medium, provided that
+  you conspicuously and appropriately publish on each copy an
+  appropriate copyright notice and disclaimer of warranty; keep intact
+  all the notices that refer to this License and to the absence of any
+  warranty; and distribute a copy of this License along with the
+  Library.
+
+    You may charge a fee for the physical act of transferring a copy,
+  and you may at your option offer warranty protection in exchange for a
+  fee.
+
+    2. You may modify your copy or copies of the Library or any portion
+  of it, thus forming a work based on the Library, and copy and
+  distribute such modifications or work under the terms of Section 1
+  above, provided that you also meet all of these conditions:
+
+      a) The modified work must itself be a software library.
+
+      b) You must cause the files modified to carry prominent notices
+      stating that you changed the files and the date of any change.
+
+      c) You must cause the whole of the work to be licensed at no
+      charge to all third parties under the terms of this License.
+
+      d) If a facility in the modified Library refers to a function or a
+      table of data to be supplied by an application program that uses
+      the facility, other than as an argument passed when the facility
+      is invoked, then you must make a good faith effort to ensure that,
+      in the event an application does not supply such function or
+      table, the facility still operates, and performs whatever part of
+      its purpose remains meaningful.
+
+      (For example, a function in a library to compute square roots has
+      a purpose that is entirely well-defined independent of the
+      application.  Therefore, Subsection 2d requires that any
+      application-supplied function or table used by this function must
+      be optional: if the application does not supply it, the square
+      root function must still compute square roots.)
+
+  These requirements apply to the modified work as a whole.  If
+  identifiable sections of that work are not derived from the Library,
+  and can be reasonably considered independent and separate works in
+  themselves, then this License, and its terms, do not apply to those
+  sections when you distribute them as separate works.  But when you
+  distribute the same sections as part of a whole which is a work based
+  on the Library, the distribution of the whole must be on the terms of
+  this License, whose permissions for other licensees extend to the
+  entire whole, and thus to each and every part regardless of who wrote
+  it.
+
+  Thus, it is not the intent of this section to claim rights or contest
+  your rights to work written entirely by you; rather, the intent is to
+  exercise the right to control the distribution of derivative or
+  collective works based on the Library.
+
+  In addition, mere aggregation of another work not based on the Library
+  with the Library (or with a work based on the Library) on a volume of
+  a storage or distribution medium does not bring the other work under
+  the scope of this License.
+
+    3. You may opt to apply the terms of the ordinary GNU General Public
+  License instead of this License to a given copy of the Library.  To do
+  this, you must alter all the notices that refer to this License, so
+  that they refer to the ordinary GNU General Public License, version 2,
+  instead of to this License.  (If a newer version than version 2 of the
+  ordinary GNU General Public License has appeared, then you can specify
+  that version instead if you wish.)  Do not make any other change in
+  these notices.
+
+    Once this change is made in a given copy, it is irreversible for
+  that copy, so the ordinary GNU General Public License applies to all
+  subsequent copies and derivative works made from that copy.
+
+    This option is useful when you wish to copy part of the code of
+  the Library into a program that is not a library.
+
+    4. You may copy and distribute the Library (or a portion or
+  derivative of it, under Section 2) in object code or executable form
+  under the terms of Sections 1 and 2 above provided that you accompany
+  it with the complete corresponding machine-readable source code, which
+  must be distributed under the terms of Sections 1 and 2 above on a
+  medium customarily used for software interchange.
+
+    If distribution of object code is made by offering access to copy
+  from a designated place, then offering equivalent access to copy the
+  source code from the same place satisfies the requirement to
+  distribute the source code, even though third parties are not
+  compelled to copy the source along with the object code.
+
+    5. A program that contains no derivative of any portion of the
+  Library, but is designed to work with the Library by being compiled or
+  linked with it, is called a "work that uses the Library".  Such a
+  work, in isolation, is not a derivative work of the Library, and
+  therefore falls outside the scope of this License.
+
+    However, linking a "work that uses the Library" with the Library
+  creates an executable that is a derivative of the Library (because it
+  contains portions of the Library), rather than a "work that uses the
+  library".  The executable is therefore covered by this License.
+  Section 6 states terms for distribution of such executables.
+
+    When a "work that uses the Library" uses material from a header file
+  that is part of the Library, the object code for the work may be a
+  derivative work of the Library even though the source code is not.
+  Whether this is true is especially significant if the work can be
+  linked without the Library, or if the work is itself a library.  The
+  threshold for this to be true is not precisely defined by law.
+
+    If such an object file uses only numerical parameters, data
+  structure layouts and accessors, and small macros and small inline
+  functions (ten lines or less in length), then the use of the object
+  file is unrestricted, regardless of whether it is legally a derivative
+  work.  (Executables containing this object code plus portions of the
+  Library will still fall under Section 6.)
+
+    Otherwise, if the work is a derivative of the Library, you may
+  distribute the object code for the work under the terms of Section 6.
+  Any executables containing that work also fall under Section 6,
+  whether or not they are linked directly with the Library itself.
+
+    6. As an exception to the Sections above, you may also combine or
+  link a "work that uses the Library" with the Library to produce a
+  work containing portions of the Library, and distribute that work
+  under terms of your choice, provided that the terms permit
+  modification of the work for the customer's own use and reverse
+  engineering for debugging such modifications.
+
+    You must give prominent notice with each copy of the work that the
+  Library is used in it and that the Library and its use are covered by
+  this License.  You must supply a copy of this License.  If the work
+  during execution displays copyright notices, you must include the
+  copyright notice for the Library among them, as well as a reference
+  directing the user to the copy of this License.  Also, you must do one
+  of these things:
+
+      a) Accompany the work with the complete corresponding
+      machine-readable source code for the Library including whatever
+      changes were used in the work (which must be distributed under
+      Sections 1 and 2 above); and, if the work is an executable linked
+      with the Library, with the complete machine-readable "work that
+      uses the Library", as object code and/or source code, so that the
+      user can modify the Library and then relink to produce a modified
+      executable containing the modified Library.  (It is understood
+      that the user who changes the contents of definitions files in the
+      Library will not necessarily be able to recompile the application
+      to use the modified definitions.)
+
+      b) Use a suitable shared library mechanism for linking with the
+      Library.  A suitable mechanism is one that (1) uses at run time a
+      copy of the library already present on the user's computer system,
+      rather than copying library functions into the executable, and (2)
+      will operate properly with a modified version of the library, if
+      the user installs one, as long as the modified version is
+      interface-compatible with the version that the work was made with.
+
+      c) Accompany the work with a written offer, valid for at
+      least three years, to give the same user the materials
+      specified in Subsection 6a, above, for a charge no more
+      than the cost of performing this distribution.
+
+      d) If distribution of the work is made by offering access to copy
+      from a designated place, offer equivalent access to copy the above
+      specified materials from the same place.
+
+      e) Verify that the user has already received a copy of these
+      materials or that you have already sent this user a copy.
+
+    For an executable, the required form of the "work that uses the
+  Library" must include any data and utility programs needed for
+  reproducing the executable from it.  However, as a special exception,
+  the materials to be distributed need not include anything that is
+  normally distributed (in either source or binary form) with the major
+  components (compiler, kernel, and so on) of the operating system on
+  which the executable runs, unless that component itself accompanies
+  the executable.
+
+    It may happen that this requirement contradicts the license
+  restrictions of other proprietary libraries that do not normally
+  accompany the operating system.  Such a contradiction means you cannot
+  use both them and the Library together in an executable that you
+  distribute.
+
+    7. You may place library facilities that are a work based on the
+  Library side-by-side in a single library together with other library
+  facilities not covered by this License, and distribute such a combined
+  library, provided that the separate distribution of the work based on
+  the Library and of the other library facilities is otherwise
+  permitted, and provided that you do these two things:
+
+      a) Accompany the combined library with a copy of the same work
+      based on the Library, uncombined with any other library
+      facilities.  This must be distributed under the terms of the
+      Sections above.
+
+      b) Give prominent notice with the combined library of the fact
+      that part of it is a work based on the Library, and explaining
+      where to find the accompanying uncombined form of the same work.
+
+    8. You may not copy, modify, sublicense, link with, or distribute
+  the Library except as expressly provided under this License.  Any
+  attempt otherwise to copy, modify, sublicense, link with, or
+  distribute the Library is void, and will automatically terminate your
+  rights under this License.  However, parties who have received copies,
+  or rights, from you under this License will not have their licenses
+  terminated so long as such parties remain in full compliance.
+
+    9. You are not required to accept this License, since you have not
+  signed it.  However, nothing else grants you permission to modify or
+  distribute the Library or its derivative works.  These actions are
+  prohibited by law if you do not accept this License.  Therefore, by
+  modifying or distributing the Library (or any work based on the
+  Library), you indicate your acceptance of this License to do so, and
+  all its terms and conditions for copying, distributing or modifying
+  the Library or works based on it.
+
+    10. Each time you redistribute the Library (or any work based on the
+  Library), the recipient automatically receives a license from the
+  original licensor to copy, distribute, link with or modify the Library
+  subject to these terms and conditions.  You may not impose any further
+  restrictions on the recipients' exercise of the rights granted herein.
+  You are not responsible for enforcing compliance by third parties with
+  this License.
+
+    11. If, as a consequence of a court judgment or allegation of patent
+  infringement or for any other reason (not limited to patent issues),
+  conditions are imposed on you (whether by court order, agreement or
+  otherwise) that contradict the conditions of this License, they do not
+  excuse you from the conditions of this License.  If you cannot
+  distribute so as to satisfy simultaneously your obligations under this
+  License and any other pertinent obligations, then as a consequence you
+  may not distribute the Library at all.  For example, if a patent
+  license would not permit royalty-free redistribution of the Library by
+  all those who receive copies directly or indirectly through you, then
+  the only way you could satisfy both it and this License would be to
+  refrain entirely from distribution of the Library.
+
+  If any portion of this section is held invalid or unenforceable under any
+  particular circumstance, the balance of the section is intended to apply,
+  and the section as a whole is intended to apply in other circumstances.
+
+  It is not the purpose of this section to induce you to infringe any
+  patents or other property right claims or to contest validity of any
+  such claims; this section has the sole purpose of protecting the
+  integrity of the free software distribution system which is
+  implemented by public license practices.  Many people have made
+  generous contributions to the wide range of software distributed
+  through that system in reliance on consistent application of that
+  system; it is up to the author/donor to decide if he or she is willing
+  to distribute software through any other system and a licensee cannot
+  impose that choice.
+
+  This section is intended to make thoroughly clear what is believed to
+  be a consequence of the rest of this License.
+
+    12. If the distribution and/or use of the Library is restricted in
+  certain countries either by patents or by copyrighted interfaces, the
+  original copyright holder who places the Library under this License may add
+  an explicit geographical distribution limitation excluding those countries,
+  so that distribution is permitted only in or among countries not thus
+  excluded.  In such case, this License incorporates the limitation as if
+  written in the body of this License.
+
+    13. The Free Software Foundation may publish revised and/or new
+  versions of the Lesser General Public License from time to time.
+  Such new versions will be similar in spirit to the present version,
+  but may differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the Library
+  specifies a version number of this License which applies to it and
+  "any later version", you have the option of following the terms and
+  conditions either of that version or of any later version published by
+  the Free Software Foundation.  If the Library does not specify a
+  license version number, you may choose any version ever published by
+  the Free Software Foundation.
+
+    14. If you wish to incorporate parts of the Library into other free
+  programs whose distribution conditions are incompatible with these,
+  write to the author to ask for permission.  For software which is
+  copyrighted by the Free Software Foundation, write to the Free
+  Software Foundation; we sometimes make exceptions for this.  Our
+  decision will be guided by the two goals of preserving the free status
+  of all derivatives of our free software and of promoting the sharing
+  and reuse of software generally.
+
+                              NO WARRANTY
+
+    15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+  WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+  OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+  KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+  PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+  LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+  THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+    16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+  WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+  AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+  FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+  CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+  LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+  RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+  FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+  SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+  DAMAGES.
+
+                       END OF TERMS AND CONDITIONS
+
+             How to Apply These Terms to Your New Libraries
+
+    If you develop a new library, and you want it to be of the greatest
+  possible use to the public, we recommend making it free software that
+  everyone can redistribute and change.  You can do so by permitting
+  redistribution under these terms (or, alternatively, under the terms of the
+  ordinary General Public License).
+
+    To apply these terms, attach the following notices to the library.  It is
+  safest to attach them to the start of each source file to most effectively
+  convey the exclusion of warranty; and each file should have at least the
+  "copyright" line and a pointer to where the full notice is found.
+
+      <one line to give the library's name and a brief idea of what it does.>
+      Copyright (C) <year>  <name of author>
+
+      This library is free software; you can redistribute it and/or
+      modify it under the terms of the GNU Lesser General Public
+      License as published by the Free Software Foundation; either
+      version 2.1 of the License, or (at your option) any later version.
+
+      This library is distributed in the hope that it will be useful,
+      but WITHOUT ANY WARRANTY; without even the implied warranty of
+      MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+      Lesser General Public License for more details.
+
+      You should have received a copy of the GNU Lesser General Public
+      License along with this library; if not, write to the Free Software
+      Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+  Also add information on how to contact you by electronic and paper mail.
+
+  You should also get your employer (if you work as a programmer) or your
+  school, if any, to sign a "copyright disclaimer" for the library, if
+  necessary.  Here is a sample; alter the names:
+
+    Yoyodyne, Inc., hereby disclaims all copyright interest in the
+    library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+    <signature of Ty Coon>, 1 April 1990
+    Ty Coon, President of Vice
+
+
+
+  flac
+
+
+  Copyright (C) 2000,2001,2002,2003,2004,2005,2006,2007  Josh Coalson
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+
+  - Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+  - Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+  - Neither the name of the Xiph.org Foundation nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR
+  CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  angle
+
+
+  // Copyright 2018 The ANGLE Project Authors.
+  // All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions
+  // are met:
+  //
+  //     Redistributions of source code must retain the above copyright
+  //     notice, this list of conditions and the following disclaimer.
+  //
+  //     Redistributions in binary form must reproduce the above
+  //     copyright notice, this list of conditions and the following
+  //     disclaimer in the documentation and/or other materials provided
+  //     with the distribution.
+  //
+  //     Neither the name of TransGaming Inc., Google Inc., 3DLabs Inc.
+  //     Ltd., nor the names of their contributors may be used to endorse
+  //     or promote products derived from this software without specific
+  //     prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+  // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+  // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+  // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+  // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+  // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+  // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+  // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+  // POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  quirc
+
+
+  quirc -- QR-code recognition library
+  Copyright (C) 2010-2012 Daniel Beer <dlbeer@gmail.com>
+
+  Permission to use, copy, modify, and/or distribute this software for
+  any purpose with or without fee is hereby granted, provided that the
+  above copyright notice and this permission notice appear in all
+  copies.
+
+  THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
+  WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+  WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+  AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+  DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
+  PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+  TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+  PERFORMANCE OF THIS SOFTWARE.
+
+
+
+  markupsafe
+
+
+  Copyright (c) 2010 by Armin Ronacher and contributors.  See AUTHORS
+  for more details.
+
+  Some rights reserved.
+
+  Redistribution and use in source and binary forms of the software as well
+  as documentation, with or without modification, are permitted provided
+  that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following
+    disclaimer in the documentation and/or other materials provided
+    with the distribution.
+
+  * The names of the contributors may not be used to endorse or
+    promote products derived from this software without specific
+    prior written permission.
+
+  THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+  NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+  OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+  DAMAGE.
+
+
+
+  websocket-client
+
+
+  Copyright 2018 Hiroki Ohtani.
+
+  Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+  1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+  2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+  3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  pyjson5
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+  proxy_py
+
+
+  Copyright (c) 2013-2018 by Abhinav Singh and contributors.
+
+  Some rights reserved.
+
+  Redistribution and use in source and binary forms of the software as well
+  as documentation, with or without modification, are permitted provided
+  that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following
+    disclaimer in the documentation and/or other materials provided
+    with the distribution.
+
+  * The names of the contributors may not be used to endorse or
+    promote products derived from this software without specific
+    prior written permission.
+
+  THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+  CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+  NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+  OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+  DAMAGE.
+
+
+
+  web platform tests
+
+
+  This repository is covered by the dual-licensing approach described in:
+
+    http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html
+
+
+
+  ply
+
+
+  PLY (Python Lex-Yacc)                   Version 3.4
+
+  Copyright (C) 2001-2011,
+  David M. Beazley (Dabeaz LLC)
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+  * Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+  * Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+  * Neither the name of the David Beazley or Dabeaz LLC may be used to
+    endorse or promote products derived from this software without
+    specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  khronos
+
+
+  Copyright (c) 2007-2010 The Khronos Group Inc.
+
+  Permission is hereby granted, free of charge, to any person obtaining a
+  copy of this software and/or associated documentation files (the
+  "Materials"), to deal in the Materials without restriction, including
+  without limitation the rights to use, copy, modify, merge, publish,
+  distribute, sublicense, and/or sell copies of the Materials, and to
+  permit persons to whom the Materials are furnished to do so, subject to
+  the following conditions:
+
+  The above copyright notice and this permission notice shall be included
+  in all copies or substantial portions of the Materials.
+
+  THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+  CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+  TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+  MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+
+
+  SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
+
+  Copyright (C) 1992 Silicon Graphics, Inc. All Rights Reserved.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy of
+  this software and associated documentation files (the "Software"), to deal in
+  the Software without restriction, including without limitation the rights to
+  use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+  of the Software, and to permit persons to whom the Software is furnished to do
+  so, subject to the following conditions:
+
+  The above copyright notice including the dates of first publication and either
+  this permission notice or a reference to http://oss.sgi.com/projects/FreeB/
+  shall be included in all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON
+  GRAPHICS, INC. BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+  AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+  Except as contained in this notice, the name of Silicon Graphics, Inc. shall
+  not be used in advertising or otherwise to promote the sale, use or other
+  dealings in this Software without prior written authorization from Silicon
+  Graphics, Inc.
+
+
+
+  google_benchmark
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+  crashpad
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+
+  libdav1d
+
+
+  Copyright © 2018-2019, VideoLAN and dav1d authors
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are met:
+
+  1. Redistributions of source code must retain the above copyright notice, this
+     list of conditions and the following disclaimer.
+
+  2. Redistributions in binary form must reproduce the above copyright notice,
+     this list of conditions and the following disclaimer in the documentation
+     and/or other materials provided with the distribution.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  linux-syscall-support
+
+
+  Copyright (c) 2005-2011, Google Inc.
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+  * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+  * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+  * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+  ---
+  Author: Markus Gutschke
+
+
+
+  mini_chromium
+
+
+  // Copyright 2006-2008 The Chromium Authors. All rights reserved.
+  //
+  // Redistribution and use in source and binary forms, with or without
+  // modification, are permitted provided that the following conditions are
+  // met:
+  //
+  //    * Redistributions of source code must retain the above copyright
+  // notice, this list of conditions and the following disclaimer.
+  //    * Redistributions in binary form must reproduce the above
+  // copyright notice, this list of conditions and the following disclaimer
+  // in the documentation and/or other materials provided with the
+  // distribution.
+  //    * Neither the name of Google Inc. nor the names of its
+  // contributors may be used to endorse or promote products derived from
+  // this software without specific prior written permission.
+  //
+  // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  googletest
+
+
+  Copyright 2008, Google Inc.
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+      * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+      * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+      * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+  lz4_lib
+
+
+  LZ4 Library
+  Copyright (c) 2011-2016, Yann Collet
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without modification,
+  are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice, this
+    list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright notice, this
+    list of conditions and the following disclaimer in the documentation and/or
+    other materials provided with the distribution.
+
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+  ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+  (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+  ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/cobalt/cssom/viewport_size.h b/src/cobalt/cssom/viewport_size.h
index 040d414..78e82cc 100644
--- a/src/cobalt/cssom/viewport_size.h
+++ b/src/cobalt/cssom/viewport_size.h
@@ -20,8 +20,8 @@
 namespace cobalt {
 namespace cssom {
 
-// ViewportSize represents a screen. It differs from from a math::Size
-// structure in order to hold the a diagonal_inches_ value which is
+// ViewportSize represents a screen. It differs from a math::Size
+// structure in order to hold a diagonal_inches_ value which is
 // necessary to calculate the DPI.
 class ViewportSize {
  public:
@@ -53,7 +53,7 @@
 
   // Ratio of CSS pixels per device pixel, matching the devicePixelRatio
   // attribute.
-  //   https://www.w3.org/TR/cssom-view-1/#dom-window-devicepixelratio
+  //   https://www.w3.org/TR/2016/WD-cssom-view-1-20160317/#dom-window-devicepixelratio
   float device_pixel_ratio_ = 1.0f;
 };
 
diff --git a/src/cobalt/doc/lifecycle.md b/src/cobalt/doc/lifecycle.md
index b9b9373..6de3346 100644
--- a/src/cobalt/doc/lifecycle.md
+++ b/src/cobalt/doc/lifecycle.md
@@ -4,88 +4,185 @@
 implements a well-defined web application lifecycle, managing resources and
 notifying the application as appropriate.
 
+## Summary of changes in Cobalt 22
+
+Starting with Cobalt 22, the application lifecycle changes as follows:
+
+### States:
+
+* The *Paused* state is renamed to *Blurred*.
+* The *Suspended* state is replaced by *Concealed* and *Frozen*.
+* The *Preloading* state is removed, and *Concealed* is used instead.
+  Note: The corresponding attribute value 'prerender' for
+  document.visibilityState is also removed.
+
+The new *Concealed* state is used for applications that are not visible but
+may use CPU or network resources. It both replaces the *Preloading* state and
+serves as an intermediate state between *Blurred* and *Frozen*.
+
+The *Frozen* state most closely resembles the previous *Suspended* state,
+during which applications do not have network access.
+
+### State Changes:
+
+* The *Pause* event is renamed to *Blur*.
+* The *Unpause* event is renamed to *Focus*.
+* The *Suspend* event is replaced by *Conceal* and *Freeze*.
+* The *Resume* event is replaced by *Unfreeze* and *Reveal*.
+
+Most platforms should only need to replace 'Pause' with 'Blur', 'Unpause' with
+'Focus', 'Suspend' with 'Freeze', and 'Resume' with 'Reveal'.
+
+Since there is no longer a special *Preloading* state, applications should no
+longer use the *Start* event when a preloaded application is brought to the
+foreground. Instead, the same event(s) used to bring backgrounded
+applications (*Concealed* or *Frozen*) to the foreground should be used.
+
+### Application 'Backgrounding' and 'Foregrounding'.
+
+To signal that the application is being 'backgrounded', the use of *Suspend*
+should be replaced with *Freeze*.
+
+To signal that the application is being 'foregrounded', the use of *Unpause*
+should be replaced with *Focus*.
+
+Note: If a platform is using *Resume* (*Reveal*) to signal that an application
+is being 'foregrounded', then that may result in unexpected application
+behavior, unless a subsequent *Unpause* (*Focus*) is also used when the
+application receives input focus.
+
+More details about lifecycle states and state changes can be found in
+`src/starboard/event.h`.
+
+### Deprecated `SbEventType` values.
+
+The `SbEventType` enum is defined in `src/starboard/event.h`.
+
+* The `kSbEventTypePause` value is renamed to `kSbEventTypeBlur`.
+* The `kSbEventTypeUnpause` value is renamed to `kSbEventTypeFocus`.
+* The `kSbEventTypeSuspend` value is replaced by `kSbEventTypeConceal` and
+  `kSbEventTypeFreeze`.
+* The `kSbEventTypeResume` value is replaced by `kSbEventTypeUnfreeze` and
+  `kSbEventTypeReveal`.
+
+The corresponding helper functions in
+`starboard::shared::starboard::Application` (implemented in
+`starboard/shared/starboard/application.cc`) that inject events with these
+values have been updated to match:
+
+* The `Pause()` method is renamed to `Blur()`.
+* The `Unpause()` method is renamed to `Focus()`.
+* The `Suspend()` method is replaced by `Conceal()` and `Freeze()`.
+* The `Resume()` method is replaced by `Unfreeze()` and `Reveal()`.
+
+Platforms that inject events themselves should be updated to use the renamed
+event type values, and platforms that use the helper functions should be
+updated to call the corresponding renamed helper functions.
+
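+As a minimal sketch, platform code that previously injected *Suspend* or
+*Unpause* through these helpers might now look like the following. The
+surrounding functions and the `(NULL, NULL)` context/callback arguments are
+illustrative assumptions, not code from the Cobalt tree; `MyApplication`
+stands for a `starboard::shared::starboard::Application` subclass.
+
+``` c++
+// Hypothetical platform glue routing lifecycle signals through the renamed
+// helper functions.
+void OnPlatformBackgrounded(MyApplication* app) {
+  // Before Cobalt 22: app->Suspend(NULL, NULL);
+  app->Freeze(NULL, NULL);
+}
+
+void OnPlatformForegrounded(MyApplication* app) {
+  // Before Cobalt 22: app->Unpause(NULL, NULL);
+  app->Focus(NULL, NULL);
+}
+```
+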
+### Deprecated `SbSystemRequest` functions.
+
+The `SbSystemRequest` functions are declared in `src/starboard/system.h`.
+
+* The `SbSystemRequestPause` function is renamed to `SbSystemRequestBlur`.
+* The `SbSystemRequestUnpause` function is renamed to `SbSystemRequestFocus`.
+* The `SbSystemRequestSuspend` function is replaced by
+  `SbSystemRequestConceal` and `SbSystemRequestFreeze`.
+* The `SbSystemRequestResume` function is replaced by
+  `SbSystemRequestUnfreeze` and `SbSystemRequestReveal`.
+
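+As a sketch, assuming these request functions keep their no-argument
+signatures, a platform handler that previously requested *Suspend* would now
+request *Conceal* (the handler name below is an assumption for illustration):
+
+``` c++
+#include "starboard/system.h"
+
+// Hypothetical handler for a system-initiated "hide" notification.
+void OnSystemHideNotification() {
+  // Before Cobalt 22: SbSystemRequestSuspend();
+  SbSystemRequestConceal();
+}
+```
+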
 ## Application States
 
 Starboard Application State | Page Visibility State | Window Focused
 :-------------------------- | :-------------------- | :-------------
-*Preloading*                | prerender             | false
 *Started*                   | visible               | true
-*Paused*                    | visible               | false
-*Suspended*                 | hidden                | false
+*Blurred*                   | visible               | false
+*Concealed*                 | hidden                | false
+*Frozen*                    | hidden                | false
 
-### Preloading
-
-The application is not visible, and will receive no input, but is running. Only
-possible to enter as the start state. May transition to *Started* or *Suspended*
-at any time.
-
-#### Expectations for the web application
-
-Initialize as much as possible to get to an interactive state. There is no
-official signal for an application that has finished preloading.
-
-#### Expectations for the porter
-
-For applications that can be preloaded, the platform should send
-`kSbEventTypePreload` as the first Starboard event instead of
-`kSbEventTypeStart`. `src/starboard/shared/starboard/application.cc` subclasses
-can opt-in to already implemented support for the `--preload` command-line
-switch.
-
-The platform should then send `kSbEventTypeStart` when the application is first
-brought to the foreground. In Linux desktop (linux-x64x11), this can be done by
-sending a `SIGCONT` to the process that is in the *Preloading* state.
-
-If the platform wants to only give applications a certain amount of time to
-preload, they can send `kSbEventTypeSuspend` to halt preloading and move to the
-*Suspended* state. In Linux desktop, this can be done by sending SIGUSR1 to the
-process that is in the *Preloading* state.
+When transitioning between *Concealed* and *Frozen*, the document.onfreeze and
+document.onresume events from the Page Lifecycle Web API will be dispatched.
 
 ### Started
 
 The application is running, visible, and interactive. The normal foreground
-application state. May be the start state, can be entered from *Preloading*, or
-*Paused*.
+application state. May be the start state, or can be entered from *Blurred*.
 
-May only transition to *Paused*. In Linux desktop, this happens anytime the
-top-level Cobalt X11 window loses focus. Linux transition back to *Started* when
-the top-level Cobalt X11 window gains focus again.
+May only transition to *Blurred*. In Linux desktop, this happens anytime the
+top-level Cobalt X11 window loses focus. Linux transitions back to *Started*
+when the top-level Cobalt X11 window gains focus again.
 
-### Paused
+### Blurred
 
-The application may be fully visible, partially visible, or completely obscured,
-but it has lost input focus, so will receive no input events. It has been
-allowed to retain all its resources for a very quick return to *Started*, and
-the application is still running. May be entered from or transition to *Started*
-or *Suspended*.
+The application may be fully visible, partially visible, or completely
+obscured, but it has lost input focus, so will receive no input events. It has
+been allowed to retain all its resources for a very quick return to *Started*,
+and the application is still running. May be entered from or transition to
+*Started* or *Concealed* at any time.
 
-### Suspended
+### Concealed
 
-The application is not visible, and, once *Suspended*, will not run any
-code. All graphics and media resources will be revoked until resumed, so the
-application should expect all images to be lost, all caches to be cleared, and
-all network requests to be aborted. The application may be terminated in this
-state without notification.
+The application is not visible and will receive no input, but is running. Can
+be entered as the start state. May be entered from or transition to *Blurred*
+or *Frozen* at any time. The application may be terminated in this state
+without notification.
+
+Upon entering, all graphics resources will be revoked until revealed, so the
+application should expect all images to be lost, and all caches to be cleared.
 
 #### Expectations for the web application
 
 The application should **shut down** playback, releasing resources. On resume,
-all resources need to be reloaded, and playback should be reinitialized where it
-left off, or at the nearest key frame.
+all resources need to be reloaded, and playback should be reinitialized where
+it left off, or at the nearest key frame.
+
+### Frozen
+
+The application is not visible and will receive no input, and, once *Frozen*,
+will not run any code. May be entered from or transition to *Concealed* at any
+time. The application may be terminated in this state without notification.
+
+Upon entering, all graphics and media resources will be revoked until resumed,
+so the application should expect all images to be lost, all caches to be
+cleared, and all network requests to be aborted.
 
 #### Expectations for the porter
 
-The platform Starboard implementation **must always** send events in the
-prescribed order - meaning, for example, that it should never send a
-`kSbEventTypeSuspend` event unless in the *Preloading* or *Paused* states.
-
 Currently, Cobalt does not manually stop JavaScript execution when it goes into
-the *Suspended* state. In Linux desktop, it expects that a `SIGSTOP` will be
-raised, causing all the threads not to get any more CPU time until resumed. This
-will be fixed in a future version of Cobalt.
+the *Frozen* state. In Linux desktop, it expects that a `SIGSTOP` will be
+raised, causing all threads to stop getting CPU time until resumed.
+This will be fixed in a future version of Cobalt.
+
+### Application Startup Expectations for the porter
+
+The Starboard application lifecycle, with descriptions of the states and the
+state changes, can be found in `src/starboard/event.h`.
+
+For applications that can be preloaded, the platform should send
+`kSbEventTypePreload` as the first Starboard event instead of
+`kSbEventTypeStart`. Subclasses of
+`src/starboard/shared/starboard/application.cc` can opt in to the already
+implemented support for the `--preload` command-line switch.
+
+If started with `kSbEventTypePreload`, the platform can send
+`kSbEventTypeFocus` at any time to bring the application to the foreground.
+In Linux desktop (linux-x64x11), this can be done by sending a `SIGCONT` to
+the process that is in the *Preloading* state (see
+`starboard/shared/signal/suspend_signals.cc`).
+
+If the platform wants to give applications only a certain amount of time to
+preload, it can send `SbSystemRequestFreeze` to halt preloading and move to
+the *Frozen* state. In Linux desktop, this can be done by sending `SIGUSR1`
+to the process that is in the *Preloading* state.
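+
+For reference, a linux-x64x11 test harness outside the Cobalt process could
+drive these transitions with signals; a minimal sketch (discovering the
+Cobalt process id is left to the harness):
+
+``` c++
+#include <signal.h>
+#include <sys/types.h>
+
+// Sketch for linux-x64x11 only.
+void BringPreloadedCobaltToForeground(pid_t cobalt_pid) {
+  kill(cobalt_pid, SIGCONT);  // Handled in suspend_signals.cc; sends Focus.
+}
+
+void FreezePreloadedCobalt(pid_t cobalt_pid) {
+  kill(cobalt_pid, SIGUSR1);  // Halts preloading; moves to *Frozen*.
+}
+```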
 
 ## Implementing the Application Lifecycle (for the porter)
 
+The platform Starboard implementation **must always** send events in the
+prescribed order - meaning, for example, that it should never send a
+`kSbEventTypeConceal` event unless in the *Blurred* state.
+
 Most porters will want to subclass either `starboard::shared::Application` (in
 `src/starboard/shared/starboard/application.cc`) or
 `starboard::shared::QueueApplication` (in
@@ -95,17 +192,23 @@
 is then up to them to ensure that events are **always** sent in the correct
 state as specified in the Starboard documentation.
 
-`starboard::shared::Application` guarantees the correct ordering by implementing
-a small state machine that ignores invalid application state transitions, and
-inserts any necessary transitions to make them valid. For example, you can call
-`starboard::shared::Application::Suspend()`, and if you are in *Paused*, it will
-just dispatch a `kSbEventTypeSuspend` event. But if you call `Suspend()` in the
-*Started* state, it will dispatch `kSbEventTypePause` and then
-`kSbEventTypeSuspend` events. If you call `Suspend()` in the *Suspended* state,
-it just does nothing.
+`starboard::shared::Application` (in
+`starboard/shared/starboard/application.cc`) guarantees the correct ordering by
+implementing a small state machine that ignores invalid application state
+transitions, and inserts any necessary transitions to make them valid. For
+example, you can call `starboard::shared::Application::Conceal()`, and if you
+are in *Blurred*, it will just dispatch a `kSbEventTypeConceal` event. But if
+you call `Conceal()` in the *Started* state, it will first dispatch
+`kSbEventTypeBlur`, followed by a `kSbEventTypeConceal` event. If you call
+`Conceal()` in the *Concealed* state, it just does nothing.
 
-To control starting up in the *Preloading* state, `Application` subclasses must
-override two functions:
+This behavior can be ensured by only dispatching events to `SbEventHandle()`
+using `Application::DispatchAndDelete()`, either directly or indirectly,
+such as by using `Application::RunLoop()` with the default implementation
+of `Application::DispatchNextEvent()`.
+
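+For example, a minimal sketch of the guarantee described above (the
+`(NULL, NULL)` context/callback arguments are assumptions about the helper
+signatures):
+
+``` c++
+// Illustrative only.
+void ConcealFromAnywhere(starboard::shared::starboard::Application* app) {
+  // From *Started*: dispatches kSbEventTypeBlur, then kSbEventTypeConceal.
+  // From *Blurred*: dispatches just kSbEventTypeConceal.
+  // From *Concealed*: does nothing.
+  app->Conceal(NULL, NULL);
+}
+```
+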
+To control starting up in the *Concealed* state for preloading, `Application`
+subclasses must override two functions:
 
 ``` c++
 class MyApplication : public shared::starboard::QueueApplication {
@@ -116,7 +219,7 @@
 }
 ```
 
-To start up in the *Preloading* state, `IsStartImmediate()` should return
+To start up in the *Concealed* state, `IsStartImmediate()` should return
 `false` and `IsPreloadImmediate()` should return `true`.
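+
+As a sketch, both overrides could key off the `--preload` switch. The
+`HasPreloadSwitch()` helper below is hypothetical; real ports read the
+command line however their platform exposes it.
+
+``` c++
+bool MyApplication::IsStartImmediate() { return !HasPreloadSwitch(); }
+bool MyApplication::IsPreloadImmediate() { return HasPreloadSwitch(); }
+```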
 
 To start up in the *Starting* state (which is the default), `IsStartImmediate()`
diff --git a/src/cobalt/dom/cobalt_ua_data_values.idl b/src/cobalt/dom/cobalt_ua_data_values.idl
new file mode 100644
index 0000000..62c3b30
--- /dev/null
+++ b/src/cobalt/dom/cobalt_ua_data_values.idl
@@ -0,0 +1,29 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Custom, not in spec. Holds UA information specific to Cobalt.
+
+dictionary CobaltUADataValues : UADataValues {
+  DOMString cobaltBuildNumber;
+  DOMString cobaltBuildConfiguration;
+  DOMString jsEngineVersion;
+  DOMString rasterizer;
+  DOMString evergreenVersion;
+  DOMString starboardVersion;
+  DOMString chipset;
+  DOMString modelYear;
+  DOMString deviceBrand;
+  DOMString connectionType;
+  DOMString aux;
+};
diff --git a/src/cobalt/dom/document_test.cc b/src/cobalt/dom/document_test.cc
index e2cd356..6144fc9 100644
--- a/src/cobalt/dom/document_test.cc
+++ b/src/cobalt/dom/document_test.cc
@@ -76,7 +76,8 @@
       html_element_context_(
           &environment_settings_, NULL, NULL, css_parser_.get(), NULL, NULL,
           NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
-          dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL) {
+          dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL,
+          NULL) {
   EXPECT_TRUE(GlobalStats::GetInstance()->CheckNoLeaks());
 }
 
diff --git a/src/cobalt/dom/dom.gyp b/src/cobalt/dom/dom.gyp
index 0b307d3..93b5c44 100644
--- a/src/cobalt/dom/dom.gyp
+++ b/src/cobalt/dom/dom.gyp
@@ -236,6 +236,7 @@
         'named_node_map.h',
         'navigator.cc',
         'navigator.h',
+        'navigator_ua_data.h',
         'node.cc',
         'node.h',
         'node_children_iterator.h',
diff --git a/src/cobalt/dom/dom_parser_test.cc b/src/cobalt/dom/dom_parser_test.cc
index 8f8b5b8..25fad98 100644
--- a/src/cobalt/dom/dom_parser_test.cc
+++ b/src/cobalt/dom/dom_parser_test.cc
@@ -65,7 +65,8 @@
           NULL /* remote_typeface_cache */, NULL /* mesh_cache */,
           NULL /* dom_stat_tracker */, "" /* language */,
           base::kApplicationStateStarted,
-          NULL /* synchronous_loader_interrupt */),
+          NULL /* synchronous_loader_interrupt */,
+          NULL /* performance */),
       dom_parser_(new DOMParser(&html_element_context_)) {}
 
 TEST_F(DOMParserTest, ParsesXML) {
diff --git a/src/cobalt/dom/dom_token_list.cc b/src/cobalt/dom/dom_token_list.cc
index 7489b8d..8e7a1e4 100644
--- a/src/cobalt/dom/dom_token_list.cc
+++ b/src/cobalt/dom/dom_token_list.cc
@@ -148,6 +148,45 @@
   RunUpdateSteps();
 }
 
+// Algorithm for Toggle:
+//   https://www.w3.org/TR/dom/#dom-domtokenlist-toggle
+bool DOMTokenList::Toggle(const std::string& token,
+                          const base::Optional<bool>& force) {
+  // Custom, not in any spec.
+  MaybeRefresh();
+
+  // If token is the empty string, then throw a "SyntaxError" exception.
+  // If token contains any ASCII whitespace, then throw an
+  // "InvalidCharacterError" exception.
+  if (!IsTokenValid(token)) {
+    return false;
+  }
+
+  bool containsToken = ContainsValid(base::Token(token));
+
+  // If token in tokens, and force is true, return true.
+  // If token not in tokens, and force is false, return false.
+  // Note: comparing a bool against an unengaged base::Optional yields false,
+  // so when no force argument is given the toggle below always proceeds.
+  if (containsToken == force) {
+    return containsToken;
+  }
+
+  // If token in tokens, then remove token from tokens.
+  // If token not in tokens, append token to tokens.
+  if (containsToken) {
+    tokens_.erase(std::remove(tokens_.begin(), tokens_.end(), token),
+                  tokens_.end());
+  } else {
+    tokens_.push_back(base::Token(token));
+  }
+
+  // Run the update steps.
+  RunUpdateSteps();
+
+  // Return true if token was appended to tokens.
+  // Return false if token was removed from tokens.
+  return !containsToken;
+}
+
 // Algorithm for AnonymousStringifier:
 //   https://www.w3.org/TR/dom/#dom-domtokenlist-stringifier
 std::string DOMTokenList::AnonymousStringifier() const {
diff --git a/src/cobalt/dom/dom_token_list.h b/src/cobalt/dom/dom_token_list.h
index 4497ac3..a366b5e 100644
--- a/src/cobalt/dom/dom_token_list.h
+++ b/src/cobalt/dom/dom_token_list.h
@@ -41,6 +41,8 @@
   bool Contains(const std::string& token) const;
   void Add(const std::vector<std::string>& tokens);
   void Remove(const std::vector<std::string>& tokens);
+  bool Toggle(const std::string& token,
+              const base::Optional<bool>& force = base::nullopt);
   std::string AnonymousStringifier() const;
 
   // Custom, not in any spec.
diff --git a/src/cobalt/dom/dom_token_list.idl b/src/cobalt/dom/dom_token_list.idl
index 355576a..16830a9 100644
--- a/src/cobalt/dom/dom_token_list.idl
+++ b/src/cobalt/dom/dom_token_list.idl
@@ -20,5 +20,6 @@
   boolean contains(DOMString token);
   void add(DOMString... tokens);
   void remove(DOMString... tokens);
+  boolean toggle(DOMString token, optional boolean force);
   stringifier;
 };
diff --git a/src/cobalt/dom/dom_token_list_test.cc b/src/cobalt/dom/dom_token_list_test.cc
index 5f36dc9..bd492f0 100644
--- a/src/cobalt/dom/dom_token_list_test.cc
+++ b/src/cobalt/dom/dom_token_list_test.cc
@@ -103,6 +103,30 @@
   EXPECT_EQ("a b", element->GetAttribute("class").value());
 }
 
+TEST_F(DOMTokenListTest, DOMTokenListToggle) {
+  scoped_refptr<Element> element =
+      new Element(document_, base::Token("element"));
+  scoped_refptr<DOMTokenList> dom_token_list =
+      new DOMTokenList(element, "class");
+  element->SetAttribute("class", "a b c");
+
+  EXPECT_FALSE(dom_token_list->Toggle("a"));
+  EXPECT_TRUE(dom_token_list->Toggle("b", true));
+  EXPECT_FALSE(dom_token_list->Toggle("c", false));
+  EXPECT_TRUE(dom_token_list->Toggle("d"));
+  EXPECT_TRUE(dom_token_list->Toggle("e", true));
+  EXPECT_FALSE(dom_token_list->Toggle("f", false));
+
+  EXPECT_FALSE(dom_token_list->Contains("a"));
+  EXPECT_TRUE(dom_token_list->Contains("b"));
+  EXPECT_FALSE(dom_token_list->Contains("c"));
+  EXPECT_TRUE(dom_token_list->Contains("d"));
+  EXPECT_TRUE(dom_token_list->Contains("e"));
+  EXPECT_FALSE(dom_token_list->Contains("f"));
+
+  EXPECT_EQ("b d e", element->GetAttribute("class").value());
+}
+
 TEST_F(DOMTokenListTest, DOMTokenListAnonymousStringifier) {
   scoped_refptr<Element> element =
       new Element(document_, base::Token("element"));
diff --git a/src/cobalt/dom/element_test.cc b/src/cobalt/dom/element_test.cc
index f30ffa3..48f542b 100644
--- a/src/cobalt/dom/element_test.cc
+++ b/src/cobalt/dom/element_test.cc
@@ -68,7 +68,7 @@
                             css_parser_.get(), dom_parser_.get(), NULL, NULL,
                             NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                             NULL, dom_stat_tracker_.get(), "",
-                            base::kApplicationStateStarted, NULL) {
+                            base::kApplicationStateStarted, NULL, NULL) {
   EXPECT_TRUE(GlobalStats::GetInstance()->CheckNoLeaks());
   document_ = new Document(&html_element_context_);
   xml_document_ = new XMLDocument(&html_element_context_);
diff --git a/src/cobalt/dom/event_target.idl b/src/cobalt/dom/event_target.idl
index b50158e..094c645 100644
--- a/src/cobalt/dom/event_target.idl
+++ b/src/cobalt/dom/event_target.idl
@@ -14,6 +14,10 @@
 
 // https://www.w3.org/TR/dom/#eventtarget
 
+[
+  Constructor(),
+  ConstructorCallWith=EnvironmentSettings
+]
 interface EventTarget {
   void addEventListener(DOMString type, EventListener? listener,
                         optional boolean useCapture = false);
diff --git a/src/cobalt/dom/html_element.cc b/src/cobalt/dom/html_element.cc
index 359ea9c..307daa2 100644
--- a/src/cobalt/dom/html_element.cc
+++ b/src/cobalt/dom/html_element.cc
@@ -62,6 +62,7 @@
 #include "cobalt/dom/rule_matching.h"
 #include "cobalt/dom/text.h"
 #include "cobalt/loader/image/animated_image_tracker.h"
+#include "cobalt/loader/resource_cache.h"
 #include "third_party/icu/source/common/unicode/uchar.h"
 #include "third_party/icu/source/common/unicode/utf8.h"
 
@@ -84,6 +85,9 @@
 // focused on the element for a specified duration.
 const char kUiNavFocusDurationAttribute[] = "data-cobalt-ui-nav-focus-duration";
 
+// https://www.w3.org/TR/resource-timing-1/#dom-performanceresourcetiming-initiatortype
+const char* kPerformanceResourceTimingInitiatorType = "img";
+
 void UiNavCallbackHelper(
     scoped_refptr<base::SingleThreadTaskRunner> task_runner,
     base::Callback<void(SbTimeMonotonic)> callback) {
@@ -2292,9 +2296,27 @@
   }
 }
 
+void HTMLElement::GetLoadTimingInfoAndCreateResourceTiming() {
+  if (html_element_context()->performance() == nullptr) return;
+  for (auto& cached_background_image : cached_background_images_) {
+    scoped_refptr<loader::CachedResourceBase> cached_image =
+        cached_background_image->GetCachedResource();
+    if (cached_image == nullptr) continue;
+
+    if (!cached_image->get_resource_timing_created_flag()) {
+      html_element_context()->performance()->CreatePerformanceResourceTiming(
+          cached_image->GetLoadTimingInfo(),
+          kPerformanceResourceTimingInitiatorType, cached_image->url().spec());
+      cached_image->set_resource_timing_created_flag(true);
+    }
+  }
+}
+
 void HTMLElement::OnBackgroundImageLoaded() {
   node_document()->RecordMutation();
   InvalidateLayoutBoxRenderTreeNodes();
+  // GetLoadTimingInfo from cached resource and create resource timing.
+  GetLoadTimingInfoAndCreateResourceTiming();
 }
 
 }  // namespace dom
diff --git a/src/cobalt/dom/html_element.h b/src/cobalt/dom/html_element.h
index 4ddc196..e98a83c 100644
--- a/src/cobalt/dom/html_element.h
+++ b/src/cobalt/dom/html_element.h
@@ -393,6 +393,9 @@
                       const std::string& value) override;
   void OnRemoveAttribute(const std::string& name) override;
 
+  // Create Performance Resource Timing entry for background image.
+  void GetLoadTimingInfoAndCreateResourceTiming();
+
   // HTMLElement keeps a pointer to the dom stat tracker to ensure that it can
   // make stat updates even after its weak pointer to its document has been
   // deleted. This is protected because some derived classes need access to it.
diff --git a/src/cobalt/dom/html_element_context.cc b/src/cobalt/dom/html_element_context.cc
index 3601025..7328cde 100644
--- a/src/cobalt/dom/html_element_context.cc
+++ b/src/cobalt/dom/html_element_context.cc
@@ -15,6 +15,7 @@
 #include "cobalt/dom/html_element_context.h"
 
 #include "cobalt/dom/html_element_factory.h"
+#include "cobalt/dom/window.h"
 
 #if !defined(COBALT_BUILD_TYPE_GOLD)
 #include "cobalt/dom/testing/stub_environment_settings.h"
@@ -47,7 +48,8 @@
           &application_lifecycle_state_),
       video_playback_rate_multiplier_(1.f),
       sync_load_thread_("SynchronousLoad"),
-      html_element_factory_(new HTMLElementFactory()) {
+      html_element_factory_(new HTMLElementFactory()),
+      performance_(NULL) {
   sync_load_thread_.Start();
 }
 #endif  // !defined(COBALT_BUILD_TYPE_GOLD)
@@ -71,6 +73,7 @@
     const std::string& font_language_script,
     base::ApplicationState initial_application_state,
     base::WaitableEvent* synchronous_loader_interrupt,
+    Performance* performance,
     bool enable_inline_script_warnings, float video_playback_rate_multiplier)
     : environment_settings_(environment_settings),
       fetcher_factory_(fetcher_factory),
@@ -98,7 +101,8 @@
       synchronous_loader_interrupt_(synchronous_loader_interrupt),
       enable_inline_script_warnings_(enable_inline_script_warnings),
       sync_load_thread_("SynchronousLoad"),
-      html_element_factory_(new HTMLElementFactory()) {
+      html_element_factory_(new HTMLElementFactory()),
+      performance_(performance) {
   sync_load_thread_.Start();
 }
 
diff --git a/src/cobalt/dom/html_element_context.h b/src/cobalt/dom/html_element_context.h
index 2e06997..aa4de99 100644
--- a/src/cobalt/dom/html_element_context.h
+++ b/src/cobalt/dom/html_element_context.h
@@ -26,6 +26,7 @@
 #include "cobalt/dom/dom_stat_tracker.h"
 #include "cobalt/dom/parser.h"
 #include "cobalt/dom/url_registry.h"
+#include "cobalt/dom/performance.h"
 #include "cobalt/loader/fetcher_factory.h"
 #include "cobalt/loader/font/remote_typeface_cache.h"
 #include "cobalt/loader/image/animated_image_tracker.h"
@@ -74,6 +75,7 @@
       const std::string& font_language_script,
       base::ApplicationState initial_application_state,
       base::WaitableEvent* synchronous_loader_interrupt,
+      Performance* performance,
       bool enable_inline_script_warnings = false,
       float video_playback_rate_multiplier = 1.0);
   ~HTMLElementContext();
@@ -160,6 +162,8 @@
     return application_lifecycle_state_weak_ptr_factory_.GetWeakPtr();
   }
 
+  Performance* performance() { return performance_; }
+
  private:
 #if !defined(COBALT_BUILD_TYPE_GOLD)
   // StubEnvironmentSettings for no-args test constructor.
@@ -195,6 +199,8 @@
   base::Thread sync_load_thread_;
   std::unique_ptr<HTMLElementFactory> html_element_factory_;
 
+  Performance* performance_;
+
   DISALLOW_COPY_AND_ASSIGN(HTMLElementContext);
 };
 
diff --git a/src/cobalt/dom/html_element_factory_test.cc b/src/cobalt/dom/html_element_factory_test.cc
index c4cb2de..1c8907e 100644
--- a/src/cobalt/dom/html_element_factory_test.cc
+++ b/src/cobalt/dom/html_element_factory_test.cc
@@ -74,7 +74,8 @@
             NULL /* remote_typeface_cache */, NULL /* mesh_cache */,
             dom_stat_tracker_.get(), "" /* language */,
             base::kApplicationStateStarted,
-            NULL /* synchronous_loader_interrupt */),
+            NULL /* synchronous_loader_interrupt */,
+            NULL /* performance */),
         document_(new Document(&html_element_context_)) {}
   ~HTMLElementFactoryTest() override {}
 
diff --git a/src/cobalt/dom/html_element_test.cc b/src/cobalt/dom/html_element_test.cc
index 9678082..7161d4f 100644
--- a/src/cobalt/dom/html_element_test.cc
+++ b/src/cobalt/dom/html_element_test.cc
@@ -87,7 +87,7 @@
         html_element_context_(&environment_settings_, NULL, NULL, &css_parser_,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, NULL, dom_stat_tracker_.get(),
-                              "", base::kApplicationStateStarted, NULL),
+                              "", base::kApplicationStateStarted, NULL, NULL),
         document_(new Document(&html_element_context_)) {}
   ~HTMLElementTest() override {}
 
diff --git a/src/cobalt/dom/html_image_element.cc b/src/cobalt/dom/html_image_element.cc
index 65d847e..f931af3 100644
--- a/src/cobalt/dom/html_image_element.cc
+++ b/src/cobalt/dom/html_image_element.cc
@@ -187,6 +187,7 @@
   if (node_document()) {
     node_document()->DecreaseLoadingCounterAndMaybeDispatchLoadEvent();
   }
+  GetLoadTimingInfoAndCreateResourceTiming();
   cached_image_loaded_callback_handler_.reset();
 }
 
@@ -197,6 +198,7 @@
   if (node_document()) {
     node_document()->DecreaseLoadingCounterAndMaybeDispatchLoadEvent();
   }
+  GetLoadTimingInfoAndCreateResourceTiming();
   cached_image_loaded_callback_handler_.reset();
 }
 
@@ -228,5 +230,18 @@
   scoped_prevent_gc.reset();
 }
 
+void HTMLImageElement::GetLoadTimingInfoAndCreateResourceTiming() {
+  if (html_element_context()->performance() == nullptr) return;
+  // Resolve selected source, relative to the element.
+  const auto src_attr = GetAttribute("src");
+  const std::string src = src_attr.value_or("");
+  const GURL& base_url = node_document()->url_as_gurl();
+  const GURL selected_source = base_url.Resolve(src);
+
+  html_element_context()->performance()->CreatePerformanceResourceTiming(
+      cached_image_loaded_callback_handler_->GetLoadTimingInfo(),
+      kTagName, selected_source.spec());
+}
+
 }  // namespace dom
 }  // namespace cobalt
diff --git a/src/cobalt/dom/html_image_element.h b/src/cobalt/dom/html_image_element.h
index db22502..a972c59 100644
--- a/src/cobalt/dom/html_image_element.h
+++ b/src/cobalt/dom/html_image_element.h
@@ -75,6 +75,8 @@
   void DestroyScopedPreventGC(
       std::unique_ptr<script::GlobalEnvironment::ScopedPreventGarbageCollection>
           scoped_prevent_gc);
+  // Create Performance Resource Timing entry for image element.
+  void GetLoadTimingInfoAndCreateResourceTiming();
 
   std::unique_ptr<loader::image::WeakCachedImage> weak_cached_image_;
   std::unique_ptr<loader::image::CachedImage::OnLoadedCallbackHandler>
diff --git a/src/cobalt/dom/html_link_element.cc b/src/cobalt/dom/html_link_element.cc
index b7150de..7181f4d 100644
--- a/src/cobalt/dom/html_link_element.cc
+++ b/src/cobalt/dom/html_link_element.cc
@@ -313,6 +313,8 @@
 void HTMLLinkElement::ReleaseLoader() {
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
   DCHECK(loader_);
+  // GetLoadTimingInfo from loader before reset.
+  GetLoadTimingInfoAndCreateResourceTiming();
   loader_.reset();
 }
 
@@ -323,5 +325,11 @@
   }
 }
 
+void HTMLLinkElement::GetLoadTimingInfoAndCreateResourceTiming() {
+  if (html_element_context()->performance() == nullptr) return;
+  html_element_context()->performance()->CreatePerformanceResourceTiming(
+      loader_->get_load_timing_info(), kTagName, absolute_url_.spec());
+}
+
 }  // namespace dom
 }  // namespace cobalt
diff --git a/src/cobalt/dom/html_link_element.h b/src/cobalt/dom/html_link_element.h
index e38abea..981abac 100644
--- a/src/cobalt/dom/html_link_element.h
+++ b/src/cobalt/dom/html_link_element.h
@@ -65,6 +65,9 @@
   void OnInsertedIntoDocument() override;
   void OnRemovedFromDocument() override;
 
+  // Create Performance Resource Timing entry for link element.
+  void GetLoadTimingInfoAndCreateResourceTiming();
+
   DEFINE_WRAPPABLE_TYPE(HTMLLinkElement);
 
  protected:
diff --git a/src/cobalt/dom/html_link_element_test.cc b/src/cobalt/dom/html_link_element_test.cc
index 380711e..f487593 100644
--- a/src/cobalt/dom/html_link_element_test.cc
+++ b/src/cobalt/dom/html_link_element_test.cc
@@ -52,7 +52,7 @@
         html_element_context_(&environment_settings_, NULL, NULL, &css_parser_,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, NULL, dom_stat_tracker_.get(),
-                              "", base::kApplicationStateStarted, NULL),
+                              "", base::kApplicationStateStarted, NULL, NULL),
         document_(new DocumentMock(&html_element_context_)),
         message_loop_(base::MessageLoop::TYPE_DEFAULT) {}
 
diff --git a/src/cobalt/dom/html_script_element.cc b/src/cobalt/dom/html_script_element.cc
index ca3bd43..1c3ba7f 100644
--- a/src/cobalt/dom/html_script_element.cc
+++ b/src/cobalt/dom/html_script_element.cc
@@ -710,8 +710,16 @@
 void HTMLScriptElement::ReleaseLoader() {
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
   DCHECK(loader_);
+  // GetLoadTimingInfo from loader before reset.
+  GetLoadTimingInfoAndCreateResourceTiming();
   loader_.reset();
 }
 
+void HTMLScriptElement::GetLoadTimingInfoAndCreateResourceTiming() {
+  if (html_element_context()->performance() == nullptr) return;
+  html_element_context()->performance()->CreatePerformanceResourceTiming(
+      loader_->get_load_timing_info(), kTagName, url_.spec());
+}
+
 }  // namespace dom
 }  // namespace cobalt
diff --git a/src/cobalt/dom/html_script_element.h b/src/cobalt/dom/html_script_element.h
index d9c41c0..9ee5754 100644
--- a/src/cobalt/dom/html_script_element.h
+++ b/src/cobalt/dom/html_script_element.h
@@ -84,6 +84,9 @@
   // From HTMLElement.
   scoped_refptr<HTMLScriptElement> AsHTMLScriptElement() override;
 
+  // Create Performance Resource Timing entry for script element.
+  void GetLoadTimingInfoAndCreateResourceTiming();
+
   DEFINE_WRAPPABLE_TYPE(HTMLScriptElement);
 
  protected:
diff --git a/src/cobalt/dom/intersection_observer_test.cc b/src/cobalt/dom/intersection_observer_test.cc
index e2b9486..32b1db2 100644
--- a/src/cobalt/dom/intersection_observer_test.cc
+++ b/src/cobalt/dom/intersection_observer_test.cc
@@ -58,7 +58,7 @@
         html_element_context_(&environment_settings_, NULL, NULL, &css_parser_,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, NULL, dom_stat_tracker_.get(),
-                              "", base::kApplicationStateStarted, NULL),
+                              "", base::kApplicationStateStarted, NULL, NULL),
         document_(new Document(&html_element_context_)) {}
 
   scoped_refptr<Document> document() { return document_; }
diff --git a/src/cobalt/dom/navigator.cc b/src/cobalt/dom/navigator.cc
index 5a8387e..b13c6dc 100644
--- a/src/cobalt/dom/navigator.cc
+++ b/src/cobalt/dom/navigator.cc
@@ -206,6 +206,10 @@
 
 const std::string& Navigator::user_agent() const { return user_agent_; }
 
+const scoped_refptr<NavigatorUAData>& Navigator::user_agent_data() const {
+  return user_agent_data_;
+}
+
 bool Navigator::java_enabled() const { return false; }
 
 bool Navigator::cookie_enabled() const { return false; }
@@ -238,10 +242,10 @@
     if (media_player_factory_ != nullptr) {
       media_session_->EnsureMediaSessionClient();
       DCHECK(media_session_->media_session_client());
-      media_session_->media_session_client()
-          ->SetMaybeFreezeCallback(maybe_freeze_callback_);
-      media_session_->media_session_client()
-          ->SetMediaPlayerFactory(media_player_factory_);
+      media_session_->media_session_client()->SetMaybeFreezeCallback(
+          maybe_freeze_callback_);
+      media_session_->media_session_client()->SetMediaPlayerFactory(
+          media_player_factory_);
     }
   }
   return media_session_;
diff --git a/src/cobalt/dom/navigator.h b/src/cobalt/dom/navigator.h
index 353f297..bd77244 100644
--- a/src/cobalt/dom/navigator.h
+++ b/src/cobalt/dom/navigator.h
@@ -22,6 +22,7 @@
 #include "cobalt/dom/captions/system_caption_settings.h"
 #include "cobalt/dom/eme/media_key_system_configuration.h"
 #include "cobalt/dom/mime_type_array.h"
+#include "cobalt/dom/navigator_ua_data.h"
 #include "cobalt/dom/plugin_array.h"
 #include "cobalt/media/web_media_player_factory.h"
 #include "cobalt/media_capture/media_devices.h"
@@ -49,6 +50,9 @@
   // Web API: NavigatorID
   const std::string& user_agent() const;
 
+  // Web API: NavigatorUA
+  const scoped_refptr<NavigatorUAData>& user_agent_data() const;
+
   // Web API: NavigatorLanguage
   const std::string& language() const;
 
@@ -76,8 +80,7 @@
     maybe_freeze_callback_ = maybe_freeze_callback;
   }
 
-  void set_media_player_factory(
-      const media::WebMediaPlayerFactory* factory) {
+  void set_media_player_factory(const media::WebMediaPlayerFactory* factory) {
     media_player_factory_ = factory;
   }
 
@@ -125,6 +128,7 @@
       const std::string& encryption_scheme);
 
   std::string user_agent_;
+  scoped_refptr<NavigatorUAData> user_agent_data_;
   std::string language_;
   scoped_refptr<MimeTypeArray> mime_types_;
   scoped_refptr<PluginArray> plugins_;
diff --git a/src/cobalt/dom/navigator.idl b/src/cobalt/dom/navigator.idl
index 6a21520..acbf4b8 100644
--- a/src/cobalt/dom/navigator.idl
+++ b/src/cobalt/dom/navigator.idl
@@ -17,6 +17,7 @@
 interface Navigator {};
 
 Navigator implements NavigatorID;
+Navigator implements NavigatorUA;
 Navigator implements NavigatorLanguage;
 Navigator implements NavigatorPlugins;
 Navigator implements NavigatorStorageUtils;
diff --git a/src/cobalt/dom/navigator_ua.idl b/src/cobalt/dom/navigator_ua.idl
new file mode 100644
index 0000000..3f10c9d
--- /dev/null
+++ b/src/cobalt/dom/navigator_ua.idl
@@ -0,0 +1,20 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://wicg.github.io/ua-client-hints/#navigatorua
+
+[NoInterfaceObject]
+interface NavigatorUA {
+  readonly attribute NavigatorUAData? userAgentData;
+};
diff --git a/src/cobalt/dom/navigator_ua_brand_version.idl b/src/cobalt/dom/navigator_ua_brand_version.idl
new file mode 100644
index 0000000..ec624f9
--- /dev/null
+++ b/src/cobalt/dom/navigator_ua_brand_version.idl
@@ -0,0 +1,20 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://wicg.github.io/ua-client-hints/#dictdef-navigatoruabrandversion
+
+dictionary NavigatorUABrandVersion {
+  DOMString brand;
+  DOMString version;
+};
diff --git a/src/cobalt/dom/navigator_ua_data.h b/src/cobalt/dom/navigator_ua_data.h
new file mode 100644
index 0000000..1878093
--- /dev/null
+++ b/src/cobalt/dom/navigator_ua_data.h
@@ -0,0 +1,66 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_DOM_NAVIGATOR_UA_DATA_H_
+#define COBALT_DOM_NAVIGATOR_UA_DATA_H_
+
+#include <string>
+
+#include "cobalt/dom/cobalt_ua_data_values.h"
+#include "cobalt/dom/navigator_ua_brand_version.h"
+#include "cobalt/dom/ua_low_entropy_json.h"
+#include "cobalt/script/promise.h"
+#include "cobalt/script/sequence.h"
+#include "cobalt/script/wrappable.h"
+
+namespace cobalt {
+namespace dom {
+
+// The NavigatorUAData object holds the User-Agent Client Hints information.
+// https://wicg.github.io/ua-client-hints/#navigatoruadata
+class NavigatorUAData : public script::Wrappable {
+ public:
+  NavigatorUAData() {}
+
+  script::Sequence<NavigatorUABrandVersion> brands() const { return brands_; }
+
+  bool mobile() const { return mobile_; }
+
+  std::string platform() const { return platform_; }
+
+  script::Handle<script::Promise<CobaltUADataValues>> GetHighEntropyValues(
+      script::Sequence<std::string> hints) {
+    return high_entropy_values_;
+  }
+
+  UALowEntropyJSON ToJSON() { return low_entropy_json_; }
+
+  DEFINE_WRAPPABLE_TYPE(NavigatorUAData);
+
+ private:
+  ~NavigatorUAData() override {}
+
+  script::Sequence<NavigatorUABrandVersion> brands_;
+  bool mobile_ = false;
+  std::string platform_;
+  script::Handle<script::Promise<CobaltUADataValues>> high_entropy_values_;
+  UALowEntropyJSON low_entropy_json_;
+
+  DISALLOW_COPY_AND_ASSIGN(NavigatorUAData);
+};
+
+}  // namespace dom
+}  // namespace cobalt
+
+#endif  // COBALT_DOM_NAVIGATOR_UA_DATA_H_
diff --git a/src/cobalt/dom/navigator_ua_data.idl b/src/cobalt/dom/navigator_ua_data.idl
new file mode 100644
index 0000000..5d1bf3e
--- /dev/null
+++ b/src/cobalt/dom/navigator_ua_data.idl
@@ -0,0 +1,25 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://wicg.github.io/ua-client-hints/#navigatoruadata
+
+[Exposed=Window]
+interface NavigatorUAData {
+  // TODO make this be FrozenArray<NavigatorUABrandVersion> when available
+  readonly attribute sequence<NavigatorUABrandVersion> brands;
+  readonly attribute boolean mobile;
+  readonly attribute DOMString platform;
+  Promise<CobaltUADataValues> getHighEntropyValues(sequence<DOMString> hints);
+  UALowEntropyJSON toJSON();
+};
diff --git a/src/cobalt/dom/node_list_live_test.cc b/src/cobalt/dom/node_list_live_test.cc
index 6d23f82..8b6012a 100644
--- a/src/cobalt/dom/node_list_live_test.cc
+++ b/src/cobalt/dom/node_list_live_test.cc
@@ -31,7 +31,7 @@
         html_element_context_(&environment_settings_, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, &dom_stat_tracker_, "",
-                              base::kApplicationStateStarted, NULL),
+                              base::kApplicationStateStarted, NULL, NULL),
         document_(new Document(&html_element_context_)) {}
 
   ~NodeListLiveTest() override {}
diff --git a/src/cobalt/dom/node_list_test.cc b/src/cobalt/dom/node_list_test.cc
index 4cd3d6f..39e536b 100644
--- a/src/cobalt/dom/node_list_test.cc
+++ b/src/cobalt/dom/node_list_test.cc
@@ -33,7 +33,7 @@
         html_element_context_(&environment_settings_, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, NULL, NULL, dom_stat_tracker_.get(), "",
-                              base::kApplicationStateStarted, NULL),
+                              base::kApplicationStateStarted, NULL, NULL),
         document_(new Document(&html_element_context_)) {}
 
   ~NodeListTest() override {}
diff --git a/src/cobalt/dom/performance.cc b/src/cobalt/dom/performance.cc
index 903273d..2b60c29 100644
--- a/src/cobalt/dom/performance.cc
+++ b/src/cobalt/dom/performance.cc
@@ -274,7 +274,7 @@
   }
 
   // 3. If resource timing buffer full event pending flag is false, run the
-  //  following substeps:
+  // following substeps:
   if (!resource_timing_buffer_full_event_pending_flag_) {
     // 3.1 Set resource timing buffer full event pending flag to true.
     resource_timing_buffer_full_event_pending_flag_ = true;
@@ -283,7 +283,7 @@
     DCHECK(message_loop());
     message_loop()->task_runner()->PostTask(
         FROM_HERE, base::Bind(&Performance::FireResourceTimingBufferFullEvent,
-        this));
+        base::Unretained(this)));
   }
   // 4. Add new entry to the resource timing secondary buffer.
   resource_timing_secondary_buffer_.push_back(resource_timing_entry);
@@ -342,7 +342,7 @@
   DCHECK(message_loop());
   message_loop()->task_runner()->PostTask(
       FROM_HERE, base::Bind(&Performance::QueuePerformanceTimelineTask,
-      this));
+      base::Unretained(this)));
 }
 
 void Performance::QueuePerformanceTimelineTask() {
@@ -374,7 +374,7 @@
 
 void Performance::CreatePerformanceResourceTiming(
     const net::LoadTimingInfo& timing_info, const std::string& initiator_type,
-    const std::string& requested_url, const std::string& cache_mode) {
+    const std::string& requested_url) {
   // To mark resource timing given a fetch timing info timingInfo, a DOMString
   // requestedURL, a DOMString initiatorType a global object global, and a
   // string cacheMode, perform the following steps:
@@ -383,8 +383,8 @@
   // 2.Setup the resource timing entry for entry, given initiatorType,
   // requestedURL, timingInfo, and cacheMode.
   scoped_refptr<PerformanceResourceTiming> resource_timing(
-      new PerformanceResourceTiming(timing_info, initiator_type, requested_url,
-                                    cache_mode, this));
+      new PerformanceResourceTiming(timing_info, initiator_type,
+                                    requested_url, this));
   // 2. Queue entry.
   QueuePerformanceEntry(resource_timing);
   // 3. Add entry to global's performance entry buffer.
diff --git a/src/cobalt/dom/performance.h b/src/cobalt/dom/performance.h
index e92650f..ad88fe5 100644
--- a/src/cobalt/dom/performance.h
+++ b/src/cobalt/dom/performance.h
@@ -88,8 +88,7 @@
   void QueuePerformanceTimelineTask();
   void CreatePerformanceResourceTiming(const net::LoadTimingInfo& timing_info,
                                        const std::string& initiator_type,
-                                       const std::string& requested_url,
-                                       const std::string& cache_mode);
+                                       const std::string& requested_url);
   // Custom, not in any spec.
   base::TimeDelta get_time_origin() const { return time_origin_; }
 
diff --git a/src/cobalt/dom/performance_resource_timing.cc b/src/cobalt/dom/performance_resource_timing.cc
index 2f13337..d61624d 100644
--- a/src/cobalt/dom/performance_resource_timing.cc
+++ b/src/cobalt/dom/performance_resource_timing.cc
@@ -18,6 +18,10 @@
 namespace cobalt {
 namespace dom {
 
+namespace {
+const char kPerformanceResourceTimingCacheMode[] = "local";
+}  // namespace
+
 PerformanceResourceTiming::PerformanceResourceTiming(
     const std::string& name, DOMHighResTimeStamp start_time,
     DOMHighResTimeStamp end_time)
@@ -25,15 +29,14 @@
 
 PerformanceResourceTiming::PerformanceResourceTiming(
     const net::LoadTimingInfo& timing_info, const std::string& initiator_type,
-    const std::string& requested_url, const std::string& cache_mode,
-    Performance* performance)
+    const std::string& requested_url, Performance* performance)
     : PerformanceEntry(
           requested_url, performance->Now(),
           ConvertTimeDeltaToDOMHighResTimeStamp(
               timing_info.receive_headers_end.since_origin(),
               Performance::kPerformanceTimerMinResolutionInMicroseconds)),
       initiator_type_(initiator_type),
-      cache_mode_(cache_mode),
+      cache_mode_(kPerformanceResourceTimingCacheMode),
       transfer_size_(0),
       timing_info_(timing_info) {}
 
diff --git a/src/cobalt/dom/performance_resource_timing.h b/src/cobalt/dom/performance_resource_timing.h
index 029f64f..a0eaa0a 100644
--- a/src/cobalt/dom/performance_resource_timing.h
+++ b/src/cobalt/dom/performance_resource_timing.h
@@ -39,7 +39,6 @@
   PerformanceResourceTiming(const net::LoadTimingInfo& timing_info,
                             const std::string& initiator_type,
                             const std::string& requested_url,
-                            const std::string& cache_mode,
                             Performance* performance);
 
   // Web API.
diff --git a/src/cobalt/dom/rule_matching_test.cc b/src/cobalt/dom/rule_matching_test.cc
index 75f1f51..daf905f 100644
--- a/src/cobalt/dom/rule_matching_test.cc
+++ b/src/cobalt/dom/rule_matching_test.cc
@@ -57,7 +57,7 @@
                               css_parser_.get(), dom_parser_.get(), NULL, NULL,
                               NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                               NULL, dom_stat_tracker_.get(), "",
-                              base::kApplicationStateStarted, NULL),
+                              base::kApplicationStateStarted, NULL, NULL),
         document_(new Document(&html_element_context_)),
         root_(document_->CreateElement("html")->AsHTMLElement()),
         head_(document_->CreateElement("head")->AsHTMLElement()),
diff --git a/src/cobalt/dom/serializer_test.cc b/src/cobalt/dom/serializer_test.cc
index 510d624..90eac26 100644
--- a/src/cobalt/dom/serializer_test.cc
+++ b/src/cobalt/dom/serializer_test.cc
@@ -51,7 +51,7 @@
       html_element_context_(&environment_settings_, NULL, NULL, NULL, NULL,
                             NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
                             NULL, NULL, NULL, dom_stat_tracker_.get(), "",
-                            base::kApplicationStateStarted, NULL),
+                            base::kApplicationStateStarted, NULL, NULL),
       document_(new Document(&html_element_context_)),
       root_(new Element(document_, base::Token("root"))),
       source_location_(base::SourceLocation("[object SerializerTest]", 1, 1)) {}
diff --git a/src/cobalt/dom/ua_data_values.idl b/src/cobalt/dom/ua_data_values.idl
new file mode 100644
index 0000000..9d99b75
--- /dev/null
+++ b/src/cobalt/dom/ua_data_values.idl
@@ -0,0 +1,26 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://wicg.github.io/ua-client-hints/#dictdef-uadatavalues
+
+dictionary UADataValues {
+  sequence<NavigatorUABrandVersion> brands;
+  boolean mobile;
+  DOMString platform;
+  DOMString architecture;
+  DOMString bitness;
+  DOMString model;
+  DOMString platformVersion;
+  DOMString uaFullVersion;
+};
diff --git a/src/cobalt/dom/ua_low_entropy_json.idl b/src/cobalt/dom/ua_low_entropy_json.idl
new file mode 100644
index 0000000..917b87e
--- /dev/null
+++ b/src/cobalt/dom/ua_low_entropy_json.idl
@@ -0,0 +1,21 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://wicg.github.io/ua-client-hints/#dictdef-ualowentropyjson
+
+dictionary UALowEntropyJSON {
+  sequence<NavigatorUABrandVersion> brands;
+  boolean mobile;
+  DOMString platform;
+};
diff --git a/src/cobalt/dom/window.cc b/src/cobalt/dom/window.cc
index 3510736..c10512c 100644
--- a/src/cobalt/dom/window.cc
+++ b/src/cobalt/dom/window.cc
@@ -136,6 +136,7 @@
 #if defined(ENABLE_TEST_RUNNER)
       test_runner_(new TestRunner()),
 #endif  // ENABLE_TEST_RUNNER
+      performance_(new Performance(settings, MakePerformanceClock(clock_type))),
       html_element_context_(new HTMLElementContext(
           settings, fetcher_factory, loader_factory, css_parser, dom_parser,
           can_play_type_handler, web_media_player_factory, script_runner,
@@ -144,8 +145,8 @@
           reduced_image_cache_capacity_manager, remote_typeface_cache,
           mesh_cache, dom_stat_tracker, font_language_script,
           initial_application_state, synchronous_loader_interrupt,
-          enable_inline_script_warnings, video_playback_rate_multiplier)),
-      performance_(new Performance(settings, MakePerformanceClock(clock_type))),
+          performance_.get(), enable_inline_script_warnings,
+          video_playback_rate_multiplier)),
       ALLOW_THIS_IN_INITIALIZER_LIST(document_(new Document(
           html_element_context_.get(),
           Document::Options(
diff --git a/src/cobalt/dom/window.h b/src/cobalt/dom/window.h
index d0deca4..5998432 100644
--- a/src/cobalt/dom/window.h
+++ b/src/cobalt/dom/window.h
@@ -443,8 +443,8 @@
   scoped_refptr<TestRunner> test_runner_;
 #endif  // ENABLE_TEST_RUNNER
 
-  const std::unique_ptr<HTMLElementContext> html_element_context_;
   scoped_refptr<Performance> performance_;
+  const std::unique_ptr<HTMLElementContext> html_element_context_;
   scoped_refptr<Document> document_;
   std::unique_ptr<loader::Loader> document_loader_;
   scoped_refptr<History> history_;
diff --git a/src/cobalt/dom_parser/html_decoder_test.cc b/src/cobalt/dom_parser/html_decoder_test.cc
index d222a35..94992ac 100644
--- a/src/cobalt/dom_parser/html_decoder_test.cc
+++ b/src/cobalt/dom_parser/html_decoder_test.cc
@@ -84,7 +84,7 @@
           NULL /* can_play_type_handler */, NULL /* web_media_player_factory */,
           &stub_script_runner_, NULL /* script_value_factory */, NULL, NULL,
           NULL, NULL, NULL, NULL, NULL, dom_stat_tracker_.get(), "",
-          base::kApplicationStateStarted, NULL),
+          base::kApplicationStateStarted, NULL, NULL),
       document_(new dom::Document(&html_element_context_)),
       root_(new dom::Element(document_, base::Token("element"))),
       source_location_(base::SourceLocation("[object HTMLDecoderTest]", 1, 1)) {
diff --git a/src/cobalt/encoding/encoding.gyp b/src/cobalt/encoding/encoding.gyp
new file mode 100644
index 0000000..cd55553
--- /dev/null
+++ b/src/cobalt/encoding/encoding.gyp
@@ -0,0 +1,35 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+    'variables': {
+        'sb_pedantic_warnings': 1,
+    },
+    'targets': [{
+        'target_name':
+            'text_encoding',
+        'type':
+            'static_library',
+        'sources': [
+            'text_decoder.cc',
+            'text_decoder.h',
+            'text_encoder.cc',
+            'text_encoder.h',
+        ],
+        'dependencies': [
+            '<(DEPTH)/cobalt/browser/browser_bindings_gen.gyp:generated_types',
+            '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+        ],
+    },]
+}
diff --git a/src/cobalt/encoding/encoding_test.gyp b/src/cobalt/encoding/encoding_test.gyp
new file mode 100644
index 0000000..8ae690d
--- /dev/null
+++ b/src/cobalt/encoding/encoding_test.gyp
@@ -0,0 +1,47 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+    'variables': {
+        'sb_pedantic_warnings': 1,
+    },
+    'targets': [
+        {
+            'target_name': 'text_encoding_test',
+            'type': '<(gtest_target_type)',
+            'sources': [
+                'text_decoder_test.cc',
+                'text_encoder_test.cc',
+            ],
+            'dependencies': [
+                '<(DEPTH)/cobalt/dom/dom.gyp:dom',
+                '<(DEPTH)/cobalt/dom/testing/dom_testing.gyp:dom_testing',
+                '<(DEPTH)/cobalt/encoding/encoding.gyp:text_encoding',
+                '<(DEPTH)/cobalt/script/script.gyp:script',
+                '<(DEPTH)/testing/gmock.gyp:gmock',
+                '<(DEPTH)/testing/gtest.gyp:gtest',
+            ],
+            'includes': ['<(DEPTH)/cobalt/test/test.gypi'],
+        },
+        {
+            'target_name': 'text_encoding_test_deploy',
+            'type': 'none',
+            'dependencies': ['text_encoding_test',],
+            'variables': {
+                'executable_name': 'text_encoding_test',
+            },
+            'includes': ['<(DEPTH)/starboard/build/deploy.gypi'],
+        },
+    ]
+}
diff --git a/src/cobalt/encoding/text_decode_options.idl b/src/cobalt/encoding/text_decode_options.idl
new file mode 100644
index 0000000..3767e36
--- /dev/null
+++ b/src/cobalt/encoding/text_decode_options.idl
@@ -0,0 +1,17 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://encoding.spec.whatwg.org/#textdecodeoptions
+
+dictionary TextDecodeOptions { boolean stream = false; };
diff --git a/src/cobalt/encoding/text_decoder.cc b/src/cobalt/encoding/text_decoder.cc
new file mode 100644
index 0000000..010262b
--- /dev/null
+++ b/src/cobalt/encoding/text_decoder.cc
@@ -0,0 +1,143 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/encoding/text_decoder.h"
+
+#include <algorithm>
+#include <cctype>
+
+#include "third_party/icu/source/common/unicode/ucnv.h"
+#include "third_party/icu/source/common/unicode/unistr.h"
+#include "third_party/icu/source/common/unicode/utypes.h"
+
+namespace cobalt {
+namespace encoding {
+
+// static
+const char TextDecoder::kDefaultEncoding[] = "utf-8";
+const char TextDecoder::kReplacementEncoding[] = "REPLACEMENT";
+
+namespace {
+std::string to_lower(std::string str) {
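+  // Cast to unsigned char before calling std::tolower(): passing a negative
+  // char value is undefined behavior.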
+  std::transform(str.begin(), str.end(), str.begin(),
+                 [](unsigned char c) { return std::tolower(c); });
+  return str;
+}
+}  // namespace
+
+void TextDecoder::Cleanup() {
+  encoding_.clear();
+  if (converter_) {
+    ucnv_close(converter_);
+  }
+  converter_ = nullptr;
+}
+
+void TextDecoder::Setup(std::string label,
+                        script::ExceptionState* exception_state) {
+  UErrorCode error_code = U_ZERO_ERROR;
+  converter_ = ucnv_open(label.c_str(), &error_code);
+  if (U_FAILURE(error_code)) {
+    LOG(ERROR) << "Unable to open icu converter. " << u_errorName(error_code);
+    exception_state->SetSimpleException(script::kRangeError,
+                                        "label %s is invalid", label.c_str());
+    return Cleanup();
+  }
+
+  error_code = U_ZERO_ERROR;
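+  // ucnv_getName() returns the canonical converter name; labels that resolve
+  // to the replacement encoding are rejected below, per the Encoding spec.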
+  encoding_ = ucnv_getName(converter_, &error_code);
+  if (U_FAILURE(error_code) || encoding_ == kReplacementEncoding) {
+    LOG(ERROR) << "Unable to get encoding name. " << u_errorName(error_code);
+    exception_state->SetSimpleException(
+        script::kRangeError, "label %s is invalid", encoding_.c_str());
+    return Cleanup();
+  }
+
+  // The encoding's name, lowercased.
+  encoding_ = to_lower(encoding_);
+}
+
+TextDecoder::TextDecoder(script::ExceptionState* exception_state)
+    : converter_(nullptr) {
+  Setup(kDefaultEncoding, exception_state);
+}
+
+TextDecoder::TextDecoder(const std::string& label,
+                         script::ExceptionState* exception_state)
+    : converter_(nullptr) {
+  Setup(label, exception_state);
+}
+
+TextDecoder::TextDecoder(const TextDecoderOptions& options,
+                         script::ExceptionState* exception_state)
+    : fatal_(options.fatal()), ignore_bom_(options.ignore_bom()) {
+  Setup(kDefaultEncoding, exception_state);
+}
+
+TextDecoder::TextDecoder(const std::string& label,
+                         const TextDecoderOptions& options,
+                         script::ExceptionState* exception_state)
+    : fatal_(options.fatal()), ignore_bom_(options.ignore_bom()) {
+  Setup(label, exception_state);
+}
+
+TextDecoder::~TextDecoder() { Cleanup(); }
+
+std::string TextDecoder::Decode(script::ExceptionState* exception_state) {
+  // TODO: Figure out how to remove this use case or implement its support.
+  NOTIMPLEMENTED();
+  return "";
+}
+std::string TextDecoder::Decode(const dom::BufferSource& input,
+                                script::ExceptionState* exception_state) {
+  // Use the default options here.
+  const TextDecodeOptions default_options;
+  return Decode(input, default_options, exception_state);
+}
+std::string TextDecoder::Decode(const TextDecodeOptions& options,
+                                script::ExceptionState* exception_state) {
+  // TODO: Figure out how to remove this use case or implement its support.
+  // Note: This is a valid case, for example when using the stream option.
+  NOTIMPLEMENTED();
+  return "";
+}
+std::string TextDecoder::Decode(const dom::BufferSource& input,
+                                const TextDecodeOptions& options,
+                                script::ExceptionState* exception_state) {
+  int32_t size;
+  const uint8* buffer;
+  std::string result;
+  UErrorCode error_code = U_ZERO_ERROR;
+  dom::GetBufferAndSize(input, &buffer, &size);
+
+  if (converter_) {
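+    // Decodes the whole buffer in one pass; |options| (including the stream
+    // flag) is not consulted by this implementation yet.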
+    icu::UnicodeString unicode_input(reinterpret_cast<const char*>(buffer),
+                                     size, converter_, error_code);
+    if (U_FAILURE(error_code)) {
+      LOG(ERROR) << "Error decoding " << u_errorName(error_code);
+      exception_state->SetSimpleException(script::kRangeError,
+                                          "Error processing the data");
+      return result;
+    }
+    unicode_input.toUTF8String(result);
+  } else {
+    LOG(ERROR) << "No converter available";
+    exception_state->SetSimpleException(script::kRangeError,
+                                        "No converter available");
+  }
+  return result;
+}
+
+}  // namespace encoding
+}  // namespace cobalt
diff --git a/src/cobalt/encoding/text_decoder.h b/src/cobalt/encoding/text_decoder.h
new file mode 100644
index 0000000..7034a6b
--- /dev/null
+++ b/src/cobalt/encoding/text_decoder.h
@@ -0,0 +1,73 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_ENCODING_TEXT_DECODER_H_
+#define COBALT_ENCODING_TEXT_DECODER_H_
+
+#include <string>
+
+#include "cobalt/dom/buffer_source.h"
+#include "cobalt/encoding/text_decode_options.h"
+#include "cobalt/encoding/text_decoder_options.h"
+#include "cobalt/script/wrappable.h"
+
+typedef struct UConverter UConverter;
+
+namespace cobalt {
+namespace encoding {
+
+class TextDecoder : public script::Wrappable {
+ public:
+  explicit TextDecoder(script::ExceptionState*);
+  explicit TextDecoder(const std::string& label, script::ExceptionState*);
+  explicit TextDecoder(const TextDecoderOptions& options,
+                       script::ExceptionState*);
+  TextDecoder(const std::string& label, const TextDecoderOptions& options,
+              script::ExceptionState*);
+
+  ~TextDecoder() override;
+
+  std::string encoding() const { return encoding_; }
+  bool fatal() const { return fatal_; }
+  bool ignore_bom() const { return ignore_bom_; }
+
+  std::string Decode(script::ExceptionState*);
+  std::string Decode(const dom::BufferSource&, script::ExceptionState*);
+  std::string Decode(const TextDecodeOptions&, script::ExceptionState*);
+  std::string Decode(const dom::BufferSource&, const TextDecodeOptions&,
+                     script::ExceptionState*);
+
+  DEFINE_WRAPPABLE_TYPE(TextDecoder);
+
+ private:
+  // Web API standard.
+  std::string encoding_;
+  bool fatal_ = false;
+  bool ignore_bom_ = false;
+
+  UConverter* converter_ = nullptr;
+
+  static const char kDefaultEncoding[];
+  static const char kReplacementEncoding[];
+
+  // Common code for constructors.
+  void Setup(std::string, script::ExceptionState*);
+  void Cleanup();
+
+  DISALLOW_COPY_AND_ASSIGN(TextDecoder);
+};
+
+}  // namespace encoding
+}  // namespace cobalt
+#endif  // COBALT_ENCODING_TEXT_DECODER_H_
diff --git a/src/cobalt/encoding/text_decoder.idl b/src/cobalt/encoding/text_decoder.idl
new file mode 100644
index 0000000..bd3d753
--- /dev/null
+++ b/src/cobalt/encoding/text_decoder.idl
@@ -0,0 +1,29 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://encoding.spec.whatwg.org/#interface-textdecoder
+
+[
+  Constructor(optional DOMString label = "utf-8",
+              optional TextDecoderOptions options),
+  RaisesException = Constructor
+] interface TextDecoder {
+  // https://encoding.spec.whatwg.org/#textdecodercommon
+  readonly attribute DOMString encoding;
+  readonly attribute boolean fatal;
+  readonly attribute boolean ignoreBOM;
+
+  [RaisesException] USVString decode(optional BufferSource input,
+                                     optional TextDecodeOptions options);
+};
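+
+// Example usage from JavaScript:
+//   var text = new TextDecoder().decode(new Uint8Array([104, 105]));  // "hi"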
diff --git a/src/cobalt/encoding/text_decoder_options.idl b/src/cobalt/encoding/text_decoder_options.idl
new file mode 100644
index 0000000..0c18e46
--- /dev/null
+++ b/src/cobalt/encoding/text_decoder_options.idl
@@ -0,0 +1,20 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://encoding.spec.whatwg.org/#textdecoderoptions
+
+dictionary TextDecoderOptions {
+  boolean fatal = false;
+  boolean ignoreBOM = false;
+};
diff --git a/src/cobalt/encoding/text_decoder_test.cc b/src/cobalt/encoding/text_decoder_test.cc
new file mode 100644
index 0000000..1480f3b
--- /dev/null
+++ b/src/cobalt/encoding/text_decoder_test.cc
@@ -0,0 +1,165 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/encoding/text_decoder.h"
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "cobalt/base/polymorphic_downcast.h"
+#include "cobalt/dom/testing/stub_window.h"
+#include "cobalt/script/array_buffer_view.h"
+#include "cobalt/script/environment_settings.h"
+#include "cobalt/script/testing/mock_exception_state.h"
+#include "cobalt/script/typed_arrays.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+
+namespace cobalt {
+namespace encoding {
+namespace {
+
+//////////////////////////////////////////////////////////////////////////
+// TextDecoderTest
+//////////////////////////////////////////////////////////////////////////
+
+class TextDecoderTest : public ::testing::Test {
+ protected:
+  TextDecoderTest();
+  ~TextDecoderTest();
+
+  cobalt::dom::testing::StubWindow stub_window_;
+  script::testing::MockExceptionState exception_state_;
+};
+
+TextDecoderTest::TextDecoderTest() {}
+TextDecoderTest::~TextDecoderTest() {}
+
+//////////////////////////////////////////////////////////////////////////
+// Test cases
+//////////////////////////////////////////////////////////////////////////
+
+TEST_F(TextDecoderTest, Constructors) {
+  scoped_refptr<TextDecoder> text_decoder;
+
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  text_decoder = new TextDecoder(&exception_state_);
+  EXPECT_EQ("utf-8", text_decoder->encoding());
+  text_decoder.reset();
+
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  text_decoder = new TextDecoder("utf-16", &exception_state_);
+  // ICU's default endianness for "utf-16" appears to be little-endian.
+  EXPECT_EQ("utf-16le", text_decoder->encoding());
+  text_decoder.reset();
+
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  text_decoder = new TextDecoder("utf-16be", &exception_state_);
+  EXPECT_EQ("utf-16be", text_decoder->encoding());
+  text_decoder.reset();
+
+  EXPECT_CALL(exception_state_,
+              SetSimpleExceptionVA(script::kRangeError, _, _));
+  text_decoder = new TextDecoder("foo-encoding", &exception_state_);
+  EXPECT_EQ("", text_decoder->encoding());
+  text_decoder.reset();
+}
+
+TEST_F(TextDecoderTest, DecodeUTF8) {
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  scoped_refptr<TextDecoder> text_decoder = new TextDecoder(&exception_state_);
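+  // Each pair is (UTF-8 encoded input bytes, expected decoded string).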
+  std::vector<std::pair<std::vector<uint8>, std::string>> tests = {
+      {{72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33},
+       "Hello world!"},
+      {{72,  101, 106, 33, 32, 208, 159, 209, 128, 208, 184, 208, 178, 208,
+        181, 209, 130, 33, 32, 228, 189, 160, 229, 165, 189, 239, 188, 129},
+       "Hej! Привет! 你好!"},
+      {{208, 148, 208, 176, 33}, "Да!"},
+  };
+
+  for (const auto& test : tests) {
+    const std::string& expected = test.second;
+    const std::vector<uint8>& raw_data = test.first;
+    script::Handle<script::ArrayBuffer> array_buffer = script::ArrayBuffer::New(
+        stub_window_.global_environment(), raw_data.data(), raw_data.size());
+    EXPECT_CALL(exception_state_,
+                SetSimpleExceptionVA(script::kRangeError, _, _))
+        .Times(0);
+    std::string result = text_decoder->Decode(dom::BufferSource(array_buffer),
+                                              &exception_state_);
+    EXPECT_EQ(result, expected);
+  }
+}
+
+TEST_F(TextDecoderTest, DecodeUTF16) {
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  scoped_refptr<TextDecoder> text_decoder =
+      new TextDecoder("utf-16", &exception_state_);
+  std::vector<std::pair<std::vector<uint8>, std::string>> tests = {
+      {{0x14, 0x04, 0x30, 0x04, 0x21, 0x00}, "Да!"},
+  };
+
+  for (const auto& test : tests) {
+    const std::string& expected = test.second;
+    const std::vector<uint8>& raw_data = test.first;
+    script::Handle<script::ArrayBuffer> array_buffer = script::ArrayBuffer::New(
+        stub_window_.global_environment(), raw_data.data(), raw_data.size());
+    EXPECT_CALL(exception_state_,
+                SetSimpleExceptionVA(script::kRangeError, _, _))
+        .Times(0);
+    std::string result = text_decoder->Decode(dom::BufferSource(array_buffer),
+                                              &exception_state_);
+    EXPECT_EQ(result, expected);
+  }
+}
+
+TEST_F(TextDecoderTest, DecodeUTF16BE) {
+  EXPECT_CALL(exception_state_, SetSimpleExceptionVA(script::kRangeError, _, _))
+      .Times(0);
+  scoped_refptr<TextDecoder> text_decoder =
+      new TextDecoder("utf-16be", &exception_state_);
+  std::vector<std::pair<std::vector<uint8>, std::string>> tests = {
+      {{0x04, 0x14, 0x04, 0x30, 0x00, 0x21}, "Да!"},
+  };
+
+  for (const auto& test : tests) {
+    const std::string& expected = test.second;
+    const std::vector<uint8>& raw_data = test.first;
+    script::Handle<script::ArrayBuffer> array_buffer = script::ArrayBuffer::New(
+        stub_window_.global_environment(), raw_data.data(), raw_data.size());
+    EXPECT_CALL(exception_state_,
+                SetSimpleExceptionVA(script::kRangeError, _, _))
+        .Times(0);
+    std::string result = text_decoder->Decode(dom::BufferSource(array_buffer),
+                                              &exception_state_);
+    EXPECT_EQ(result, expected);
+  }
+}
+
+}  // namespace
+}  // namespace encoding
+}  // namespace cobalt
diff --git a/src/cobalt/encoding/text_encoder.cc b/src/cobalt/encoding/text_encoder.cc
new file mode 100644
index 0000000..eefdca8
--- /dev/null
+++ b/src/cobalt/encoding/text_encoder.cc
@@ -0,0 +1,54 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/encoding/text_encoder.h"
+
+#include <string>
+
+#include "cobalt/base/polymorphic_downcast.h"
+#include "cobalt/dom/dom_settings.h"
+
+namespace cobalt {
+namespace encoding {
+// static
+const char TextEncoder::kEncoding[] = "utf-8";
+
+TextEncoder::TextEncoder() {}
+TextEncoder::~TextEncoder() {}
+
+script::Handle<script::Uint8Array> TextEncoder::Encode(
+    script::EnvironmentSettings* settings) {
+  return Encode(settings, std::string());
+}
+
+script::Handle<script::Uint8Array> TextEncoder::Encode(
+    script::EnvironmentSettings* settings, const std::string& input) {
+  dom::DOMSettings* dom_settings =
+      base::polymorphic_downcast<dom::DOMSettings*>(settings);
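+  // Cobalt's bindings represent strings as UTF-8 internally, so the bytes of
+  // |input| are already the encoded form and can be copied out directly.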
+  return script::Uint8Array::New(dom_settings->global_environment(),
+                                 input.data(), input.size());
+}
+
+TextEncoderEncodeIntoResult TextEncoder::EncodeInto(
+    script::EnvironmentSettings* settings, const std::string& input,
+    const script::Handle<script::Uint8Array>& destination) {
+  // TODO: Figure out how to append bytes to an existing buffer.
+  NOTIMPLEMENTED();
+  return TextEncoderEncodeIntoResult();
+}
+
+}  // namespace encoding
+}  // namespace cobalt
diff --git a/src/cobalt/encoding/text_encoder.h b/src/cobalt/encoding/text_encoder.h
new file mode 100644
index 0000000..b95af53
--- /dev/null
+++ b/src/cobalt/encoding/text_encoder.h
@@ -0,0 +1,52 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_ENCODING_TEXT_ENCODER_H_
+#define COBALT_ENCODING_TEXT_ENCODER_H_
+
+#include <string>
+
+#include "cobalt/encoding/text_encoder_encode_into_result.h"
+#include "cobalt/script/environment_settings.h"
+#include "cobalt/script/typed_arrays.h"
+#include "cobalt/script/wrappable.h"
+
+namespace cobalt {
+namespace encoding {
+
+class TextEncoder : public script::Wrappable {
+ public:
+  TextEncoder();
+  ~TextEncoder();
+
+  std::string encoding() const { return kEncoding; }
+
+  script::Handle<script::Uint8Array> Encode(script::EnvironmentSettings*);
+  script::Handle<script::Uint8Array> Encode(script::EnvironmentSettings*,
+                                            const std::string&);
+  TextEncoderEncodeIntoResult EncodeInto(
+      script::EnvironmentSettings*, const std::string&,
+      const script::Handle<script::Uint8Array>&);
+
+  DEFINE_WRAPPABLE_TYPE(TextEncoder);
+
+ private:
+  static const char kEncoding[];
+
+  DISALLOW_COPY_AND_ASSIGN(TextEncoder);
+};
+
+}  // namespace encoding
+}  // namespace cobalt
+#endif  // COBALT_ENCODING_TEXT_ENCODER_H_
diff --git a/src/cobalt/encoding/text_encoder.idl b/src/cobalt/encoding/text_encoder.idl
new file mode 100644
index 0000000..7db0f72
--- /dev/null
+++ b/src/cobalt/encoding/text_encoder.idl
@@ -0,0 +1,25 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://encoding.spec.whatwg.org/#interface-textencoder
+
+[Constructor()] interface TextEncoder {
+  // https://encoding.spec.whatwg.org/#textencodercommon
+  readonly attribute DOMString encoding;
+
+  [CallWith=EnvironmentSettings] Uint8Array encode(
+      optional USVString input = "");
+  [CallWith=EnvironmentSettings] TextEncoderEncodeIntoResult encodeInto(
+      USVString source, Uint8Array destination);
+};
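+
+// Example usage from JavaScript:
+//   var bytes = new TextEncoder().encode("hi");  // Uint8Array [104, 105]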
diff --git a/src/cobalt/encoding/text_encoder_encode_into_result.idl b/src/cobalt/encoding/text_encoder_encode_into_result.idl
new file mode 100644
index 0000000..1f26a52
--- /dev/null
+++ b/src/cobalt/encoding/text_encoder_encode_into_result.idl
@@ -0,0 +1,20 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// https://encoding.spec.whatwg.org/#dictdef-textencoderencodeintoresult
+
+dictionary TextEncoderEncodeIntoResult {
+  unsigned long long read;
+  unsigned long long written;
+};
diff --git a/src/cobalt/encoding/text_encoder_test.cc b/src/cobalt/encoding/text_encoder_test.cc
new file mode 100644
index 0000000..42ce85a
--- /dev/null
+++ b/src/cobalt/encoding/text_encoder_test.cc
@@ -0,0 +1,86 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/encoding/text_encoder.h"
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "cobalt/dom/testing/stub_window.h"
+#include "cobalt/script/array_buffer_view.h"
+#include "cobalt/script/environment_settings.h"
+#include "cobalt/script/typed_arrays.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace cobalt {
+namespace encoding {
+namespace {
+
+//////////////////////////////////////////////////////////////////////////
+// TextEncoderTest
+//////////////////////////////////////////////////////////////////////////
+
+class TextEncoderTest : public ::testing::Test {
+ protected:
+  TextEncoderTest();
+  ~TextEncoderTest();
+
+  cobalt::dom::testing::StubWindow stub_window_;
+  scoped_refptr<TextEncoder> text_encoder_;
+};
+
+TextEncoderTest::TextEncoderTest() : text_encoder_(new TextEncoder()) {}
+
+TextEncoderTest::~TextEncoderTest() {}
+
+//////////////////////////////////////////////////////////////////////////
+// Test cases
+//////////////////////////////////////////////////////////////////////////
+
+TEST_F(TextEncoderTest, Constructor) {
+  EXPECT_EQ("utf-8", text_encoder_->encoding());
+}
+
+TEST_F(TextEncoderTest, Encode) {
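+  // Each pair is (input string, expected UTF-8 encoded bytes).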
+  std::vector<std::pair<std::string, std::vector<uint8>>> tests = {
+      {"Hello world!",
+       {72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33}},
+      {"Hej! Привет! 你好!",
+       {72,  101, 106, 33, 32, 208, 159, 209, 128, 208, 184, 208, 178, 208,
+        181, 209, 130, 33, 32, 228, 189, 160, 229, 165, 189, 239, 188, 129}},
+      {"Да!", {208, 148, 208, 176, 041}},
+  };
+
+  for (const auto& test : tests) {
+    const auto& expected = test.second;
+
+    script::Handle<script::Uint8Array> result =
+        text_encoder_->Encode(stub_window_.environment_settings(), test.first);
+    auto* array_data = static_cast<uint8*>(result->RawData());
+
+    // Compare the result against the expectations.
+    ASSERT_EQ(result->Length(), expected.size());
+    for (uint32 i = 0; i < result->Length(); ++i) {
+      EXPECT_EQ(array_data[i], expected[i]);
+    }
+  }
+}
+
+}  // namespace
+}  // namespace encoding
+}  // namespace cobalt
diff --git a/src/cobalt/extension/extension_test.cc b/src/cobalt/extension/extension_test.cc
index 2eb07cf..81ca611 100644
--- a/src/cobalt/extension/extension_test.cc
+++ b/src/cobalt/extension/extension_test.cc
@@ -23,6 +23,7 @@
 #include "cobalt/extension/javascript_cache.h"
 #include "cobalt/extension/media_session.h"
 #include "cobalt/extension/platform_service.h"
+#include "cobalt/extension/url_fetcher_observer.h"
 #include "starboard/system.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
@@ -284,5 +285,27 @@
       << "Extension struct should be a singleton";
 }
 
+TEST(ExtensionTest, UrlFetcherObserver) {
+  typedef CobaltExtensionUrlFetcherObserverApi ExtensionApi;
+  const char* kExtensionName = kCobaltExtensionUrlFetcherObserverName;
+
+  const ExtensionApi* extension_api =
+      static_cast<const ExtensionApi*>(SbSystemGetExtension(kExtensionName));
+  if (!extension_api) {
+    return;
+  }
+
+  EXPECT_STREQ(extension_api->name, kExtensionName);
+  EXPECT_EQ(extension_api->version, 1u);
+  EXPECT_NE(extension_api->FetcherCreated, nullptr);
+  EXPECT_NE(extension_api->FetcherDestroyed, nullptr);
+  EXPECT_NE(extension_api->StartURLRequest, nullptr);
+
+  const ExtensionApi* second_extension_api =
+      static_cast<const ExtensionApi*>(SbSystemGetExtension(kExtensionName));
+  EXPECT_EQ(second_extension_api, extension_api)
+      << "Extension struct should be a singleton";
+}
+
 }  // namespace extension
 }  // namespace cobalt
diff --git a/src/cobalt/extension/url_fetcher_observer.h b/src/cobalt/extension/url_fetcher_observer.h
new file mode 100644
index 0000000..7795689
--- /dev/null
+++ b/src/cobalt/extension/url_fetcher_observer.h
@@ -0,0 +1,51 @@
+// Copyright 2021 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_EXTENSION_URL_FETCHER_OBSERVER_H_
+#define COBALT_EXTENSION_URL_FETCHER_OBSERVER_H_
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define kCobaltExtensionUrlFetcherObserverName \
+  "dev.cobalt.extension.UrlFetcherObserver"
+
+typedef struct CobaltExtensionUrlFetcherObserverApi {
+  // Name should be the string |kCobaltExtensionUrlFetcherObserverName|.
+  // This helps to validate that the extension API is correct.
+  const char* name;
+
+  // This specifies the version of the API that is implemented.
+  uint32_t version;
+
+  // The fields below this point were added in version 1 or later.
+
+  // The UrlFetcher for the specified |url| was created.
+  void (*FetcherCreated)(const char* url);
+
+  // The UrlFetcher for the specified |url| was destroyed.
+  void (*FetcherDestroyed)(const char* url);
+
+  // The URL request started for the specified |url|.
+  void (*StartURLRequest)(const char* url);
+} CobaltExtensionUrlFetcherObserverApi;
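+
+// Platforms provide an implementation of this struct through
+// SbSystemGetExtension(kCobaltExtensionUrlFetcherObserverName); the returned
+// pointer is expected to behave as a singleton (see extension_test.cc).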
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+#endif  // COBALT_EXTENSION_URL_FETCHER_OBSERVER_H_
diff --git a/src/cobalt/layout_tests/layout_tests.cc b/src/cobalt/layout_tests/layout_tests.cc
index 17804c8..028de65 100644
--- a/src/cobalt/layout_tests/layout_tests.cc
+++ b/src/cobalt/layout_tests/layout_tests.cc
@@ -328,6 +328,10 @@
     CSSOMViewLayoutTests, Layout,
     ::testing::ValuesIn(EnumerateLayoutTests("cssom-view")),
     GetTestName());
+// Custom DOM (https://dom.spec.whatwg.org/) test cases.
+INSTANTIATE_TEST_CASE_P(DOMLayoutTests, Layout,
+                        ::testing::ValuesIn(EnumerateLayoutTests("dom")),
+                        GetTestName());
 // "dir" attribute tests.
 // https://html.spec.whatwg.org/multipage/dom.html#the-dir-attribute
 INSTANTIATE_TEST_CASE_P(
diff --git a/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible-expected.png b/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible-expected.png
new file mode 100644
index 0000000..c856572
--- /dev/null
+++ b/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible-expected.png
Binary files differ
diff --git a/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible.html b/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible.html
new file mode 100644
index 0000000..602365f
--- /dev/null
+++ b/src/cobalt/layout_tests/testdata/dom/2-7-event-target-should-be-constructible.html
@@ -0,0 +1,23 @@
+<!DOCTYPE html>
+<!--
+ | The EventTarget() constructor, when invoked, must return a new EventTarget.
+ |   https://dom.spec.whatwg.org/#dom-eventtarget-eventtarget
+ -->
+<html>
+<head>
+  <style>
+    body {
+      background-color: red;
+    }
+    body.event-target-is-constructible {
+      background-color: green;
+    }
+  </style>
+</head>
+<body>
+  <script>
+    new EventTarget();
+    document.body.classList.add('event-target-is-constructible');
+  </script>
+</body>
+</html>
diff --git a/src/cobalt/layout_tests/testdata/dom/layout_tests.txt b/src/cobalt/layout_tests/testdata/dom/layout_tests.txt
new file mode 100644
index 0000000..b7843f1
--- /dev/null
+++ b/src/cobalt/layout_tests/testdata/dom/layout_tests.txt
@@ -0,0 +1 @@
+2-7-event-target-should-be-constructible
diff --git a/src/cobalt/layout_tests/testdata/web-platform-tests/dom/web_platform_tests.txt b/src/cobalt/layout_tests/testdata/web-platform-tests/dom/web_platform_tests.txt
index 1682272..3633261 100644
--- a/src/cobalt/layout_tests/testdata/web-platform-tests/dom/web_platform_tests.txt
+++ b/src/cobalt/layout_tests/testdata/web-platform-tests/dom/web_platform_tests.txt
@@ -4,7 +4,7 @@
 # MutationObserver tests
 
 # Expected to fail until the test driver executes fully without JavaScript errors
-nodes/MutationObserver-attributes.html,DISABLE
+nodes/MutationObserver-attributes.html,FAIL
 # These fail because Cobalt doesn't support setAttributeNS.
 # nodes/MutationObserver-attributes.html,FAIL,attributes Element.setAttributeNS: creation mutation
 # nodes/MutationObserver-attributes.html,FAIL,attributes Element.setAttributeNS: prefixed attribute creation mutation
@@ -13,13 +13,13 @@
 # nodes/MutationObserver-attributes.html,FAIL,attributes Element.attributes.value: update mutation
 # Input element is not supported in Cobalt
 # nodes/MutationObserver-attributes.html,FAIL,attributes HTMLInputElement.type: type update mutation
-# Cobalt doesn't implement DomTokenList.toggle
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: token removal mutation
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: token addition mutation
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: forced token removal mutation
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: forced missing token removal no mutation
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: forced existing token addition no mutation
-# nodes/MutationObserver-attributes.html,FAIL,attributes Element.classList.toggle: forced token addition mutation
+
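+# These now pass because Cobalt implements DOMTokenList.toggle.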
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: token removal mutation
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: token addition mutation
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: forced token removal mutation
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: forced missing token removal no mutation
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: forced existing token addition no mutation
+nodes/MutationObserver-attributes.html,PASS,attributes Element.classList.toggle: forced token addition mutation
 
 # Most of these tests use operations such as insertData, replaceData, etc.,
 # which Cobalt doesn't support.
@@ -47,3 +47,8 @@
 nodes/MutationObserver-inner-outer.html,FAIL
 
 nodes/MutationObserver-takeRecords.html,PASS
+
+# Events tests
+events/EventTarget-constructible.any.html,FAIL
+events/EventTarget-constructible.any.html,PASS,A constructed EventTarget can be used as expected
diff --git a/src/cobalt/layout_tests/testdata/web-platform-tests/encoding/web_platform_tests.txt b/src/cobalt/layout_tests/testdata/web-platform-tests/encoding/web_platform_tests.txt
new file mode 100644
index 0000000..c513671
--- /dev/null
+++ b/src/cobalt/layout_tests/testdata/web-platform-tests/encoding/web_platform_tests.txt
@@ -0,0 +1,22 @@
+# Encoding API Tests
+
+api-basics.html,FAIL
+api-invalid-label.html,FAIL
+api-replacement-encodings.html,FAIL
+api-surrogates-utf8.html,FAIL
+big5-encoder.html,FAIL
+gb18030-encoder.html,FAIL
+gbk-encoder.html,FAIL
+idlharness.html,DISABLE
+iso-2022-jp-decoder.html,DISABLE
+iso-2022-jp-encoder.html,DISABLE
+single-byte-decoder.html,DISABLE
+textdecoder-byte-order-marks.html,FAIL
+textdecoder-fatal.html,FAIL
+textdecoder-fatal-streaming.html,FAIL
+textdecoder-ignorebom.html,FAIL
+textdecoder-labels.html,FAIL
+textdecoder-streaming.html,FAIL
+textdecoder-utf16-surrogates.html,FAIL
+textencoder-constructor-non-utf.html,FAIL
+textencoder-utf16-surrogates.html,FAIL
diff --git a/src/cobalt/layout_tests/web_platform_tests.cc b/src/cobalt/layout_tests/web_platform_tests.cc
index f0d6f54..1362570 100644
--- a/src/cobalt/layout_tests/web_platform_tests.cc
+++ b/src/cobalt/layout_tests/web_platform_tests.cc
@@ -87,11 +87,7 @@
   kNotrun,
 };
 
-enum TestsStatus {
-  kTestsOk = 0,
-  kTestsError,
-  kTestsTimeout
-};
+enum TestsStatus { kTestsOk = 0, kTestsError, kTestsTimeout };
 
 std::string TestStatusToString(int status) {
   switch (status) {
@@ -244,8 +240,8 @@
   // Expect that parsing test result succeeded.
   EXPECT_EQ(base::JSONReader::JSON_NO_ERROR, reader.error_code());
   if (!root) {
-    // Unparseable JSON, or empty string.
-    LOG(ERROR) << "Web Platform Tests returned unparseable JSON test result!";
+    // Unparsable JSON, or empty string.
+    LOG(ERROR) << "Web Platform Tests returned unparsable JSON test result!";
     return harness_result;
   }
 
@@ -438,11 +434,12 @@
                             "streams", "'ReadableStream' in this")),
                         GetTestName());
 
-INSTANTIATE_TEST_CASE_P(webidl, WebPlatformTest,
-    ::testing::ValuesIn(EnumerateWebPlatformTests("WebIDL")),
-    GetTestName());
+INSTANTIATE_TEST_CASE_P(
+    webidl, WebPlatformTest,
+    ::testing::ValuesIn(EnumerateWebPlatformTests("WebIDL")), GetTestName());
 
-INSTANTIATE_TEST_CASE_P(websockets, WebPlatformTest,
+INSTANTIATE_TEST_CASE_P(
+    websockets, WebPlatformTest,
     ::testing::ValuesIn(EnumerateWebPlatformTests("websockets")),
     GetTestName());
 
@@ -451,6 +448,10 @@
     ::testing::ValuesIn(EnumerateWebPlatformTests("WebCryptoAPI")),
     GetTestName());
 
+INSTANTIATE_TEST_CASE_P(
+    encoding, WebPlatformTest,
+    ::testing::ValuesIn(EnumerateWebPlatformTests("encoding")), GetTestName());
+
 #endif  // !defined(COBALT_WIN)
 
 }  // namespace layout_tests
diff --git a/src/cobalt/loader/fetcher.h b/src/cobalt/loader/fetcher.h
index f4e448f..02a8422 100644
--- a/src/cobalt/loader/fetcher.h
+++ b/src/cobalt/loader/fetcher.h
@@ -22,6 +22,7 @@
 #include "cobalt/dom/url_utils.h"
 #include "cobalt/loader/loader_types.h"
 #include "net/http/http_response_headers.h"
+#include "net/base/load_timing_info.h"
 #include "url/gurl.h"
 
 namespace cobalt {
@@ -63,9 +64,23 @@
       OnReceived(fetcher, data->data(), data->length());
     }
 
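+    // Forwards |timing_info| to the callback registered through
+    // SetLoadTimingInfoCallback(), if one is set.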
+    virtual void SetLoadTimingInfo(const net::LoadTimingInfo& timing_info) {
+      if (!load_timing_info_callback_.is_null()) {
+        load_timing_info_callback_.Run(timing_info);
+      }
+    }
+
+    virtual void SetLoadTimingInfoCallback(
+        const base::Callback<void(const net::LoadTimingInfo&)>& callback) {
+      load_timing_info_callback_ = callback;
+    }
+
    protected:
     Handler() {}
     virtual ~Handler() {}
+    base::Callback<void(const net::LoadTimingInfo&)>
+        load_timing_info_callback_;
 
    private:
     DISALLOW_COPY_AND_ASSIGN(Handler);
@@ -84,6 +99,11 @@
   // transient, indicating that the same fetch may later succeed.
   virtual bool did_fail_from_transient_error() const { return false; }
 
+  virtual void SetLoadTimingInfoCallback(
+      const base::Callback<void(const net::LoadTimingInfo&)>& callback) {
+    handler_->SetLoadTimingInfoCallback(callback);
+  }
+
  protected:
   Handler* handler() const { return handler_; }
 
diff --git a/src/cobalt/loader/loader.cc b/src/cobalt/loader/loader.cc
index 1532a74..f093eda 100644
--- a/src/cobalt/loader/loader.cc
+++ b/src/cobalt/loader/loader.cc
@@ -158,6 +158,11 @@
       base::Bind(&Loader::LoadComplete, base::Unretained(this))));
   fetcher_ = fetcher_creator_.Run(fetcher_handler_to_decoder_adaptor_.get());
 
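+  // Capture load timing reported by the fetcher so it can be read back
+  // through get_load_timing_info() once loading completes.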
+  if (fetcher_) {
+    fetcher_->SetLoadTimingInfoCallback(base::Bind(
+        &Loader::set_load_timing_info, base::Unretained(this)));
+  }
+
   // Post the error callback on the current message loop in case the loader is
   // destroyed in the callback.
   if (!fetcher_) {
@@ -169,5 +174,13 @@
   }
 }
 
+void Loader::set_load_timing_info(const net::LoadTimingInfo& timing_info) {
+  load_timing_info_ = timing_info;
+}
+
+net::LoadTimingInfo Loader::get_load_timing_info() {
+  return load_timing_info_;
+}
+
 }  // namespace loader
 }  // namespace cobalt
diff --git a/src/cobalt/loader/loader.h b/src/cobalt/loader/loader.h
index d474084..e7796fd 100644
--- a/src/cobalt/loader/loader.h
+++ b/src/cobalt/loader/loader.h
@@ -66,6 +66,9 @@
 
   void LoadComplete(const base::Optional<std::string>& status);
 
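+  // Load timing reported by the fetcher; consumed when resource timing
+  // entries are created.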
+  net::LoadTimingInfo get_load_timing_info();
+  void set_load_timing_info(const net::LoadTimingInfo& timing_info);
+
  private:
   class FetcherHandlerToDecoderAdapter;
 
@@ -89,6 +92,8 @@
   bool is_suspended_;
   bool is_load_complete_ = false;
 
+  net::LoadTimingInfo load_timing_info_;
+
   DISALLOW_COPY_AND_ASSIGN(Loader);
 };
 
diff --git a/src/cobalt/loader/net_fetcher.cc b/src/cobalt/loader/net_fetcher.cc
index c74394f..c4adfef 100644
--- a/src/cobalt/loader/net_fetcher.cc
+++ b/src/cobalt/loader/net_fetcher.cc
@@ -224,6 +224,11 @@
   }
 }
 
+void NetFetcher::ReportLoadTimingInfo(
+    const net::LoadTimingInfo& timing_info) {
+  handler()->SetLoadTimingInfo(timing_info);
+}
+
 NetFetcher::~NetFetcher() {
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
   start_callback_.Cancel();
diff --git a/src/cobalt/loader/net_fetcher.h b/src/cobalt/loader/net_fetcher.h
index a7ce094..9093173 100644
--- a/src/cobalt/loader/net_fetcher.h
+++ b/src/cobalt/loader/net_fetcher.h
@@ -54,6 +54,7 @@
   void OnURLFetchDownloadProgress(const net::URLFetcher* source,
                                   int64_t current, int64_t total,
                                   int64_t current_network_bytes) override;
+  void ReportLoadTimingInfo(const net::LoadTimingInfo& timing_info) override;
 
   net::URLFetcher* url_fetcher() const {
     DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
diff --git a/src/cobalt/loader/resource_cache.cc b/src/cobalt/loader/resource_cache.cc
index 23346db..3651481 100644
--- a/src/cobalt/loader/resource_cache.cc
+++ b/src/cobalt/loader/resource_cache.cc
@@ -25,7 +25,8 @@
 
 CachedResourceBase::OnLoadedCallbackHandler::OnLoadedCallbackHandler(
     const scoped_refptr<CachedResourceBase>& cached_resource,
-    const base::Closure& success_callback, const base::Closure& error_callback)
+    const base::Closure& success_callback,
+    const base::Closure& error_callback)
     : cached_resource_(cached_resource),
       success_callback_(success_callback),
       error_callback_(error_callback) {
@@ -45,6 +46,11 @@
   }
 }
 
+net::LoadTimingInfo
+CachedResourceBase::OnLoadedCallbackHandler::GetLoadTimingInfo() {
+  return cached_resource_->GetLoadTimingInfo();
+}
+
 CachedResourceBase::OnLoadedCallbackHandler::~OnLoadedCallbackHandler() {
   if (!success_callback_.is_null()) {
     cached_resource_->RemoveCallback(kOnLoadingSuccessCallbackType,
@@ -134,6 +140,8 @@
     const base::Optional<std::string>& error) {
   DCHECK_CALLED_ON_VALID_THREAD(cached_resource_thread_checker_);
 
+  load_timing_info_ = loader_->get_load_timing_info();
+
   // Success
   if (!error) {
     loader_.reset();
diff --git a/src/cobalt/loader/resource_cache.h b/src/cobalt/loader/resource_cache.h
index bd2890a..7677f88 100644
--- a/src/cobalt/loader/resource_cache.h
+++ b/src/cobalt/loader/resource_cache.h
@@ -68,6 +68,11 @@
         const scoped_refptr<CachedResourceBase>& cached_resource,
         const base::Closure& success_callback,
         const base::Closure& error_callback);
+
+    net::LoadTimingInfo GetLoadTimingInfo();
+    scoped_refptr<CachedResourceBase>& GetCachedResource() {
+      return cached_resource_;
+    }
     ~OnLoadedCallbackHandler();
 
    private:
@@ -89,6 +94,18 @@
   // Whether the resource located at |url_| has finished loading.
   bool IsLoadingComplete();
 
+  net::LoadTimingInfo GetLoadTimingInfo() { return load_timing_info_; }
+
+  bool get_resource_timing_created_flag() {
+    return is_resource_timing_created_flag_;
+  }
+
+  void set_resource_timing_created_flag(bool is_created) {
+    is_resource_timing_created_flag_ = is_created;
+  }
+
  protected:
   friend class ResourceCacheBase;
   friend class base::RefCountedThreadSafe<CachedResourceBase>;
@@ -113,7 +130,8 @@
         has_resource_func_(has_resource_func),
         reset_resource_func_(reset_resource_func),
         are_loading_retries_enabled_func_(are_loading_retries_enabled_func),
-        on_resource_loaded_(on_resource_loaded) {
+        on_resource_loaded_(on_resource_loaded),
+        is_resource_timing_created_flag_(false) {
     DCHECK_CALLED_ON_VALID_THREAD(cached_resource_thread_checker_);
   }
 
@@ -133,7 +151,8 @@
         has_resource_func_(has_resource_func),
         reset_resource_func_(reset_resource_func),
         are_loading_retries_enabled_func_(are_loading_retries_enabled_func),
-        on_resource_loaded_(on_resource_loaded) {
+        on_resource_loaded_(on_resource_loaded),
+        is_resource_timing_created_flag_(false) {
     DCHECK_CALLED_ON_VALID_THREAD(cached_resource_thread_checker_);
   }
 
@@ -189,6 +208,9 @@
   // error causes a resource to fail to load, a retry is scheduled.
   int retry_count_ = 0;
   std::unique_ptr<base::RetainingOneShotTimer> retry_timer_;
+
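+  // Timing captured from the loader when loading completes, and a flag
+  // recording whether a resource timing entry was already created for it.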
+  net::LoadTimingInfo load_timing_info_;
+  bool is_resource_timing_created_flag_;
 };
 
 // CachedResource requests fetching and decoding a single resource and the
@@ -367,6 +389,9 @@
       : cached_resource_loaded_callback_handler_(cached_resource,
                                                  content_produced_callback,
                                                  load_complete_callback) {}
+  scoped_refptr<CachedResourceBase>& GetCachedResource() {
+    return cached_resource_loaded_callback_handler_.GetCachedResource();
+  }
 
  private:
   // This handles adding and removing the resource loaded callbacks.
@@ -599,6 +624,8 @@
   // new items at the end of the map.
   ResourceMap weak_referenced_cached_resource_map_;
 
+  base::Callback<void(const net::LoadTimingInfo&)> load_timing_info_callback_;
+
   DISALLOW_COPY_AND_ASSIGN(ResourceCache);
 };
 
diff --git a/src/cobalt/media/base/sbplayer_pipeline.cc b/src/cobalt/media/base/sbplayer_pipeline.cc
index fc6acc0..b100e6d 100644
--- a/src/cobalt/media/base/sbplayer_pipeline.cc
+++ b/src/cobalt/media/base/sbplayer_pipeline.cc
@@ -950,32 +950,33 @@
         set_bounds_helper_.get(), allow_resume_after_suspend_,
         *decode_to_texture_output_mode_, video_frame_provider_,
         max_video_capabilities_));
+
+    if (!player_->IsValid()) {
+      player_.reset();
+      CallErrorCB(DECODER_ERROR_NOT_SUPPORTED,
+                  "SbPlayerPipeline::CreatePlayer failed: "
+                  "player_->IsValid() is false.");
+      return;
+    }
+
     SetPlaybackRateTask(playback_rate_);
     SetVolumeTask(volume_);
   }
 
-  if (player_->IsValid()) {
-    base::Closure output_mode_change_cb;
-    {
-      base::AutoLock auto_lock(lock_);
-      DCHECK(!output_mode_change_cb_.is_null());
-      output_mode_change_cb = std::move(output_mode_change_cb_);
-    }
-    output_mode_change_cb.Run();
-
-    if (audio_stream_) {
-      UpdateDecoderConfig(audio_stream_);
-    }
-    if (video_stream_) {
-      UpdateDecoderConfig(video_stream_);
-    }
-    return;
+  base::Closure output_mode_change_cb;
+  {
+    base::AutoLock auto_lock(lock_);
+    DCHECK(!output_mode_change_cb_.is_null());
+    output_mode_change_cb = std::move(output_mode_change_cb_);
   }
+  output_mode_change_cb.Run();
 
-  player_.reset();
-  CallSeekCB(DECODER_ERROR_NOT_SUPPORTED,
-             "SbPlayerPipeline::CreatePlayer failed: "
-             "player_->IsValid() is false.");
+  if (audio_stream_) {
+    UpdateDecoderConfig(audio_stream_);
+  }
+  if (video_stream_) {
+    UpdateDecoderConfig(video_stream_);
+  }
 }
 
 void SbPlayerPipeline::OnDemuxerInitialized(PipelineStatus status) {
@@ -1373,6 +1374,13 @@
 
   if (player_) {
     player_->Resume(window);
+    if (!player_->IsValid()) {
+      player_.reset();
+      CallErrorCB(DECODER_ERROR_NOT_SUPPORTED,
+                  "SbPlayerPipeline::ResumeTask failed: "
+                  "player_->IsValid() is false.");
+      return;
+    }
   }
 
   suspended_ = false;
diff --git a/src/cobalt/media/base/starboard_player.cc b/src/cobalt/media/base/starboard_player.cc
index 6d15f4a..3f34f40 100644
--- a/src/cobalt/media/base/starboard_player.cc
+++ b/src/cobalt/media/base/starboard_player.cc
@@ -163,10 +163,12 @@
 
   CreatePlayer();
 
-  task_runner->PostTask(
-      FROM_HERE,
-      base::Bind(&StarboardPlayer::CallbackHelper::ClearDecoderBufferCache,
-                 callback_helper_));
+  if (SbPlayerIsValid(player_)) {
+    task_runner->PostTask(
+        FROM_HERE,
+        base::Bind(&StarboardPlayer::CallbackHelper::ClearDecoderBufferCache,
+                   callback_helper_));
+  }
 }
 
 StarboardPlayer::~StarboardPlayer() {
@@ -473,9 +475,11 @@
   CreatePlayer();
 #endif  // SB_HAS(PLAYER_WITH_URL)
 
-  base::AutoLock auto_lock(lock_);
-  state_ = kResuming;
-  UpdateBounds_Locked();
+  if (SbPlayerIsValid(player_)) {
+    base::AutoLock auto_lock(lock_);
+    state_ = kResuming;
+    UpdateBounds_Locked();
+  }
 }
 
 namespace {
@@ -588,7 +592,9 @@
 
 #endif  // SB_HAS(PLAYER_CREATION_AND_OUTPUT_MODE_QUERY_IMPROVEMENT)
 
-  DCHECK(SbPlayerIsValid(player_));
+  if (!SbPlayerIsValid(player_)) {
+    return;
+  }
 
   if (output_mode_ == kSbPlayerOutputModeDecodeToTexture) {
     // If the player is setup to decode to texture, then provide Cobalt with
@@ -930,7 +936,7 @@
   auto output_mode = SbPlayerGetPreferredOutputMode(&creation_param);
   CHECK_NE(kSbPlayerOutputModeInvalid, output_mode);
   return output_mode;
-#else  // SB_HAS(PLAYER_CREATION_AND_OUTPUT_MODE_QUERY_IMPROVEMENT)
+#else   // SB_HAS(PLAYER_CREATION_AND_OUTPUT_MODE_QUERY_IMPROVEMENT)
   SbMediaVideoCodec video_codec = kSbMediaVideoCodecNone;
 
   video_codec = video_sample_info_.codec;
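
The hunks above apply one defensive pattern throughout: any path that creates or resumes the underlying SbPlayer must verify the handle before scheduling further work, since creation can fail on constrained platforms. A minimal sketch of the guard, with OnPlayerCreationFailed() as a hypothetical error hook:

    void AfterCreateOrResume(SbPlayer player) {
      if (!SbPlayerIsValid(player)) {
        // e.g. reset state and report DECODER_ERROR_NOT_SUPPORTED.
        OnPlayerCreationFailed();
        return;
      }
      // Only now is it safe to post tasks that dereference |player|.
    }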
diff --git a/src/cobalt/media/filters/vp9_parser.cc b/src/cobalt/media/filters/vp9_parser.cc
index 8e92f8a..b49c909 100644
--- a/src/cobalt/media/filters/vp9_parser.cc
+++ b/src/cobalt/media/filters/vp9_parser.cc
@@ -11,6 +11,8 @@
 
 #include "cobalt/media/filters/vp9_parser.h"
 
+#include <string.h>
+
 #include <algorithm>
 
 #include "base/basictypes.h"
@@ -42,9 +44,9 @@
   // probs should be in [1, 255] range.
   static_assert(sizeof(Vp9Prob) == 1,
                 "following checks assuming Vp9Prob is single byte");
-  if (SbMemoryFindByte(tx_probs_8x8, 0, sizeof(tx_probs_8x8))) return false;
-  if (SbMemoryFindByte(tx_probs_16x16, 0, sizeof(tx_probs_16x16))) return false;
-  if (SbMemoryFindByte(tx_probs_32x32, 0, sizeof(tx_probs_32x32))) return false;
+  if (memchr(tx_probs_8x8, 0, sizeof(tx_probs_8x8))) return false;
+  if (memchr(tx_probs_16x16, 0, sizeof(tx_probs_16x16))) return false;
+  if (memchr(tx_probs_32x32, 0, sizeof(tx_probs_32x32))) return false;
 
   for (auto& a : coef_probs) {
     for (auto& ai : a) {
@@ -60,32 +62,25 @@
       }
     }
   }
-  if (SbMemoryFindByte(skip_prob, 0, sizeof(skip_prob))) return false;
-  if (SbMemoryFindByte(inter_mode_probs, 0, sizeof(inter_mode_probs)))
-    return false;
-  if (SbMemoryFindByte(interp_filter_probs, 0, sizeof(interp_filter_probs)))
-    return false;
-  if (SbMemoryFindByte(is_inter_prob, 0, sizeof(is_inter_prob))) return false;
-  if (SbMemoryFindByte(comp_mode_prob, 0, sizeof(comp_mode_prob))) return false;
-  if (SbMemoryFindByte(single_ref_prob, 0, sizeof(single_ref_prob)))
-    return false;
-  if (SbMemoryFindByte(comp_ref_prob, 0, sizeof(comp_ref_prob))) return false;
-  if (SbMemoryFindByte(y_mode_probs, 0, sizeof(y_mode_probs))) return false;
-  if (SbMemoryFindByte(uv_mode_probs, 0, sizeof(uv_mode_probs))) return false;
-  if (SbMemoryFindByte(partition_probs, 0, sizeof(partition_probs)))
-    return false;
-  if (SbMemoryFindByte(mv_joint_probs, 0, sizeof(mv_joint_probs))) return false;
-  if (SbMemoryFindByte(mv_sign_prob, 0, sizeof(mv_sign_prob))) return false;
-  if (SbMemoryFindByte(mv_class_probs, 0, sizeof(mv_class_probs))) return false;
-  if (SbMemoryFindByte(mv_class0_bit_prob, 0, sizeof(mv_class0_bit_prob)))
-    return false;
-  if (SbMemoryFindByte(mv_bits_prob, 0, sizeof(mv_bits_prob))) return false;
-  if (SbMemoryFindByte(mv_class0_fr_probs, 0, sizeof(mv_class0_fr_probs)))
-    return false;
-  if (SbMemoryFindByte(mv_fr_probs, 0, sizeof(mv_fr_probs))) return false;
-  if (SbMemoryFindByte(mv_class0_hp_prob, 0, sizeof(mv_class0_hp_prob)))
-    return false;
-  if (SbMemoryFindByte(mv_hp_prob, 0, sizeof(mv_hp_prob))) return false;
+  if (memchr(skip_prob, 0, sizeof(skip_prob))) return false;
+  if (memchr(inter_mode_probs, 0, sizeof(inter_mode_probs))) return false;
+  if (memchr(interp_filter_probs, 0, sizeof(interp_filter_probs))) return false;
+  if (memchr(is_inter_prob, 0, sizeof(is_inter_prob))) return false;
+  if (memchr(comp_mode_prob, 0, sizeof(comp_mode_prob))) return false;
+  if (memchr(single_ref_prob, 0, sizeof(single_ref_prob))) return false;
+  if (memchr(comp_ref_prob, 0, sizeof(comp_ref_prob))) return false;
+  if (memchr(y_mode_probs, 0, sizeof(y_mode_probs))) return false;
+  if (memchr(uv_mode_probs, 0, sizeof(uv_mode_probs))) return false;
+  if (memchr(partition_probs, 0, sizeof(partition_probs))) return false;
+  if (memchr(mv_joint_probs, 0, sizeof(mv_joint_probs))) return false;
+  if (memchr(mv_sign_prob, 0, sizeof(mv_sign_prob))) return false;
+  if (memchr(mv_class_probs, 0, sizeof(mv_class_probs))) return false;
+  if (memchr(mv_class0_bit_prob, 0, sizeof(mv_class0_bit_prob))) return false;
+  if (memchr(mv_bits_prob, 0, sizeof(mv_bits_prob))) return false;
+  if (memchr(mv_class0_fr_probs, 0, sizeof(mv_class0_fr_probs))) return false;
+  if (memchr(mv_fr_probs, 0, sizeof(mv_fr_probs))) return false;
+  if (memchr(mv_class0_hp_prob, 0, sizeof(mv_class0_hp_prob))) return false;
+  if (memchr(mv_hp_prob, 0, sizeof(mv_hp_prob))) return false;
 
   return true;
 }
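
The rewritten checks rely on the standard memchr() contract, which matches the removed SbMemoryFindByte(): return a pointer to the first occurrence of the byte, or NULL if it does not appear within the given size. A standalone sketch of the validation idiom used above:

    #include <stddef.h>
    #include <string.h>

    // A VP9 probability table is valid only if every byte is non-zero.
    bool AllBytesNonZero(const void* table, size_t size) {
      return memchr(table, 0, size) == NULL;
    }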
diff --git a/src/cobalt/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/src/cobalt/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index a501e29..64e16f9 100644
--- a/src/cobalt/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/src/cobalt/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -4,6 +4,8 @@
 
 #include "cobalt/media/formats/mpeg/mpeg_audio_stream_parser_base.h"
 
+#include <string.h>
+
 #include <algorithm>
 #include <memory>
 
@@ -334,7 +336,7 @@
   while (start < end) {
     int bytes_left = end - start;
     const uint8_t* candidate_start_code =
-        static_cast<const uint8_t*>(SbMemoryFindByte(start, 0xff, bytes_left));
+        static_cast<const uint8_t*>(memchr(start, 0xff, bytes_left));
 
     if (!candidate_start_code) return 0;
 
diff --git a/src/cobalt/media/formats/webm/webm_parser.cc b/src/cobalt/media/formats/webm/webm_parser.cc
index 36c42ce..5350253 100644
--- a/src/cobalt/media/formats/webm/webm_parser.cc
+++ b/src/cobalt/media/formats/webm/webm_parser.cc
@@ -11,13 +11,14 @@
 // encrypted request for comments specification is here
 // http://wiki.webmproject.org/encryption/webm-encryption-rfc
 
+#include <string.h>
+
 #include <iomanip>
 #include <limits>
 
 #include "base/basictypes.h"
 #include "base/logging.h"
 #include "cobalt/media/formats/webm/webm_constants.h"
-#include "starboard/memory.h"
 #include "starboard/types.h"
 
 namespace cobalt {
@@ -578,8 +579,7 @@
 
 static int ParseString(const uint8_t* buf, int size, int id,
                        WebMParserClient* client) {
-  const uint8_t* end =
-      static_cast<const uint8_t*>(SbMemoryFindByte(buf, '\0', size));
+  const uint8_t* end = static_cast<const uint8_t*>(memchr(buf, '\0', size));
   int length = (end != NULL) ? static_cast<int>(end - buf) : size;
   std::string str(reinterpret_cast<const char*>(buf), length);
   return client->OnString(id, str) ? size : -1;
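
ParseString() shows the bounded-string idiom shared by several of these memchr() conversions: treat the buffer as NUL-terminated when a '\0' occurs within |size| bytes, and take all |size| bytes otherwise. Extracted as a self-contained sketch:

    #include <stdint.h>
    #include <string.h>

    #include <string>

    std::string BoundedString(const uint8_t* buf, int size) {
      const uint8_t* end =
          static_cast<const uint8_t*>(memchr(buf, '\0', size));
      const int length = (end != NULL) ? static_cast<int>(end - buf) : size;
      return std::string(reinterpret_cast<const char*>(buf), length);
    }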
diff --git a/src/cobalt/script/javascript_engine.h b/src/cobalt/script/javascript_engine.h
index 9f91737..e16a112 100644
--- a/src/cobalt/script/javascript_engine.h
+++ b/src/cobalt/script/javascript_engine.h
@@ -81,6 +81,9 @@
   // be unable to provide perfectly accurate values.
   virtual HeapStatistics GetHeapStatistics() = 0;
 
+  // Update the date/time configuration in response to a change in the
+  // system time zone setting.
+  virtual void UpdateDateTimeConfiguration() = 0;
+
  protected:
   virtual ~JavaScriptEngine() {}
   friend std::unique_ptr<JavaScriptEngine>::deleter_type;
diff --git a/src/cobalt/script/v8c/v8c.gyp b/src/cobalt/script/v8c/v8c.gyp
index 414d3b3..1a2331d 100644
--- a/src/cobalt/script/v8c/v8c.gyp
+++ b/src/cobalt/script/v8c/v8c.gyp
@@ -85,23 +85,32 @@
         '<(DEPTH)/third_party/v8/v8.gyp:v8_base_without_compiler',
         'embed_v8c_resources_as_header_files',
       ],
+      'conditions': [
+        ['target_arch=="arm64" or target_arch=="x64"', {
+          'defines': [
+            # Enables pointer compression on 64-bit platforms for Cobalt.
+            'V8_COMPRESS_POINTERS',
+            'V8_31BIT_SMIS_ON_64BIT_ARCH',
+          ],
+        }],
+      ],
       'defines': [
         'ENGINE_SUPPORTS_INT64',
-        # The file name to store our V8 startup snapshot file at.  This is a
-        # serialized representation of a |v8::Isolate| after completing all
-        # tasks prior to creation of the global object (e.g., executing self
-        # hosted JavaScript to implement ECMAScript level features).  This
-        # state is architecture dependent, and in fact, dependent on anything
-        # that could affect JavaScript execution (such as #defines), and thus
-        # must be unique with respect to binary, which is why we build it out
-        # of platform name and configuration.
-        'V8C_INTERNAL_STARTUP_DATA_CACHE_FILE_NAME="<(starboard_platform_name)_<(cobalt_config)_v8_startup_snapshot.bin"',
       ],
       'all_dependent_settings': {
         'defines': [
           'ENGINE_SUPPORTS_INDEXED_DELETERS',
           'ENGINE_SUPPORTS_INT64',
         ],
+        'conditions': [
+          ['target_arch=="arm64" or target_arch=="x64"', {
+            'defines': [
+              # Enables pointer compression on 64-bit platforms for Cobalt.
+              'V8_COMPRESS_POINTERS',
+              'V8_31BIT_SMIS_ON_64BIT_ARCH',
+            ],
+          }],
+        ],
       },
     },
 
@@ -120,6 +129,15 @@
         '<(DEPTH)/third_party/v8/v8.gyp:v8_base_without_compiler',
         '<(DEPTH)/third_party/v8/v8.gyp:v8_libplatform',
       ],
+      'conditions': [
+        ['target_arch=="arm64" or target_arch=="x64"', {
+          'defines': [
+            # Enables pointer compression on 64-bit platforms for Cobalt.
+            'V8_COMPRESS_POINTERS',
+            'V8_31BIT_SMIS_ON_64BIT_ARCH',
+          ],
+        }],
+      ],
     },
 
     {
diff --git a/src/cobalt/script/v8c/v8c_engine.cc b/src/cobalt/script/v8c/v8c_engine.cc
index dbf4f02..03dbccd 100644
--- a/src/cobalt/script/v8c/v8c_engine.cc
+++ b/src/cobalt/script/v8c/v8c_engine.cc
@@ -215,6 +215,11 @@
           v8_heap_statistics.used_heap_size()};
 }
 
+void V8cEngine::UpdateDateTimeConfiguration() {
+  isolate_->DateTimeConfigurationChangeNotification(
+      v8::Isolate::TimeZoneDetection::kRedetect);
+}
+
 }  // namespace v8c
 
 // static
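
A hedged usage sketch for the new hook: a platform event handler can forward a time-zone change so V8 redetects its date/time configuration. OnTimeZoneChanged() is an illustrative name, and the engine pointer is assumed to outlive the handler:

    void OnTimeZoneChanged(script::JavaScriptEngine* engine) {
      // Internally reaches v8::Isolate::DateTimeConfigurationChangeNotification
      // with TimeZoneDetection::kRedetect, as implemented above.
      engine->UpdateDateTimeConfiguration();
    }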
diff --git a/src/cobalt/script/v8c/v8c_engine.h b/src/cobalt/script/v8c/v8c_engine.h
index c7285fa..62d164e 100644
--- a/src/cobalt/script/v8c/v8c_engine.h
+++ b/src/cobalt/script/v8c/v8c_engine.h
@@ -45,6 +45,7 @@
   void AdjustAmountOfExternalAllocatedMemory(int64_t bytes) override;
   bool RegisterErrorHandler(JavaScriptEngine::ErrorHandler handler) override;
   HeapStatistics GetHeapStatistics() override;
+  void UpdateDateTimeConfiguration() override;
 
   v8::Isolate* isolate() const { return isolate_; }
   V8cHeapTracer* heap_tracer() const { return v8c_heap_tracer_.get(); }
diff --git a/src/cobalt/script/v8c/v8c_global_environment.cc b/src/cobalt/script/v8c/v8c_global_environment.cc
index 87d4b64..28ac8a9 100644
--- a/src/cobalt/script/v8c/v8c_global_environment.cc
+++ b/src/cobalt/script/v8c/v8c_global_environment.cc
@@ -244,11 +244,7 @@
 }
 
 void V8cGlobalEnvironment::RemoveRoot(Traceable* traceable) {
-  CHECK(isolate_);
-  V8cEngine* v8c_engine = V8cEngine::GetFromIsolate(isolate_);
-  CHECK(v8c_engine);
-  CHECK(v8c_engine->heap_tracer());
-  v8c_engine->heap_tracer()->RemoveRoot(traceable);
+  V8cEngine::GetFromIsolate(isolate_)->heap_tracer()->RemoveRoot(traceable);
 }
 
 void V8cGlobalEnvironment::PreventGarbageCollection(
diff --git a/src/cobalt/script/v8c/v8c_script_debugger.cc b/src/cobalt/script/v8c/v8c_script_debugger.cc
index 4c83436..2778aec 100644
--- a/src/cobalt/script/v8c/v8c_script_debugger.cc
+++ b/src/cobalt/script/v8c/v8c_script_debugger.cc
@@ -242,11 +242,14 @@
   std::unique_ptr<v8_inspector::protocol::Runtime::API::RemoteObject>
       remote_object = inspector_session_->wrapObject(
           context, v8_value, ToStringView(group), false /*generatePreview*/);
-  v8_crdtp::ObjectSerializer serializer;
-  serializer.AddField(v8_crdtp::MakeSpan("remoteObject"), remote_object);
-  std::unique_ptr<v8_crdtp::Serializable> result = serializer.Finish();
-  std::vector<uint8> serialized = result->Serialize();
-  return std::string(serialized.begin(), serialized.end());
+  std::vector<uint8_t> out;
+  remote_object->AppendSerialized(&out);
+  std::string remote_object_str;
+  v8_crdtp::Status status = v8_crdtp::json::ConvertCBORToJSON(
+      v8_crdtp::span<uint8_t>(out.data(), out.size()),
+      &remote_object_str);
+  CHECK(status.ok()) << status.Message();
+  return remote_object_str;
 }
 
 const script::ValueHandleHolder* V8cScriptDebugger::LookupRemoteObjectId(
diff --git a/src/cobalt/site/docs/development/setup-android.md b/src/cobalt/site/docs/development/setup-android.md
index 918c38f..61849d8 100644
--- a/src/cobalt/site/docs/development/setup-android.md
+++ b/src/cobalt/site/docs/development/setup-android.md
@@ -17,7 +17,7 @@
     sudo apt-get install python python-pip
     ```
 
-    If `python-pip` is not available via your package mangaer, you can install `pip` following [recommended instructions](https://pip.pypa.io/en/stable/installing/) from the official Python guide.
+    If `python-pip` is not available via your package manager, you can install `pip` following [recommended instructions](https://pip.pypa.io/en/stable/installing/) from the official Python guide.
 
     There are also some Python module requirements:
 
@@ -80,9 +80,6 @@
          directory will already exist since you've already run gyp_cobalt for an
          android target, so you'll see a warning in the setup wizard that an SDK
          was detected, which is okay.
-          * The path may also be `$HOME/cobalt-toolchains/AndroidSdk` if you
-            previously had an older environment configured on your machine -
-            this is okay.
         *  Select both `Android SDK` and `Android SDK Platform` (whatever
            current version is presented should be fine)
     *   On the `SDK Platforms` tab select:
diff --git a/src/cobalt/test/document_loader.h b/src/cobalt/test/document_loader.h
index 83aa262..dc0d948 100644
--- a/src/cobalt/test/document_loader.h
+++ b/src/cobalt/test/document_loader.h
@@ -72,7 +72,8 @@
             NULL /* remote_font_cache */, NULL /* mesh_cache */,
             dom_stat_tracker_.get(), "" /* language */,
             base::kApplicationStateStarted,
-            NULL /* synchronous_loader_interrupt */) {}
+            NULL /* synchronous_loader_interrupt */,
+            NULL /* performance */) {}
   void Load(const GURL& url) {
     // Load the document in a nested message loop.
     dom::Document::Options options(url);
diff --git a/src/cobalt/test/empty_document.h b/src/cobalt/test/empty_document.h
index 6d1342f..e865a5f 100644
--- a/src/cobalt/test/empty_document.h
+++ b/src/cobalt/test/empty_document.h
@@ -37,7 +37,8 @@
         html_element_context_(
             &environment_settings_, NULL, NULL, css_parser_.get(), NULL, NULL,
             NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
-            dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL),
+            dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL,
+            NULL),
         document_(new dom::Document(&html_element_context_)) {}
 
   dom::Document* document() { return document_.get(); }
diff --git a/src/cobalt/webdriver/get_element_text_test.cc b/src/cobalt/webdriver/get_element_text_test.cc
index 8725037..357ca38 100644
--- a/src/cobalt/webdriver/get_element_text_test.cc
+++ b/src/cobalt/webdriver/get_element_text_test.cc
@@ -50,7 +50,8 @@
         html_element_context_(
             &environment_settings_, NULL, NULL, css_parser_.get(), NULL, NULL,
             NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
-            dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL) {
+            dom_stat_tracker_.get(), "", base::kApplicationStateStarted, NULL,
+            NULL) {
   }
 
   void SetUp() override {
diff --git a/src/cobalt/xhr/xml_http_request.cc b/src/cobalt/xhr/xml_http_request.cc
index 0345959..565ceb6 100644
--- a/src/cobalt/xhr/xml_http_request.cc
+++ b/src/cobalt/xhr/xml_http_request.cc
@@ -30,6 +30,7 @@
 #include "cobalt/dom/csp_delegate.h"
 #include "cobalt/dom/dom_settings.h"
 #include "cobalt/dom/global_stats.h"
+#include "cobalt/dom/performance.h"
 #include "cobalt/dom/progress_event.h"
 #include "cobalt/dom/window.h"
 #include "cobalt/dom/xml_document.h"
@@ -66,6 +67,9 @@
     "connect", "trace", "track",
 };
 
+// https://www.w3.org/TR/resource-timing-1/#dom-performanceresourcetiming-initiatortype
+const char* kPerformanceResourceTimingInitiatorType = "xmlhttprequest";
+
 bool MethodNameToRequestType(const std::string& method,
                              net::URLFetcher::RequestType* request_type) {
   if (base::LowerCaseEqualsASCII(method, "get")) {
@@ -752,6 +756,8 @@
       OnRedirect(*source->GetResponseHeaders());
       return;
     }
+    // Create the Performance Resource Timing entry after the fetch completes.
+    GetLoadTimingInfoAndCreateResourceTiming();
   }
 
   const net::URLRequestStatus& status = source->GetStatus();
@@ -1193,6 +1199,18 @@
                   url_fetcher_generation_);
 }
 
+void XMLHttpRequest::ReportLoadTimingInfo(
+    const net::LoadTimingInfo& timing_info) {
+  load_timing_info_ = timing_info;
+}
+
+void XMLHttpRequest::GetLoadTimingInfoAndCreateResourceTiming() {
+  if (settings_->window()->performance() == nullptr) return;
+  settings_->window()->performance()->CreatePerformanceResourceTiming(
+      load_timing_info_, kPerformanceResourceTimingInitiatorType,
+      request_url_.spec());
+}
+
 std::ostream& operator<<(std::ostream& out, const XMLHttpRequest& xhr) {
 #if !defined(COBALT_BUILD_TYPE_GOLD)
   base::StringPiece response_text("");
diff --git a/src/cobalt/xhr/xml_http_request.h b/src/cobalt/xhr/xml_http_request.h
index d98b1cd..d39aa5d 100644
--- a/src/cobalt/xhr/xml_http_request.h
+++ b/src/cobalt/xhr/xml_http_request.h
@@ -37,6 +37,7 @@
 #include "cobalt/xhr/url_fetcher_buffer_writer.h"
 #include "cobalt/xhr/xml_http_request_event_target.h"
 #include "cobalt/xhr/xml_http_request_upload.h"
+#include "net/base/load_timing_info.h"
 #include "net/http/http_request_headers.h"
 #include "net/http/http_response_headers.h"
 #include "net/url_request/url_fetcher.h"
@@ -186,6 +187,10 @@
   // Called from bindings layer to tie objects' lifetimes to this XHR instance.
   XMLHttpRequestUpload* upload_or_null() { return upload_.get(); }
 
+  void ReportLoadTimingInfo(const net::LoadTimingInfo& timing_info) override;
+  // Creates a Performance Resource Timing entry for this XMLHttpRequest.
+  void GetLoadTimingInfoAndCreateResourceTiming();
+
   friend std::ostream& operator<<(std::ostream& os, const XMLHttpRequest& xhr);
   DEFINE_WRAPPABLE_TYPE(XMLHttpRequest);
   void TraceMembers(script::Tracer* tracer) override;
@@ -320,6 +325,8 @@
   bool is_data_url_;
   int url_fetcher_generation_ = -1;
 
+  net::LoadTimingInfo load_timing_info_;
+
   DISALLOW_COPY_AND_ASSIGN(XMLHttpRequest);
 };
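
Together with the net/ changes below, the timing flow is: URLRequest runs the registered callback once load timing is finalized, URLFetcherCore forwards it to its delegate, and the delegate stores it until the fetch completes. A condensed sketch of a timing-aware delegate; TimingAwareDelegate is illustrative, and because the default ReportLoadTimingInfo() is a no-op, existing delegates are unaffected:

    class TimingAwareDelegate : public net::URLFetcherDelegate {
     public:
      void ReportLoadTimingInfo(
          const net::LoadTimingInfo& timing_info) override {
        last_timing_ = timing_info;  // Held until the fetch completes.
      }
      void OnURLFetchComplete(const net::URLFetcher* source) override {
        // Build a PerformanceResourceTiming entry from |last_timing_| here.
      }

     private:
      net::LoadTimingInfo last_timing_;
    };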
 
diff --git a/src/net/base/mime_sniffer.cc b/src/net/base/mime_sniffer.cc
index f04e264..75aa030 100644
--- a/src/net/base/mime_sniffer.cc
+++ b/src/net/base/mime_sniffer.cc
@@ -83,6 +83,8 @@
 // Note that our definition of HTML payload is much stricter than IE's
 // definition and roughly the same as Firefox's definition.
 
+#include <string.h>
+
 #include <string>
 
 #include "net/base/mime_sniffer.h"
@@ -93,7 +95,6 @@
 #include "base/strings/string_util.h"
 #include "nb/cpp14oncpp11.h"
 #include "starboard/common/string.h"
-#include "starboard/memory.h"
 #include "starboard/types.h"
 #include "url/gurl.h"
 
@@ -324,8 +325,7 @@
   // To compare with magic strings, we need to compute strlen(content), but
   // content might not actually have a null terminator.  In that case, we
   // pretend the length is content_size.
-  const char* end =
-      static_cast<const char*>(SbMemoryFindByte(content, '\0', size));
+  const char* end = static_cast<const char*>(memchr(content, '\0', size));
   const size_t content_strlen =
       (end != NULL) ? static_cast<size_t>(end - content) : size;
 
@@ -564,7 +564,7 @@
   // based on the name (or possibly attributes) of that tag.
   const int kMaxTagIterations = 5;
   for (int i = 0; i < kMaxTagIterations && pos < end; ++i) {
-    pos = reinterpret_cast<const char*>(SbMemoryFindByte(pos, '<', end - pos));
+    pos = reinterpret_cast<const char*>(memchr(pos, '<', end - pos));
     if (!pos)
       return false;
 
diff --git a/src/net/filter/gzip_header.cc b/src/net/filter/gzip_header.cc
index 20bc267..51992e2 100644
--- a/src/net/filter/gzip_header.cc
+++ b/src/net/filter/gzip_header.cc
@@ -4,13 +4,14 @@
 
 #include "net/filter/gzip_header.h"
 
+#include <string.h>
+
 #include <algorithm>
 
 #include "base/logging.h"
 #include "third_party/zlib/zlib.h"
 
 #include "starboard/client_porting/poem/string_poem.h"
-#include "starboard/memory.h"
 
 namespace net {
 
@@ -125,8 +126,7 @@
           break;
         }
         // See if we can find the end of the \0-terminated FNAME field.
-        pos = reinterpret_cast<const uint8_t*>(
-            SbMemoryFindByte(pos, '\0', (end - pos)));
+        pos = reinterpret_cast<const uint8_t*>(memchr(pos, '\0', (end - pos)));
         if ( pos != NULL ) {
           pos++;  // advance past the '\0'
           flags_ &= ~FLAG_FNAME;   // we're done with the FNAME stuff
@@ -142,8 +142,7 @@
           break;
         }
         // See if we can find the end of the \0-terminated FCOMMENT field.
-        pos = reinterpret_cast<const uint8_t*>(
-            SbMemoryFindByte(pos, '\0', (end - pos)));
+        pos = reinterpret_cast<const uint8_t*>(memchr(pos, '\0', (end - pos)));
         if ( pos != NULL ) {
           pos++;  // advance past the '\0'
           flags_ &= ~FLAG_FCOMMENT;   // we're done with the FCOMMENT stuff
diff --git a/src/net/net.gyp b/src/net/net.gyp
index e13d879..13d2c9b 100644
--- a/src/net/net.gyp
+++ b/src/net/net.gyp
@@ -1757,7 +1757,7 @@
         'proto_in_dir': 'nqe/proto',
         'proto_out_dir': 'net/nqe/proto',
       },
-      'includes': ['<(DEPTH)/build/protoc.gypi'],
+      'includes': ['<(DEPTH)/build_gyp/protoc.gypi'],
     },
     {
       'target_name': 'net_quic_proto',
@@ -1779,7 +1779,7 @@
         'proto_in_dir': 'third_party/quic/core/proto/',
         'proto_out_dir': 'net/third_party/quic/core/proto/',
       },
-      'includes': ['<(DEPTH)/build/protoc.gypi'],
+      'includes': ['<(DEPTH)/build_gyp/protoc.gypi'],
 
       'all_dependent_settings': {
         'include_dirs': [
diff --git a/src/net/url_request/url_fetcher_core.cc b/src/net/url_request/url_fetcher_core.cc
index 48c99c0..f23ee8b 100644
--- a/src/net/url_request/url_fetcher_core.cc
+++ b/src/net/url_request/url_fetcher_core.cc
@@ -137,6 +137,19 @@
       total_response_bytes_(-1),
       traffic_annotation_(traffic_annotation) {
   CHECK(original_url_.is_valid());
+
+  const CobaltExtensionUrlFetcherObserverApi* observer_extension =
+      static_cast<const CobaltExtensionUrlFetcherObserverApi*>(
+          SbSystemGetExtension(kCobaltExtensionUrlFetcherObserverName));
+  if (observer_extension &&
+      SbStringCompareAll(observer_extension->name,
+                         kCobaltExtensionUrlFetcherObserverName) == 0 &&
+      observer_extension->version >= 1) {
+    observer_extension_ = observer_extension;
+    observer_extension_->FetcherCreated(original_url_.spec().c_str());
+  } else {
+    observer_extension_ = nullptr;
+  }
 }
 
 void URLFetcherCore::Start() {
@@ -598,6 +611,9 @@
 }
 
 URLFetcherCore::~URLFetcherCore() {
+  if (observer_extension_ != nullptr) {
+    observer_extension_->FetcherDestroyed(original_url_.spec().c_str());
+  }
   // |request_| should be NULL. If not, it's unsafe to delete it here since we
   // may not be on the IO thread.
   DCHECK(!request_.get());
@@ -626,6 +642,9 @@
 void URLFetcherCore::StartURLRequest() {
   DCHECK(network_task_runner_->BelongsToCurrentThread());
 
+  if (observer_extension_ != nullptr) {
+    observer_extension_->StartURLRequest(original_url_.spec().c_str());
+  }
   if (was_cancelled_) {
     // Since StartURLRequest() is posted as a *delayed* task, it may
     // run after the URLFetcher was already stopped.
@@ -747,6 +766,8 @@
   if (!extra_request_headers_.IsEmpty())
     request_->SetExtraRequestHeaders(extra_request_headers_);
 
+  request_->SetLoadTimingInfoCallback(base::Bind(
+      &URLFetcherCore::GetLoadTimingInfo, base::Unretained(this)));
   request_->Start();
 }
 
@@ -825,7 +846,6 @@
 void URLFetcherCore::OnCompletedURLRequest(
     base::TimeDelta backoff_delay) {
   DCHECK(delegate_task_runner_->RunsTasksInCurrentSequence());
-
   // Save the status and backoff_delay so that delegates can read it.
   if (delegate_) {
     backoff_delay_ = backoff_delay;
@@ -835,8 +855,9 @@
 
 void URLFetcherCore::InformDelegateFetchIsComplete() {
   DCHECK(delegate_task_runner_->RunsTasksInCurrentSequence());
-  if (delegate_)
+  if (delegate_) {
     delegate_->OnURLFetchComplete(fetcher_);
+  }
 }
 
 void URLFetcherCore::NotifyMalformedContent() {
@@ -1092,4 +1113,12 @@
   DCHECK(upload_stream_factory_.is_null());
 }
 
+#if defined(STARBOARD)
+void URLFetcherCore::GetLoadTimingInfo(
+    const net::LoadTimingInfo& timing_info) {
+  DCHECK(delegate_);
+  delegate_->ReportLoadTimingInfo(timing_info);
+}
+#endif  // defined(STARBOARD)
+
 }  // namespace net
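
The constructor change above follows the usual Cobalt extension handshake: look the extension up by name via SbSystemGetExtension(), then verify both the reported name and a minimum version before trusting the function table. As a reusable sketch:

    const CobaltExtensionUrlFetcherObserverApi* GetUrlFetcherObserver() {
      auto* extension =
          static_cast<const CobaltExtensionUrlFetcherObserverApi*>(
              SbSystemGetExtension(kCobaltExtensionUrlFetcherObserverName));
      if (extension &&
          SbStringCompareAll(extension->name,
                             kCobaltExtensionUrlFetcherObserverName) == 0 &&
          extension->version >= 1) {
        return extension;
      }
      return nullptr;  // The platform does not provide the observer.
    }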
diff --git a/src/net/url_request/url_fetcher_core.h b/src/net/url_request/url_fetcher_core.h
index 3a18e18..4359ac5 100644
--- a/src/net/url_request/url_fetcher_core.h
+++ b/src/net/url_request/url_fetcher_core.h
@@ -15,8 +15,12 @@
 #include "base/macros.h"
 #include "base/memory/ref_counted.h"
 #include "base/timer/timer.h"
+#include "cobalt/extension/url_fetcher_observer.h"
 #include "net/base/chunked_upload_data_stream.h"
 #include "net/base/host_port_pair.h"
+#if defined(STARBOARD)
+#include "net/base/load_timing_info.h"
+#endif  // defined(STARBOARD)
 #include "net/base/proxy_server.h"
 #include "net/http/http_request_headers.h"
 #include "net/traffic_annotation/network_traffic_annotation.h"
@@ -158,7 +162,9 @@
   static int GetNumFetcherCores();
   static void SetEnableInterceptionForTests(bool enabled);
   static void SetIgnoreCertificateRequests(bool ignored);
-
+#if defined(STARBOARD)
+  void GetLoadTimingInfo(const net::LoadTimingInfo& timing_info);
+#endif  // defined(STARBOARD)
  private:
   friend class base::RefCountedThreadSafe<URLFetcherCore>;
 
@@ -370,6 +376,8 @@
 
   static base::LazyInstance<Registry>::DestructorAtExit g_registry;
 
+  const CobaltExtensionUrlFetcherObserverApi* observer_extension_;
+
   DISALLOW_COPY_AND_ASSIGN(URLFetcherCore);
 };
 
diff --git a/src/net/url_request/url_fetcher_delegate.h b/src/net/url_request/url_fetcher_delegate.h
index 87d8f23..aebb062 100644
--- a/src/net/url_request/url_fetcher_delegate.h
+++ b/src/net/url_request/url_fetcher_delegate.h
@@ -9,6 +9,7 @@
 #include <string>
 
 #include "net/base/net_export.h"
+#include "net/base/load_timing_info.h"
 #include "starboard/types.h"
 
 namespace net {
@@ -43,6 +44,10 @@
   virtual void OnURLFetchUploadProgress(const URLFetcher* source,
                                         int64_t current,
                                         int64_t total);
+#if defined(STARBOARD)
+  virtual void ReportLoadTimingInfo(
+      const net::LoadTimingInfo& timing_info) {}
+#endif  // defined(STARBOARD)
 
  protected:
   virtual ~URLFetcherDelegate();
diff --git a/src/net/url_request/url_request.cc b/src/net/url_request/url_request.cc
index 7200524..0767613 100644
--- a/src/net/url_request/url_request.cc
+++ b/src/net/url_request/url_request.cc
@@ -1156,6 +1156,11 @@
     raw_header_size_ = GetTotalReceivedBytes();
 
     ConvertRealLoadTimesToBlockingTimes(&load_timing_info_);
+#if defined(STARBOARD)
+    if (!load_timing_info_callback_.is_null()) {
+      load_timing_info_callback_.Run(load_timing_info_);
+    }
+#endif  // defined(STARBOARD)
   }
 }
 
diff --git a/src/net/url_request/url_request.h b/src/net/url_request/url_request.h
index 4c7fd73..652cf63 100644
--- a/src/net/url_request/url_request.h
+++ b/src/net/url_request/url_request.h
@@ -729,6 +729,13 @@
 
   base::WeakPtr<URLRequest> GetWeakPtr();
 
+#if defined(STARBOARD)
+  void SetLoadTimingInfoCallback(
+      const base::Callback<void(const net::LoadTimingInfo&)>& callback) {
+    load_timing_info_callback_ = callback;
+  }
+#endif  // defined(STARBOARD)
+
  protected:
   // Allow the URLRequestJob class to control the is_pending() flag.
   void set_is_pending(bool value) { is_pending_ = value; }
@@ -959,6 +966,10 @@
 
   base::WeakPtrFactory<URLRequest> weak_factory_;
 
+#if defined(STARBOARD)
+  base::Callback<void(const net::LoadTimingInfo&)> load_timing_info_callback_;
+#endif  // defined(STARBOARD)
+
   DISALLOW_COPY_AND_ASSIGN(URLRequest);
 };
 
diff --git a/src/sql/sql.gyp b/src/sql/sql.gyp
index 5744dd6..25ddea5 100644
--- a/src/sql/sql.gyp
+++ b/src/sql/sql.gyp
@@ -82,7 +82,7 @@
             'test_suite_name': 'sql_unittests',
             'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)sql_unittests<(SHARED_LIB_SUFFIX)',
           },
-          'includes': [ '../build/apk_test.gypi' ],
+          'includes': [ '../build_gyp/apk_test.gypi' ],
         },
       ],
     }],
diff --git a/src/starboard/BUILD.gn b/src/starboard/BUILD.gn
index 4037f31..cec7a29 100644
--- a/src/starboard/BUILD.gn
+++ b/src/starboard/BUILD.gn
@@ -15,16 +15,57 @@
 group("gn_all") {
   testonly = true
 
-  deps = [ ":starboard_headers_only" ]
+  deps = [
+    ":starboard",
+    "//starboard/client_porting/eztime",
+    "//starboard/client_porting/eztime:eztime_test",
+    "//starboard/client_porting/icu_init",
+    "//starboard/client_porting/poem:poem_unittests",
+    "//starboard/examples/window:starboard_window_example",
+    "//starboard/nplb",
+    "//starboard/nplb/nplb_evergreen_compat_tests",
+
+    # "//starboard/tools",  TODO(andrewsavage)
+  ]
+
+  if (gl_type != "none") {
+    deps += [ "//starboard/examples/glclear:starboard_gclear_example" ]
+  }
+
+  if (has_platform_targets) {
+    deps += [ "//$starboard_path/platform_targets" ]
+  }
+
+  if (has_platform_tests) {
+    deps += [ "//$starboard_path/starboard_platform_tests" ]
+  } else {
+    deps += [ ":starboard_platform_tests" ]
+  }
+
+  if (sb_enable_benchmark) {
+    deps += [ "//starboard/benchmark" ]
+  }
 }
 
 group("starboard") {
-  deps = [ ":starboard_headers_only" ]
+  public_deps = [ ":starboard_headers_only" ]
 
   if (sb_evergreen) {
-    deps += []
+    public_deps += []
   } else {
-    public_deps = [ "//$starboard_path:starboard_platform" ]
+    public_deps += [
+      "//$starboard_path:starboard_platform",
+      "//starboard/client_porting/cwrappers",
+      "//starboard/client_porting/eztime",
+      "//starboard/common",
+    ]
+
+    if (sb_evergreen_compatible) {
+      # TODO(andrewsavage): This should not be stubbed out
+      public_deps += [ "//third_party/crashpad/wrapper:wrapper_stub" ]
+    } else {
+      public_deps += [ "//third_party/crashpad/wrapper:wrapper_stub" ]
+    }
 
     if (final_executable_type == "shared_library" &&
         current_toolchain != default_toolchain) {
@@ -36,6 +77,10 @@
 }
 
 source_set("starboard_headers_only") {
+  # We include starboard/common/log.h in starboard_headers, but some common
+  # files include starboard headers.
+  check_includes = false
+
   sources = [
     "atomic.h",
     "audio_sink.h",
@@ -83,3 +128,18 @@
     # "<!@pymod_do_main(starboard.build.gyp_functions file_glob <(DEPTH)/starboard/private *.h)",
   ]
 }
+
+if (!has_platform_tests) {
+  # If 'starboard_platform_tests' is not defined by the platform, then an
+  # empty 'starboard_platform_tests' target is defined.
+  target(gtest_target_type, "starboard_platform_tests") {
+    testonly = true
+
+    sources = [ "//starboard/common/test_main.cc" ]
+
+    public_deps = [
+      ":starboard",
+      "//testing/gmock",
+    ]
+  }
+}
diff --git a/src/starboard/android/apk/build.id b/src/starboard/android/apk/build.id
index 9541871..da7d572 100644
--- a/src/starboard/android/apk/build.id
+++ b/src/starboard/android/apk/build.id
@@ -1 +1 @@
-301297
\ No newline at end of file
+301684
\ No newline at end of file
diff --git a/src/starboard/android/shared/starboard_platform.gypi b/src/starboard/android/shared/starboard_platform.gypi
index 43930fc..74bf44e 100644
--- a/src/starboard/android/shared/starboard_platform.gypi
+++ b/src/starboard/android/shared/starboard_platform.gypi
@@ -159,6 +159,7 @@
         'system_get_property.cc',
         'system_get_stack.cc',
         'system_has_capability.cc',
+        'system_network_is_disconnected.cc',
         'system_platform_error.cc',
         'system_request_stop.cc',
         'system_request_suspend.cc',
@@ -442,7 +443,6 @@
         '<(DEPTH)/starboard/shared/stub/system_get_total_gpu_memory.cc',
         '<(DEPTH)/starboard/shared/stub/system_get_used_gpu_memory.cc',
         '<(DEPTH)/starboard/shared/stub/system_hide_splash_screen.cc',
-        '<(DEPTH)/starboard/shared/stub/system_network_is_disconnected.cc',
         '<(DEPTH)/starboard/shared/stub/system_request_blur.cc',
         '<(DEPTH)/starboard/shared/stub/system_request_focus.cc',
         '<(DEPTH)/starboard/shared/stub/system_request_pause.cc',
diff --git a/src/starboard/android/shared/system_network_is_disconnected.cc b/src/starboard/android/shared/system_network_is_disconnected.cc
new file mode 100644
index 0000000..87c9a86
--- /dev/null
+++ b/src/starboard/android/shared/system_network_is_disconnected.cc
@@ -0,0 +1,33 @@
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "starboard/system.h"
+#include "starboard/android/shared/jni_env_ext.h"
+
+namespace starboard {
+namespace android {
+namespace shared {
+
+bool IsSystemNetworkConnected() {
+  JniEnvExt* env = JniEnvExt::Get();
+  jboolean j_is_connected = env->CallStarboardBooleanMethodOrAbort(
+      "isNetworkConnected", "()Z");
+  return j_is_connected;
+}
+
+}  // namespace shared
+}  // namespace android
+}  // namespace starboard
+
+bool SbSystemNetworkIsDisconnected() {
+  return !starboard::android::shared::IsSystemNetworkConnected();
+}
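
A hedged usage sketch for the new Starboard API; the surrounding error-handling policy is illustrative:

    if (SbSystemNetworkIsDisconnected()) {
      // Report an offline condition instead of retrying the request.
    } else {
      // Treat the failure as transient and schedule a retry.
    }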
diff --git a/src/starboard/benchmark/BUILD.gn b/src/starboard/benchmark/BUILD.gn
new file mode 100644
index 0000000..ffe1e80
--- /dev/null
+++ b/src/starboard/benchmark/BUILD.gn
@@ -0,0 +1,29 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(final_executable_type, "benchmark") {
+  testonly = true
+  defines = [ "STARBOARD_IMPLEMENTATION" ]
+
+  sources = [
+    "//starboard/common/benchmark_main.cc",
+    "memory_benchmark.cc",
+    "thread_benchmark.cc",
+  ]
+
+  public_deps = [
+    "//starboard",
+    "//third_party/google_benchmark",
+  ]
+}
diff --git a/src/starboard/build/config/BUILD.gn b/src/starboard/build/config/BUILD.gn
index 0a63123..2b84450 100644
--- a/src/starboard/build/config/BUILD.gn
+++ b/src/starboard/build/config/BUILD.gn
@@ -12,8 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-config("base") {
+config("include_root") {
   include_dirs = [ "//" ]
+}
+
+config("base") {
   defines = []
 
   if (final_executable_type == "shared_library") {
@@ -53,23 +56,41 @@
 }
 
 config("host") {
-  if (host_os == "linux") {
-    ldflags = [ "-pthread" ]
-    cflags_cc = [ "--std=gnu++14" ]
+  if (current_toolchain == host_toolchain) {
+    if (host_os == "linux") {
+      ldflags = [ "-pthread" ]
+      cflags_cc = [ "--std=gnu++14" ]
 
-    if (target_cpu == "arm" || target_cpu == "x86") {
-      cflags = [ "-m32" ]
-      ldflags += [
-        "-target",
-        "i386-unknown-linux-gnu",
-        "-latomic",
-      ]
+      if (target_cpu == "arm" || target_cpu == "x86") {
+        cflags = [ "-m32" ]
+        ldflags += [
+          "-target",
+          "i386-unknown-linux-gnu",
+          "-latomic",
+        ]
+      }
     }
   }
 }
 
 config("target") {
-  cflags = []
+  if (current_toolchain != host_toolchain) {
+    if (final_executable_type == "shared_library") {
+      # Rewrite main() functions into StarboardMain. TODO: This is a
+      # hack; it would be better to be more surgical here.
+      defines = [ "main=StarboardMain" ]
+    } else {
+      # To link into a shared library on Linux and similar platforms,
+      # the compiler must be told to generate Position Independent Code.
+      # This appears to cause errors when linking the code statically,
+      # however.
+      cflags = [ "-fPIC" ]
+    }
+
+    if (is_starboard) {
+      configs = [ ":starboard" ]
+    }
+  }
 }
 
 config("starboard") {
diff --git a/src/starboard/build/config/BUILDCONFIG.gn b/src/starboard/build/config/BUILDCONFIG.gn
index f9b09ee..a1f6de5 100644
--- a/src/starboard/build/config/BUILDCONFIG.gn
+++ b/src/starboard/build/config/BUILDCONFIG.gn
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import("//starboard/build/config/os_definitions.gni")
 import("//starboard/build/platforms.gni")
 
 declare_args() {
@@ -24,22 +25,59 @@
   is_starboard = true
 }
 
+if (target_os == "") {
+  target_os = host_os
+}
+if (target_cpu == "") {
+  target_cpu = host_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+
 is_debug = build_type == "debug"
 is_devel = build_type == "devel"
 is_qa = build_type == "qa"
 is_gold = build_type == "gold"
 assert(is_debug || is_devel || is_qa || is_gold)
 
+# Set some variables we never want to change
 sb_allows_memory_tracking = !is_gold
+host_byteorder = "little"
+is_official_build = false  # Chromium's build files expect this to be set.
 
 starboard_path = platforms["$target_platform"]
+import("//$starboard_path/platform_configuration/configuration.gni")
 
 host_toolchain = "//$starboard_path/toolchain:host"  # TODO(andrewsavage)
 shlib_extension = ".so"  # TODO(andrewsavage)
 set_default_toolchain(host_toolchain)
 
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+#
+# WHAT GOES HERE?
+#
+# Other than the main compiler and linker configs, the only reason for a config
+# to be in this list is if some targets need to explicitly override that config
+# by removing it. This is how targets opt out of flags. If you don't have that
+# requirement and just need to add a config everywhere, reference it as a
+# sub-config of an existing one, most commonly the main "compiler" one.
+
 default_compiler_configs = [
   "//starboard/build/config:base",
+  "//starboard/build/config:host",
+  "//starboard/build/config:include_root",
+  "//starboard/build/config:target",
   "//$starboard_path/platform_configuration",
 ]
 
@@ -47,20 +85,56 @@
   default_compiler_configs += [ "//starboard/build/config:starboard" ]
 }
 
-import("//$starboard_path/platform_configuration/configuration.gni")
-
 set_defaults("static_library") {
   configs = default_compiler_configs
 }
 set_defaults("source_set") {
   configs = default_compiler_configs
 }
+set_defaults("loadable_module") {
+  configs = default_compiler_configs
+}
 set_defaults("executable") {
   configs = default_compiler_configs
 }
 set_defaults("shared_library") {
   configs = default_compiler_configs
 }
-set_defaults("loadable_module") {
-  configs = default_compiler_configs
+
+import("$install_target_path")
+template("executable") {
+  executable(target_name) {
+    forward_variables_from(invoker, "*")
+  }
+
+  executable_target_name = target_name
+  install_target(target_name + "_install") {
+    forward_variables_from(invoker,
+                           [
+                             "content",
+                             "testonly",
+                           ])
+    installable_target_name = executable_target_name
+    type = "executable"
+  }
 }
+
+template("shared_library") {
+  shared_library(target_name) {
+    forward_variables_from(invoker, "*")
+  }
+
+  shared_library_target_name = target_name
+  install_target(target_name + "_install") {
+    forward_variables_from(invoker,
+                           [
+                             "content",
+                             "testonly",
+                           ])
+    installable_target_name = shared_library_target_name
+    type = "shared_library"
+  }
+}
+
+# Import this down here as it relies on variables set during configuration.
+import("//starboard/build/config/components.gni")
diff --git a/src/starboard/build/config/base_configuration.gni b/src/starboard/build/config/base_configuration.gni
index 28feb25..eb54e90 100644
--- a/src/starboard/build/config/base_configuration.gni
+++ b/src/starboard/build/config/base_configuration.gni
@@ -89,4 +89,14 @@
 
   # Where the Starboard ABI file for this platform can be found.
   sabi_path = ""
+
+  # Whether the platform implements platform tests.
+  has_platform_tests = false
+
+  # Whether the platform has platform-specific targets to depend on.
+  has_platform_targets = false
+
+  # The path to the gni file containing the install_target template which
+  # defines how the build should produce the install/ directory.
+  install_target_path = "//starboard/build/install/mock_install.gni"
 }
diff --git a/src/starboard/build/config/components.gni b/src/starboard/build/config/components.gni
new file mode 100644
index 0000000..d983e19
--- /dev/null
+++ b/src/starboard/build/config/components.gni
@@ -0,0 +1,66 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_starboard) {
+  is_component_build = false
+}
+
+# A helper for forwarding testonly and visibility.
+# Forwarding "*" does not include variables from outer scopes (to avoid copying
+# all globals into each template invocation), so it will not pick up
+# file-scoped or outer-template-scoped variables. Normally this behavior is
+# desired, but "visibility" and "testonly" are commonly defined in outer scopes.
+# Explicitly forwarding them in forward_variables_from() works around this
+# nuance. See //build/docs/writing_gn_templates.md#using-forward_variables_from
+TESTONLY_AND_VISIBILITY = [
+  "testonly",
+  "visibility",
+]
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
+template("component") {
+  if (is_component_build) {
+    _component_mode = "shared_library"
+  } else if (defined(invoker.static_component_type)) {
+    assert(invoker.static_component_type == "static_library" ||
+           invoker.static_component_type == "source_set")
+    _component_mode = invoker.static_component_type
+  } else if (!defined(invoker.sources) || invoker.sources == []) {
+    # When there are no sources defined, use a source set to avoid creating
+    # an empty static library (which generally doesn't work).
+    _component_mode = "source_set"
+  } else {
+    _component_mode = "static_library"
+  }
+  target(_component_mode, target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+  }
+}
+
+# Component defaults
+set_defaults("component") {
+  if (is_component_build) {
+    configs = default_shared_library_configs
+    if (is_android) {
+      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+    }
+  } else {
+    configs = default_compiler_configs
+  }
+}
diff --git a/src/starboard/build/config/migration_changes.md b/src/starboard/build/config/migration_changes.md
index e500b1a..24961c9 100644
--- a/src/starboard/build/config/migration_changes.md
+++ b/src/starboard/build/config/migration_changes.md
@@ -7,9 +7,13 @@
 sb_disable_microphone_idl
 starboard_path
 tizen_os
+includes_starboard
 
 ## Renamed:
 sb_deploy_output_dir -> sb_install_output_dir
 OS == starboard -> is_starboard
 
 ## Added:
+has_platform_tests
+has_platform_targets
+install_target_path
diff --git a/src/starboard/build/config/os_definitions.gni b/src/starboard/build/config/os_definitions.gni
new file mode 100644
index 0000000..6a5747b
--- /dev/null
+++ b/src/starboard/build/config/os_definitions.gni
@@ -0,0 +1,33 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Fuchsia and Windows).
+# - is_linux is true for desktop Linux, but not for ChromeOS nor Android (which
+#   is generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+is_android = current_os == "android"
+is_chromeos = current_os == "chromeos"
+is_fuchsia = current_os == "fuchsia"
+is_ios = current_os == "ios"
+is_linux = current_os == "linux"
+is_mac = current_os == "mac"
+is_nacl = current_os == "nacl"
+is_win = current_os == "win" || current_os == "winuwp"
+
+is_apple = is_ios || is_mac
+is_posix = !is_win && !is_fuchsia
diff --git a/src/starboard/build/gyp_runner.py b/src/starboard/build/gyp_runner.py
index dd8b834..3fa8ff7 100644
--- a/src/starboard/build/gyp_runner.py
+++ b/src/starboard/build/gyp_runner.py
@@ -227,7 +227,7 @@
 
         # TODO: Remove dependency on common.gypi by moving the required bits
         # into base_configuration.gypi.
-        os.path.join(paths.REPOSITORY_ROOT, 'build', 'common.gypi'),
+        os.path.join(paths.REPOSITORY_ROOT, 'build_gyp', 'common.gypi'),
     ]
     gyp_includes.extend(self.platform_configuration.GetIncludes())
     if self.app_configuration:
diff --git a/src/starboard/build/install/install_target.gni b/src/starboard/build/install/install_target.gni
new file mode 100644
index 0000000..aa7e715
--- /dev/null
+++ b/src/starboard/build/install/install_target.gni
@@ -0,0 +1,43 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+template("install_target") {
+  if (invoker.type == "executable") {
+    install_subdir = "bin"
+  } else if (invoker.type == "shared_library") {
+    install_subdir = "lib"
+  } else {
+    assert(false, "You can only install an executable or shared library.")
+  }
+
+  copy("copy_" + target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "installable_target_name",
+                             "testonly",
+                           ])
+    deps = [ ":$installable_target_name" ]
+
+    sources = [ "$root_out_dir/$installable_target_name" ]
+    outputs = [ "$root_out_dir/install/$install_subdir/{{source_file_part}}" ]
+  }
+
+  if (defined(invoker.content)) {
+    copy("copy_content_" + target_name) {
+      forward_variables_from(invoker, [ "testonly" ])
+      sources = invoker.content
+      outputs = [ "$root_out_dir/install/usr/share/cobalt/{{source_root_relative_dir}}/{{source_file_part}}" ]
+    }
+  }
+}
diff --git a/src/starboard/build/install/mock_install.gni b/src/starboard/build/install/mock_install.gni
new file mode 100644
index 0000000..2c37068
--- /dev/null
+++ b/src/starboard/build/install/mock_install.gni
@@ -0,0 +1,18 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+template("install_target") {
+  not_needed("*")
+  not_needed(invoker, "*")
+}
diff --git a/src/starboard/client_porting/cwrappers/BUILD.gn b/src/starboard/client_porting/cwrappers/BUILD.gn
new file mode 100644
index 0000000..a98f447
--- /dev/null
+++ b/src/starboard/client_porting/cwrappers/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source_set("cwrappers") {
+  sources = [ "pow_wrapper.cc" ]
+
+  public_deps = [ "//starboard/common" ]
+}
diff --git a/src/starboard/client_porting/eztime/BUILD.gn b/src/starboard/client_porting/eztime/BUILD.gn
new file mode 100644
index 0000000..912e10d
--- /dev/null
+++ b/src/starboard/client_porting/eztime/BUILD.gn
@@ -0,0 +1,42 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source_set("eztime") {
+  sources = [
+    "eztime.cc",
+    "eztime.h",
+  ]
+
+  public_deps = [
+    "//starboard:starboard_headers_only",
+    "//starboard/client_porting/icu_init",
+  ]
+}
+
+target(gtest_target_type, "eztime_test") {
+  testonly = true
+
+  sources = [
+    "//starboard/common/test_main.cc",
+    "eztime_test.cc",
+    "test_constants.h",
+  ]
+
+  deps = [
+    ":eztime",
+    "//starboard",
+    "//testing/gmock",
+    "//testing/gtest",
+  ]
+}
diff --git a/src/starboard/client_porting/icu_init/BUILD.gn b/src/starboard/client_porting/icu_init/BUILD.gn
new file mode 100644
index 0000000..ea44684
--- /dev/null
+++ b/src/starboard/client_porting/icu_init/BUILD.gn
@@ -0,0 +1,25 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source_set("icu_init") {
+  sources = [
+    "icu_init.cc",
+    "icu_init.h",
+  ]
+
+  public_deps = [
+    "//starboard/common",
+    "//third_party/icu",
+  ]
+}
diff --git a/src/starboard/client_porting/poem/BUILD.gn b/src/starboard/client_porting/poem/BUILD.gn
new file mode 100644
index 0000000..eb23fef
--- /dev/null
+++ b/src/starboard/client_porting/poem/BUILD.gn
@@ -0,0 +1,28 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(gtest_target_type, "poem_unittests") {
+  testonly = true
+
+  sources = [
+    "include_all.c",
+    "main.cc",
+    "string_poem_test.cc",
+  ]
+
+  deps = [
+    "//starboard",
+    "//testing/gtest",
+  ]
+}
diff --git a/src/starboard/client_porting/poem/string_poem.h b/src/starboard/client_porting/poem/string_poem.h
index 8e849c7..1d5733f 100644
--- a/src/starboard/client_porting/poem/string_poem.h
+++ b/src/starboard/client_porting/poem/string_poem.h
@@ -203,8 +203,6 @@
 #undef strcspn
 #define strcspn(s1, s2) PoemGetSpanUntilCharacter(s1, s2)
 
-#undef memchr
-#define memchr(s, c, n) SbMemoryFindByte(s, c, n)
 #undef memset
 #define memset(s, c, n) SbMemorySet(s, c, n)
 #undef memcpy
diff --git a/src/starboard/common/BUILD.gn b/src/starboard/common/BUILD.gn
new file mode 100644
index 0000000..bb3b724
--- /dev/null
+++ b/src/starboard/common/BUILD.gn
@@ -0,0 +1,74 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The "common" target contains facilities provided by Starboard that are common
+# to all platforms.
+
+source_set("common") {
+  public_deps = [ "//starboard:starboard_headers_only" ]
+  check_includes = false
+
+  sources = [
+    "//starboard/shared/media_session/playback_state.cc",
+    "//starboard/shared/media_session/playback_state.h",
+    "byte_swap.h",
+    "common.cc",
+    "condition_variable.cc",
+    "condition_variable.h",
+    "configuration_defaults.cc",
+    "configuration_defaults.h",
+    "experimental/concurrency_debug.cc",
+    "experimental/concurrency_debug.h",
+    "file.cc",
+    "file.h",
+    "flat_map.h",
+    "locked_ptr.h",
+    "log.cc",
+    "log.h",
+    "media.cc",
+    "media.h",
+    "move.h",
+    "murmurhash2.cc",
+    "murmurhash2.h",
+    "mutex.cc",
+    "mutex.h",
+    "new.cc",
+    "optional.cc",
+    "optional.h",
+    "queue.h",
+    "recursive_mutex.cc",
+    "recursive_mutex.h",
+    "ref_counted.cc",
+    "ref_counted.h",
+    "reset_and_return.h",
+    "rwlock.cc",
+    "rwlock.h",
+    "scoped_ptr.h",
+    "semaphore.cc",
+    "semaphore.h",
+    "socket.cc",
+    "socket.h",
+    "spin_lock.cc",
+    "spin_lock.h",
+    "state_machine.cc",
+    "state_machine.h",
+    "storage.cc",
+    "storage.h",
+    "string.h",
+    "thread.cc",
+    "thread.h",
+    "thread_collision_warner.cc",
+    "thread_collision_warner.h",
+  ]
+}
diff --git a/src/starboard/configuration.h b/src/starboard/configuration.h
index 7220c81..bbaa847 100644
--- a/src/starboard/configuration.h
+++ b/src/starboard/configuration.h
@@ -101,6 +101,11 @@
 
 // Rename misspelled accessibility event types
 #define SB_ACCESSIBILITY_EVENTS_RENAMED_VERSION SB_EXPERIMENTAL_API_VERSION
+
+// Introduce event for date / time configuration changes
+#define SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION \
+  SB_EXPERIMENTAL_API_VERSION
+
 // --- Release Candidate Feature Defines -------------------------------------
 
 // --- Common Detected Features ----------------------------------------------
diff --git a/src/starboard/doc/c99.md b/src/starboard/doc/c99.md
index ce99647..8bdf3ac 100644
--- a/src/starboard/doc/c99.md
+++ b/src/starboard/doc/c99.md
@@ -47,3 +47,9 @@
 * strtoul
 * strtoull
 * qsort
+### <string.h>
+* memchr
+* memcmp
+* memcpy
+* memmove
+* memset
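
Note: the <string.h> additions documented above are what allow this import to drop the SbMemory* wrappers elsewhere in the diff (string_poem.h, boringssl, ots, freetype2). A minimal sketch of the pattern, assuming only the C99 policy documented here; the helper name is illustrative:

    #include <cstring>

    // With <string.h> on the allowed list, ports can call memchr directly
    // instead of going through the removed SbMemoryFindByte wrapper.
    static bool ContainsByte(const char* buf, std::size_t len, char needle) {
      // std::memchr returns a pointer to the first matching byte, or nullptr.
      return std::memchr(buf, needle, len) != nullptr;
    }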
diff --git a/src/starboard/egl_and_gles/BUILD.gn b/src/starboard/egl_and_gles/BUILD.gn
new file mode 100644
index 0000000..41dbb9b
--- /dev/null
+++ b/src/starboard/egl_and_gles/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Targets can depend on the egl_and_gles GN target to pull in a
+# platform-specific implementation of EGL/GLES, whether that ultimately ends
+# up being ANGLE on Windows, system libraries on Linux, or a custom
+# implementation of EGL/GLES on an exotic platform. Depending on this target
+# also adds the EGL and GLES system headers to the include directories of
+# dependent targets.
+#
+# Which implementation is used is determined by the value of the |gl_type| GN
+# variable defined in the current platform's configuration.gni.
+
+group("egl_and_gles") {
+  public_deps = [ ":egl_and_gles_$gl_type" ]
+}
diff --git a/src/starboard/event.h b/src/starboard/event.h
index db449ad..1769547 100644
--- a/src/starboard/event.h
+++ b/src/starboard/event.h
@@ -447,6 +447,13 @@
   // to implement window.ononline DOM event.
   kSbEventTypeOsNetworkConnected,
 #endif  // SB_API_VERSION >= SB_EXPERIMENTAL_API_VERSION
+
+#if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
+  // The platform has detected a date and/or time configuration change (such
+  // as a change in the timezone setting). This should trigger the application
+  // to re-query the relevant APIs to update the date and time.
+  kSbEventDateTimeConfigurationChanged,
+#endif  // SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
 } SbEventType;
 
 // Structure representing a Starboard event and its data.
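
A minimal sketch of how an application's SbEventHandle callback could consume the new event; re-querying SbTimeZoneGetName() is an illustrative assumption, not something this change mandates:

    #include "starboard/event.h"
    #include "starboard/time_zone.h"

    void SbEventHandle(const SbEvent* event) {
      switch (event->type) {
    #if SB_API_VERSION >= SB_EVENT_DATE_TIME_CONFIGURATION_CHANGED_VERSION
        case kSbEventDateTimeConfigurationChanged:
          // The date/time configuration changed (e.g. the timezone setting),
          // so re-query the platform instead of trusting cached values.
          const char* time_zone = SbTimeZoneGetName();
          (void)time_zone;  // An application would refresh its state here.
          break;
    #endif
        default:
          break;
      }
    }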
diff --git a/src/starboard/evergreen/shared/gyp_configuration.gypi b/src/starboard/evergreen/shared/gyp_configuration.gypi
index da40422..67ebcf1 100644
--- a/src/starboard/evergreen/shared/gyp_configuration.gypi
+++ b/src/starboard/evergreen/shared/gyp_configuration.gypi
@@ -24,7 +24,7 @@
     'default_renderer_options_dependency': '<(DEPTH)/cobalt/renderer/default_options_starboard.gyp:default_options',
     'javascript_engine': 'v8',
 
-    'cobalt_font_package': 'minimal',
+    'cobalt_font_package': 'empty',
 
     # Override that omits the "data" subdirectory.
     # TODO: Remove when omitted for all platforms in base_configuration.gypi.
diff --git a/src/starboard/evergreen/shared/launcher.py b/src/starboard/evergreen/shared/launcher.py
index c8a2787..3915278 100644
--- a/src/starboard/evergreen/shared/launcher.py
+++ b/src/starboard/evergreen/shared/launcher.py
@@ -55,6 +55,7 @@
     env_variables['ASAN_OPTIONS'] = ':'.join(asan_options)
     kwargs['env_variables'] = env_variables
 
+    # pylint: disable=super-with-arguments
     super(Launcher, self).__init__(platform, target_name, config, device_id,
                                    **kwargs)
 
@@ -110,18 +111,28 @@
         output_file=self.output_file,
         out_directory=self.staging_directory,
         coverage_directory=self.coverage_directory,
-        env_variables=self.env_variables)
+        env_variables=self.env_variables,
+        log_targets=False)
 
   def Run(self):
     """Redirects to the ELF Loader platform's abstract loader implementation."""
 
     return_code = 1
 
+    logging.info('-' * 32)
+    logging.info('Starting to run target: %s', self.target_name)
+    logging.info('=' * 32)
+
     try:
       return_code = self.launcher.Run()
     except Exception:  # pylint: disable=broad-except
       logging.exception('Error occurred while running test.')
 
+    logging.info('-' * 32)
+    logging.info('Finished running target: %s', self.target_name)
+    logging.info('=' * 32)
+
     return return_code
 
   def Kill(self):
diff --git a/src/starboard/examples/glclear/BUILD.gn b/src/starboard/examples/glclear/BUILD.gn
new file mode 100644
index 0000000..20347b3
--- /dev/null
+++ b/src/starboard/examples/glclear/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(final_executable_type, "starboard_glclear_example") {
+  deps = [ "//starboard" ]
+  if (!sb_evergreen) {
+    deps += [ "//starboard/egl_and_gles" ]
+  }
+
+  sources = [ "main.cc" ]
+}
diff --git a/src/starboard/examples/window/BUILD.gn b/src/starboard/examples/window/BUILD.gn
new file mode 100644
index 0000000..ddd7b02
--- /dev/null
+++ b/src/starboard/examples/window/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(final_executable_type, "starboard_window_example") {
+  testonly = true
+
+  sources = [ "main.cc" ]
+  public_deps = [ "//starboard" ]
+}
diff --git a/src/starboard/linux/shared/starboard_platform.gypi b/src/starboard/linux/shared/starboard_platform.gypi
index 5f52d3d..edac2b8 100644
--- a/src/starboard/linux/shared/starboard_platform.gypi
+++ b/src/starboard/linux/shared/starboard_platform.gypi
@@ -406,7 +406,6 @@
           '<(DEPTH)/starboard/shared/stub/drm_generate_session_update_request.cc',
           '<(DEPTH)/starboard/shared/stub/drm_get_metrics.cc',
           '<(DEPTH)/starboard/shared/stub/drm_is_server_certificate_updatable.cc',
-          '<(DEPTH)/starboard/shared/stub/drm_system_internal.h',
           '<(DEPTH)/starboard/shared/stub/drm_update_server_certificate.cc',
           '<(DEPTH)/starboard/shared/stub/drm_update_session.cc',
         ],
diff --git a/src/starboard/nplb/BUILD.gn b/src/starboard/nplb/BUILD.gn
new file mode 100644
index 0000000..e85ab2f
--- /dev/null
+++ b/src/starboard/nplb/BUILD.gn
@@ -0,0 +1,338 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(gtest_target_type, "nplb") {
+  testonly = true
+
+  sources = [
+    "//starboard/common/test_main.cc",
+    "//starboard/nplb/sabi/alignment_test.cc",
+    "//starboard/nplb/sabi/endianness_test.cc",
+    "//starboard/nplb/sabi/signedness_and_size_of_enum_test.cc",
+    "//starboard/nplb/sabi/signedness_of_char_test.cc",
+    "//starboard/nplb/sabi/size_test.cc",
+    "//starboard/nplb/sabi/struct_alignment_test.cc",
+    "//starboard/testing/fake_graphics_context_provider.cc",
+    "//starboard/testing/fake_graphics_context_provider.h",
+    "accessibility_test.cc",
+    "align_test.cc",
+    "atomic_base_test.cc",
+    "atomic_test.cc",
+    "audio_sink_create_test.cc",
+    "audio_sink_destroy_test.cc",
+    "audio_sink_get_max_channels_test.cc",
+    "audio_sink_get_min_buffer_size_in_frames_test.cc",
+    "audio_sink_get_nearest_supported_sample_frequency_test.cc",
+    "audio_sink_helpers.cc",
+    "audio_sink_helpers.h",
+    "audio_sink_is_audio_frame_storage_type_supported_test.cc",
+    "audio_sink_is_audio_sample_type_supported_test.cc",
+    "audio_sink_test.cc",
+    "blitter_blit_rect_to_rect_test.cc",
+    "blitter_blit_rect_to_rect_tiled_test.cc",
+    "blitter_blit_rects_to_rects_test.cc",
+    "blitter_create_context_test.cc",
+    "blitter_create_default_device_test.cc",
+    "blitter_create_pixel_data_test.cc",
+    "blitter_create_render_target_surface_test.cc",
+    "blitter_create_surface_from_pixel_data_test.cc",
+    "blitter_create_swap_chain_from_window_test.cc",
+    "blitter_destroy_context_test.cc",
+    "blitter_destroy_device_test.cc",
+    "blitter_destroy_pixel_data_test.cc",
+    "blitter_destroy_surface_test.cc",
+    "blitter_destroy_swap_chain_test.cc",
+    "blitter_download_surface_pixels_test.cc",
+    "blitter_fill_rect_test.cc",
+    "blitter_flip_swap_chain_test.cc",
+    "blitter_flush_context_test.cc",
+    "blitter_get_max_contexts_test.cc",
+    "blitter_get_pixel_data_pitch_in_bytes_test.cc",
+    "blitter_get_pixel_data_pointer_test.cc",
+    "blitter_get_render_target_from_surface_test.cc",
+    "blitter_get_render_target_from_swap_chain_test.cc",
+    "blitter_get_surface_info_test.cc",
+    "blitter_helpers.cc",
+    "blitter_is_pixel_format_supported_by_download_surface_pixels_test.cc",
+    "blitter_is_pixel_format_supported_by_pixel_data_test.cc",
+    "blitter_is_pixel_format_supported_by_render_target_surface_test.cc",
+    "blitter_set_blending_test.cc",
+    "blitter_set_modulate_blits_with_color_test.cc",
+    "blitter_set_render_target_test.cc",
+    "blitter_set_scissor_test.cc",
+    "byte_swap_test.cc",
+    "char_is_signed_test.cc",
+    "character_is_alphanumeric_test.cc",
+    "character_is_digit_test.cc",
+    "character_is_hex_digit_test.cc",
+    "character_is_space_test.cc",
+    "character_is_upper_test.cc",
+    "character_to_lower_test.cc",
+    "character_to_upper_test.cc",
+    "condition_variable_broadcast_test.cc",
+    "condition_variable_create_test.cc",
+    "condition_variable_destroy_test.cc",
+    "condition_variable_signal_test.cc",
+    "condition_variable_wait_test.cc",
+    "condition_variable_wait_timed_test.cc",
+    "configuration_test.cc",
+    "cpu_features_get_test.cc",
+    "cryptography_create_transformer_test.cc",
+    "cryptography_helpers.cc",
+    "cryptography_helpers.h",
+    "cryptography_transform_gcm_test.cc",
+    "cryptography_transform_test.cc",
+    "directory_can_open_test.cc",
+    "directory_close_test.cc",
+    "directory_create_test.cc",
+    "directory_get_next_test.cc",
+    "directory_open_test.cc",
+    "double_absolute_test.cc",
+    "double_exponent_test.cc",
+    "double_floor_test.cc",
+    "double_is_finite_test.cc",
+    "double_is_nan_test.cc",
+    "drm_create_system_test.cc",
+    "drm_get_metrics_test.cc",
+    "drm_helpers.cc",
+    "drm_helpers.h",
+    "drm_is_server_certificate_updatable_test.cc",
+    "drm_update_server_certificate_test.cc",
+    "egl_test.cc",
+    "extern_c_test.cc",
+    "file_atomic_replace_test.cc",
+    "file_can_open_test.cc",
+    "file_close_test.cc",
+    "file_delete_recursive_test.cc",
+    "file_delete_test.cc",
+    "file_get_info_test.cc",
+    "file_get_path_info_test.cc",
+    "file_helpers.cc",
+    "file_mode_string_to_flags_test.cc",
+    "file_open_test.cc",
+    "file_read_test.cc",
+    "file_read_write_all_test.cc",
+    "file_seek_test.cc",
+    "file_truncate_test.cc",
+    "file_write_test.cc",
+    "flat_map_test.cc",
+    "gles_test.cc",
+    "image_test.cc",
+    "include_all.c",
+    "include_all_too.c",
+    "key_test.cc",
+    "log_flush_test.cc",
+    "log_format_test.cc",
+    "log_is_tty_test.cc",
+    "log_raw_dump_stack_test.cc",
+    "log_raw_test.cc",
+    "log_test.cc",
+
+    # TODO: Separate functions tested by media buffer test into multiple
+    # files.
+    "media_buffer_test.cc",
+    "media_can_play_mime_and_key_system_test.cc",
+    "media_configuration_test.cc",
+    "memory_align_to_page_size_test.cc",
+    "memory_allocate_aligned_test.cc",
+    "memory_allocate_test.cc",
+    "memory_compare_test.cc",
+    "memory_copy_test.cc",
+    "memory_deallocate_aligned_test.cc",
+    "memory_deallocate_test.cc",
+    "memory_find_byte_test.cc",
+    "memory_get_stack_bounds_test.cc",
+    "memory_is_zero_test.cc",
+    "memory_map_test.cc",
+    "memory_move_test.cc",
+    "memory_reallocate_test.cc",
+
+    # TODO(andrewsavage)
+    # "memory_reporter_test.cc",
+    "memory_set_test.cc",
+    "microphone_close_test.cc",
+    "microphone_create_test.cc",
+    "microphone_destroy_test.cc",
+    "microphone_get_available_test.cc",
+    "microphone_is_sample_rate_supported_test.cc",
+    "microphone_open_test.cc",
+    "microphone_read_test.cc",
+    "murmurhash2_test.cc",
+    "mutex_acquire_test.cc",
+    "mutex_acquire_try_test.cc",
+    "mutex_create_test.cc",
+    "mutex_destroy_test.cc",
+    "once_test.cc",
+    "optional_test.cc",
+    "player_create_test.cc",
+    "player_creation_param_helpers.cc",
+    "player_creation_param_helpers.h",
+    "player_get_preferred_output_mode_test.cc",
+    "player_output_mode_supported_test.cc",
+    "player_test_util.cc",
+    "player_test_util.h",
+    "player_write_sample_test.cc",
+    "random_helpers.cc",
+    "recursive_mutex_test.cc",
+    "rwlock_test.cc",
+    "semaphore_test.cc",
+    "socket_accept_test.cc",
+    "socket_bind_test.cc",
+    "socket_clear_last_error_test.cc",
+    "socket_connect_test.cc",
+    "socket_create_test.cc",
+    "socket_destroy_test.cc",
+    "socket_get_interface_address_test.cc",
+    "socket_get_last_error_test.cc",
+    "socket_get_local_address_test.cc",
+    "socket_helpers.cc",
+    "socket_is_connected_and_idle_test.cc",
+    "socket_is_connected_test.cc",
+    "socket_join_multicast_group_test.cc",
+    "socket_listen_test.cc",
+    "socket_receive_from_test.cc",
+    "socket_resolve_test.cc",
+    "socket_send_to_test.cc",
+    "socket_set_options_test.cc",
+    "socket_waiter_add_test.cc",
+    "socket_waiter_create_test.cc",
+    "socket_waiter_destroy_test.cc",
+    "socket_waiter_remove_test.cc",
+    "socket_waiter_wait_test.cc",
+    "socket_waiter_wait_timed_test.cc",
+    "socket_waiter_wake_up_test.cc",
+    "socket_wrapper_test.cc",
+    "speech_recognizer_cancel_test.cc",
+    "speech_recognizer_create_test.cc",
+    "speech_recognizer_destroy_test.cc",
+    "speech_recognizer_helper.h",
+    "speech_recognizer_start_test.cc",
+    "speech_recognizer_stop_test.cc",
+    "speech_synthesis_basic_test.cc",
+    "state_machine_test.cc",
+    "storage_close_record_test.cc",
+    "storage_delete_record_test.cc",
+    "storage_get_record_size_test.cc",
+    "storage_open_record_test.cc",
+    "storage_read_record_test.cc",
+    "storage_write_record_test.cc",
+    "string_compare_all_test.cc",
+    "string_compare_no_case_n_test.cc",
+    "string_compare_no_case_test.cc",
+    "string_compare_test.cc",
+    "string_compare_wide_test.cc",
+    "string_concat_test.cc",
+    "string_concat_wide_test.cc",
+    "string_copy_test.cc",
+    "string_copy_wide_test.cc",
+    "string_duplicate_test.cc",
+    "string_find_character_test.cc",
+    "string_find_last_character_test.cc",
+    "string_find_string_test.cc",
+    "string_format_test.cc",
+    "string_format_wide_test.cc",
+    "string_parse_double_test.cc",
+    "string_parse_signed_integer_test.cc",
+    "string_parse_uint64_test.cc",
+    "string_parse_unsigned_integer_test.cc",
+    "string_scan_test.cc",
+    "system_binary_search_test.cc",
+    "system_clear_last_error_test.cc",
+    "system_get_error_string_test.cc",
+    "system_get_extension_test.cc",
+    "system_get_last_error_test.cc",
+    "system_get_locale_id_test.cc",
+    "system_get_number_of_processors_test.cc",
+    "system_get_path_test.cc",
+    "system_get_property_test.cc",
+    "system_get_random_data_test.cc",
+    "system_get_random_uint64_test.cc",
+    "system_get_stack_test.cc",
+    "system_get_total_cpu_memory_test.cc",
+    "system_get_total_gpu_memory_test.cc",
+    "system_get_used_cpu_memory_test.cc",
+    "system_get_used_gpu_memory_test.cc",
+    "system_has_capability_test.cc",
+    "system_hide_splash_screen_test.cc",
+    "system_is_debugger_attached_test.cc",
+    "system_sign_with_certification_secret_key_test.cc",
+    "system_sort_test.cc",
+    "system_symbolize_test.cc",
+    "thread_create_test.cc",
+    "thread_detach_test.cc",
+    "thread_get_current_test.cc",
+    "thread_get_id_test.cc",
+    "thread_get_name_test.cc",
+    "thread_helpers.cc",
+    "thread_is_equal_test.cc",
+    "thread_join_test.cc",
+    "thread_local_value_test.cc",
+    "thread_sampler_test.cc",
+    "thread_set_name_test.cc",
+    "thread_sleep_test.cc",
+    "thread_test.cc",
+    "thread_yield_test.cc",
+    "time_get_monotonic_now_test.cc",
+    "time_get_now_test.cc",
+    "time_narrow_test.cc",
+    "time_zone_get_current_test.cc",
+    "time_zone_get_name_test.cc",
+    "ui_navigation_test.cc",
+    "undefined_behavior_test.cc",
+    "unsafe_math_test.cc",
+    "url_player_create_test.cc",
+    "user_get_current_test.cc",
+    "user_get_property_test.cc",
+    "user_get_signed_in_test.cc",
+    "window_create_test.cc",
+    "window_destroy_test.cc",
+    "window_get_diagonal_size_in_inches_test.cc",
+    "window_get_platform_handle_test.cc",
+    "window_get_size_test.cc",
+  ]
+
+  # TODO(andrewsavage): remove -fno-exceptions
+
+  # This allows the tests to include internal-only header files.
+  defines = [ "STARBOARD_IMPLEMENTATION" ]
+
+  public_deps = [
+    ":nplb_file_tests_data",
+    "//starboard",
+    "//starboard/common",
+    "//starboard/shared/starboard/media:media_util",
+    "//starboard/shared/starboard/player:player_download_test_data",
+    "//starboard/shared/starboard/player:video_dmp",
+    "//testing/gmock",
+  ]
+
+  if (gl_type != "none") {
+    public_deps += [ "//starboard/egl_and_gles" ]
+  }
+}
+
+copy("nplb_file_tests_data") {
+  sources = [
+    "testdata/file_tests/dir_with_files/file11",
+    "testdata/file_tests/dir_with_files/file12",
+    "testdata/file_tests/dir_with_only_subdir/dir_with_files/file21",
+    "testdata/file_tests/dir_with_only_subdir/dir_with_files/file22",
+    "testdata/file_tests/file01",
+    "testdata/file_tests/file_with_long_name_and_contents_for_seek_testing_1234567890",
+  ]
+
+  subdir = "starboard/nplb/file_tests"
+
+  outputs = [ "$sb_static_contents_output_data_dir/test/$subdir/{{source_root_relative_dir}}/{{source_file_part}}" ]
+}
diff --git a/src/starboard/nplb/nplb_evergreen_compat_tests/BUILD.gn b/src/starboard/nplb/nplb_evergreen_compat_tests/BUILD.gn
new file mode 100644
index 0000000..f68fa6b
--- /dev/null
+++ b/src/starboard/nplb/nplb_evergreen_compat_tests/BUILD.gn
@@ -0,0 +1,38 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+target(gtest_target_type, "nplb_evergreen_compat_tests") {
+  testonly = true
+
+  sources = [
+    "//starboard/common/test_main.cc",
+    "checks.h",
+    "executable_memory_test.cc",
+    "fonts_test.cc",
+    "sabi_test.cc",
+  ]
+
+  if (!sb_evergreen_compatible_lite) {
+    sources += [
+      "icu_test.cc",
+      "max_file_name_test.cc",
+      "storage_test.cc",
+    ]
+  }
+
+  public_deps = [
+    "//starboard",
+    "//testing/gmock",
+  ]
+}
diff --git a/src/starboard/raspi/shared/gyp_configuration.py b/src/starboard/raspi/shared/gyp_configuration.py
index 5e073fa..cf514c3 100644
--- a/src/starboard/raspi/shared/gyp_configuration.py
+++ b/src/starboard/raspi/shared/gyp_configuration.py
@@ -156,11 +156,14 @@
 
   __FILTERED_TESTS = {  # pylint: disable=invalid-name
       'nplb': [
+          'SbAudioSinkTest.*',
           'SbDrmTest.AnySupportedKeySystems',
           'SbMediaCanPlayMimeAndKeySystem.AnySupportedKeySystems',
           'SbMediaCanPlayMimeAndKeySystem.KeySystemWithAttributes',
           'SbMediaCanPlayMimeAndKeySystem.MinimumSupport',
-          'SbUndefinedBehaviorTest.CallThisPointerIsNullRainyDay'
+          'SbMediaSetAudioWriteDurationTests/*',
+          'SbPlayerWriteSampleTests*',
+          'SbUndefinedBehaviorTest.CallThisPointerIsNullRainyDay',
       ],
       'player_filter_tests': [
           # The implementations for the raspberry pi (0 and 2) are incomplete
diff --git a/src/starboard/raspi/shared/launcher.py b/src/starboard/raspi/shared/launcher.py
index fe8eb9a..14e11e1 100644
--- a/src/starboard/raspi/shared/launcher.py
+++ b/src/starboard/raspi/shared/launcher.py
@@ -41,6 +41,15 @@
   sys.exit(signum)
 
 
+# Returns True on the first call and False on every subsequent call.
+def FirstRun():
+  v = globals()
+  if 'first_run' not in v:
+    v['first_run'] = False
+    return True
+  return False
+
+
 class Launcher(abstract_launcher.AbstractLauncher):
   """Class for launching Cobalt/tools on Raspi."""
 
@@ -49,6 +58,8 @@
   _RASPI_USERNAME = 'pi'
   _RASPI_PASSWORD = 'raspberry'
   _SSH_LOGIN_SIGNAL = 'cobalt-launcher-login-success'
+  _SSH_SLEEP_SIGNAL = 'cobalt-launcher-done-sleeping'
+  _RASPI_PROMPT = 'pi@raspberrypi:'
 
   # pexpect times out each second to allow Kill to quickly stop a test run
   _PEXPECT_TIMEOUT = 1
@@ -57,11 +68,14 @@
   _PEXPECT_PASSWORD_TIMEOUT_MAX_RETRIES = 30
   # Wait up to 900 seconds for new output from the raspi
   _PEXPECT_READLINE_TIMEOUT_MAX_RETRIES = 900
+  # Delay between subsequent SSH commands
+  _INTER_COMMAND_DELAY_SECONDS = 0.5
 
   # This is used to strip ansi color codes from pexpect output.
   _PEXPECT_SANITIZE_LINE_RE = re.compile(r'\x1b[^m]*m')
 
   def __init__(self, platform, target_name, config, device_id, **kwargs):
+    # pylint: disable=super-with-arguments
     super(Launcher, self).__init__(platform, target_name, config, device_id,
                                    **kwargs)
     env = os.environ.copy()
@@ -85,6 +99,8 @@
 
     self.shutdown_initiated = threading.Event()
 
+    self.log_targets = kwargs.get('log_targets', True)
+
     signal.signal(signal.SIGINT, functools.partial(_SigIntOrSigTermHandler))
     signal.signal(signal.SIGTERM, functools.partial(_SigIntOrSigTermHandler))
 
@@ -109,10 +125,11 @@
     options = '-avzLh'
     source = test_dir + '/'
     destination = '{}:~/{}/'.format(raspi_user_hostname, raspi_test_dir)
-    self.rsync_command = 'rsync ' + options + ' ' + source + ' ' + destination
+    self.rsync_command = 'rsync ' + options + ' ' + source + ' ' + \
+        destination + ';sync'
 
     # ssh command setup
-    self.ssh_command = 'ssh ' + raspi_user_hostname
+    self.ssh_command = 'ssh -t ' + raspi_user_hostname + ' TERM=dumb bash -l'
 
     # escape command line metacharacters in the flags
     flags = ' '.join(self.target_command_line_params)
@@ -146,6 +163,8 @@
     logging.info('executing: %s', command)
     self.pexpect_process = pexpect.spawn(
         command, timeout=Launcher._PEXPECT_TIMEOUT)
+    # Let pexpect output directly to our output stream
+    self.pexpect_process.logfile_read = self.output_file
     retry_count = 0
     expected_prompts = [
         r'.*Are\syou\ssure.*',  # Fingerprint verification
@@ -174,8 +193,7 @@
         # Check if the max retry count has been exceeded. If it has, then
         # re-raise the timeout exception.
         if retry_count > Launcher._PEXPECT_PASSWORD_TIMEOUT_MAX_RETRIES:
-          exc_info = sys.exc_info()
-          raise exc_info[0], exc_info[1], exc_info[2]
+          raise
 
   def _PexpectReadLines(self):
     """Reads all lines from the pexpect process."""
@@ -186,10 +204,9 @@
         # Sanitize the line to remove ansi color codes.
         line = Launcher._PEXPECT_SANITIZE_LINE_RE.sub(
             '', self.pexpect_process.readline())
+        self.output_file.flush()
         if not line:
           break
-        self.output_file.write(line)
-        self.output_file.flush()
         # Check for the test complete tag. It will be followed by either a
         # success or failure tag.
         if line.startswith(self.test_complete_tag):
@@ -206,18 +223,49 @@
         # Check if the max retry count has been exceeded. If it has, then
         # re-raise the timeout exception.
         if retry_count > Launcher._PEXPECT_READLINE_TIMEOUT_MAX_RETRIES:
-          exc_info = sys.exc_info()
-          raise exc_info[0], exc_info[1], exc_info[2]
+          raise
+
+  def _Sleep(self, seconds):
+    self.pexpect_process.sendline('sleep {};echo {}'.format(
+        seconds, Launcher._SSH_SLEEP_SIGNAL))
+    self.pexpect_process.expect([Launcher._SSH_SLEEP_SIGNAL])
 
   def _CleanupPexpectProcess(self):
     """Closes current pexpect process."""
 
     if self.pexpect_process is not None and self.pexpect_process.isalive():
+      # Check if the kernel logged an OOM kill or other system failure message
+      if self.return_value:
+        logging.info('Sending dmesg')
+        self.pexpect_process.sendline('dmesg -P --color=never | tail -n 100')
+        time.sleep(3)
+        try:
+          self.pexpect_process.readlines()
+        except pexpect.TIMEOUT:
+          pass
+        logging.info('Done sending dmesg')
+
       # Send ctrl-c to the raspi and close the process.
       self.pexpect_process.sendline(chr(3))
-      self._KillExistingCobaltProcesses()
+      time.sleep(1)  # Allow a second for normal shutdown
       self.pexpect_process.close()
 
+  def _WaitForPrompt(self):
+    """Sends empty commands, until a bash prompt is returned"""
+    retry_count = 5
+    while True:
+      try:
+        self.pexpect_process.expect(self._RASPI_PROMPT)
+        break
+      except pexpect.TIMEOUT:
+        if self.shutdown_initiated.is_set():
+          return
+        retry_count -= 1
+        if not retry_count:
+          raise
+        self.pexpect_process.sendline('echo ' + Launcher._SSH_SLEEP_SIGNAL)
+        time.sleep(self._INTER_COMMAND_DELAY_SECONDS)
+
   def _KillExistingCobaltProcesses(self):
     """If there are leftover Cobalt processes, kill them.
 
@@ -225,16 +273,19 @@
     Zombie Cobalt instances can block the WebDriver port or
     cause other problems.
     """
-    self.pexpect_process.sendline('pkill -9 -f "(cobalt)|(crashpad_handler)"')
+    logging.info('Killing existing processes')
+    self.pexpect_process.sendline(
+        'pkill -9 -ef "(cobalt)|(crashpad_handler)|(elf_loader)"')
+    self._WaitForPrompt()
     # Print the return code of pkill. 0 if a process was halted
-    self.pexpect_process.sendline('echo $?')
-    i = self.pexpect_process.expect([r'0', r'.*'])
-    if i == '0':
-      logging.warning(
-          'Forced to pkill existing instance(s) of cobalt. '
-          'Pausing to ensure no further operations are run '
-          'before processes shut down.')
+    self.pexpect_process.sendline('echo PROCKILL:${?}')
+    i = self.pexpect_process.expect([r'PROCKILL:0', r'PROCKILL:(\d+)'])
+    if i == 0:
+      logging.warning('Forced to pkill existing instance(s) of cobalt. '
+                      'Pausing to ensure no further operations are run '
+                      'before processes shut down.')
       time.sleep(10)
+    logging.info('Done killing existing processes')
 
   def Run(self):
     """Runs launcher's executable on the target raspi.
@@ -243,6 +294,11 @@
        Whether or not the run finished successfully.
     """
 
+    if self.log_targets:
+      logging.info('-' * 32)
+      logging.info('Starting to run target: %s', self.target_name)
+      logging.info('=' * 32)
+
     self.return_value = 1
 
     try:
@@ -258,8 +314,21 @@
       # ssh into the raspi and run the test
       if not self.shutdown_initiated.is_set():
         self._PexpectSpawnAndConnect(self.ssh_command)
-      if not self.shutdown_initiated.is_set():
+        self._Sleep(self._INTER_COMMAND_DELAY_SECONDS)
+      # Execute debugging commands on the first run
+      if FirstRun():
+        for cmd in ['free -mh', 'ps -ux', 'df -h']:
+          if not self.shutdown_initiated.is_set():
+            self.pexpect_process.sendline(cmd)
+            line = self.pexpect_process.readline()
+            self.output_file.write(line)
+        self._WaitForPrompt()
+        self.output_file.flush()
+        self._Sleep(self._INTER_COMMAND_DELAY_SECONDS)
         self._KillExistingCobaltProcesses()
+        self._Sleep(self._INTER_COMMAND_DELAY_SECONDS)
+
+      if not self.shutdown_initiated.is_set():
         self.pexpect_process.sendline(self.test_command)
         self._PexpectReadLines()
 
@@ -275,6 +344,11 @@
       # Notify other threads that the run is no longer active
       self.run_inactive.set()
 
+    if self.log_targets:
+      logging.info('-' * 32)
+      logging.info('Finished running target: %s', self.target_name)
+      logging.info('=' * 32)
+
     return self.return_value
 
   def Kill(self):
diff --git a/src/starboard/raspi/shared/starboard_platform.gypi b/src/starboard/raspi/shared/starboard_platform.gypi
index 26903cf..5fc6388 100644
--- a/src/starboard/raspi/shared/starboard_platform.gypi
+++ b/src/starboard/raspi/shared/starboard_platform.gypi
@@ -357,7 +357,6 @@
         '<(DEPTH)/starboard/shared/stub/drm_generate_session_update_request.cc',
         '<(DEPTH)/starboard/shared/stub/drm_get_metrics.cc',
         '<(DEPTH)/starboard/shared/stub/drm_is_server_certificate_updatable.cc',
-        '<(DEPTH)/starboard/shared/stub/drm_system_internal.h',
         '<(DEPTH)/starboard/shared/stub/drm_update_server_certificate.cc',
         '<(DEPTH)/starboard/shared/stub/drm_update_session.cc',
         '<(DEPTH)/starboard/shared/stub/media_is_supported.cc',
diff --git a/src/starboard/shared/starboard/media/BUILD.gn b/src/starboard/shared/starboard/media/BUILD.gn
new file mode 100644
index 0000000..1ad8f29
--- /dev/null
+++ b/src/starboard/shared/starboard/media/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+static_library("media_util") {
+  sources = [
+    "//starboard/shared/starboard/media/avc_util.cc",
+    "//starboard/shared/starboard/media/avc_util.h",
+    "//starboard/shared/starboard/media/codec_util.cc",
+    "//starboard/shared/starboard/media/codec_util.h",
+    "//starboard/shared/starboard/media/media_util.cc",
+    "//starboard/shared/starboard/media/media_util.h",
+    "//starboard/shared/starboard/media/video_capabilities.cc",
+    "//starboard/shared/starboard/media/video_capabilities.h",
+    "//starboard/shared/starboard/media/vp9_util.cc",
+    "//starboard/shared/starboard/media/vp9_util.h",
+  ]
+
+  defines = [
+    # This allows this target to include internal-only header files.
+    "STARBOARD_IMPLEMENTATION",
+  ]
+
+  public_deps = [ "//starboard/common" ]
+}
diff --git a/src/starboard/shared/starboard/player/BUILD.gn b/src/starboard/shared/starboard/player/BUILD.gn
new file mode 100644
index 0000000..7463843
--- /dev/null
+++ b/src/starboard/shared/starboard/player/BUILD.gn
@@ -0,0 +1,72 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//starboard/shared/starboard/player/testdata/sha_files.gni")
+
+static_library("video_dmp") {
+  sources = [
+    "//starboard/shared/starboard/player/file_cache_reader.cc",
+    "//starboard/shared/starboard/player/file_cache_reader.h",
+    "//starboard/shared/starboard/player/video_dmp_common.cc",
+    "//starboard/shared/starboard/player/video_dmp_common.h",
+    "//starboard/shared/starboard/player/video_dmp_reader.cc",
+    "//starboard/shared/starboard/player/video_dmp_reader.h",
+  ]
+
+  public_deps = [ "//starboard/common" ]
+
+  defines = [
+    # This allows this target to include internal-only header files.
+    "STARBOARD_IMPLEMENTATION",
+  ]
+}
+
+action("player_download_test_data") {
+  script = "//tools/download_from_gcs.py"
+
+  sha_sources = []
+  foreach(sha_file, sha1_files) {
+    sha_sources += [ string_join("/",
+                                 [
+                                   "testdata",
+                                   sha_file,
+                                 ]) ]
+  }
+
+  sha_outputs = []
+  subdir = "starboard/shared/starboard/player/testdata"
+  outdir = "$sb_static_contents_output_data_dir/test/$subdir"
+  foreach(sha_source, sha_sources) {
+    sha_outputs +=
+        [ string_join("/",
+                      [
+                        outdir,
+                        string_replace(sha_source, ".dmp.sha1", ".dmp"),
+                      ]) ]
+  }
+
+  sources = sha_sources
+  outputs = sha_outputs
+
+  sha1_dir = rebase_path("testdata", root_build_dir)
+
+  args = [
+    "--bucket",
+    "cobalt-static-storage",
+    "--sha1",
+    sha1_dir,
+    "--output",
+    outdir,
+  ]
+}
diff --git a/src/starboard/shared/starboard/player/testdata/sha_files.gni b/src/starboard/shared/starboard/player/testdata/sha_files.gni
new file mode 100644
index 0000000..66f7cc0
--- /dev/null
+++ b/src/starboard/shared/starboard/player/testdata/sha_files.gni
@@ -0,0 +1,29 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+sha1_files = [
+  "beneath_the_canopy_137_avc.dmp.sha1",
+  "beneath_the_canopy_248_vp9.dmp.sha1",
+  "beneath_the_canopy_aac_5_1.dmp.sha1",
+  "beneath_the_canopy_aac_mono.dmp.sha1",
+  "beneath_the_canopy_aac_stereo.dmp.sha1",
+  "beneath_the_canopy_opus_5_1.dmp.sha1",
+  "beneath_the_canopy_opus_mono.dmp.sha1",
+  "beneath_the_canopy_opus_stereo.dmp.sha1",
+  "black_test_avc_1080p_30to60_fps.dmp.sha1",
+  "heaac.dmp.sha1",
+  "sintel_329_ec3.dmp.sha1",
+  "sintel_381_ac3.dmp.sha1",
+  "sintel_399_av1.dmp.sha1",
+]
diff --git a/src/starboard/stub/BUILD.gn b/src/starboard/stub/BUILD.gn
index fd2f904..fc139ae 100644
--- a/src/starboard/stub/BUILD.gn
+++ b/src/starboard/stub/BUILD.gn
@@ -1,3 +1,17 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 static_library("starboard_platform") {
   deps = [ ":stub_sources" ]
 
@@ -10,6 +24,8 @@
     "configuration_constants.cc",
     "font.cc",
     "font.h",
+    "javascript_cache.cc",
+    "javascript_cache.h",
     "main.cc",
     "system_get_extensions.cc",
     "thread_types_public.h",
@@ -22,7 +38,10 @@
 
 source_set("stub_sources") {
   public_configs = [ ":starboard_implementation" ]
-  public_deps = [ "//starboard:starboard_headers_only" ]
+  public_deps = [
+    "//starboard:starboard_headers_only",
+    "//starboard/common",
+  ]
 
   sources = [
     "//starboard/shared/starboard/application.cc",
diff --git a/src/starboard/stub/platform_configuration/configuration.gni b/src/starboard/stub/platform_configuration/configuration.gni
index f40fe4c..61a778f 100644
--- a/src/starboard/stub/platform_configuration/configuration.gni
+++ b/src/starboard/stub/platform_configuration/configuration.gni
@@ -21,3 +21,5 @@
 gl_type = "none"
 
 sabi_path = "//starboard/sabi/x64/sysv/sabi-v$sb_api_version.json"
+
+install_target_path = "//starboard/build/install/install_target.gni"
diff --git a/src/starboard/tools/build.py b/src/starboard/tools/build.py
index a788cb2..08923fe 100644
--- a/src/starboard/tools/build.py
+++ b/src/starboard/tools/build.py
@@ -29,9 +29,6 @@
 
 _STARBOARD_TOOLCHAINS_DIR_KEY = 'STARBOARD_TOOLCHAINS_DIR'
 _STARBOARD_TOOLCHAINS_DIR_NAME = 'starboard-toolchains'
-# TODO: Remove COBALT versions, eventually.
-_COBALT_TOOLCHAINS_DIR_KEY = 'COBALT_TOOLCHAINS_DIR'
-_COBALT_TOOLCHAINS_DIR_NAME = 'cobalt-toolchains'
 
 # TODO: Rectify consistency of "Build Type" / "Build Config" naming.
 _BUILD_CONFIG_KEY = 'BUILD_TYPE'
@@ -142,13 +139,6 @@
                 os.path.join(home_dir, _STARBOARD_TOOLCHAINS_DIR_NAME)))
 
   if not os.path.isdir(toolchains_dir):
-    # TODO: Remove backup, eventually.
-    backup_toolchains_dir = os.path.realpath(
-        os.getenv(_COBALT_TOOLCHAINS_DIR_KEY,
-                  os.path.join(home_dir, _COBALT_TOOLCHAINS_DIR_NAME)))
-    if os.path.isdir(backup_toolchains_dir):
-      return backup_toolchains_dir
-
     # Ensure the toolchains directory exists.
     os.mkdir(toolchains_dir)
 
diff --git a/src/starboard/tools/testing/test_runner.py b/src/starboard/tools/testing/test_runner.py
index 540aa6d..1ae0133 100755
--- a/src/starboard/tools/testing/test_runner.py
+++ b/src/starboard/tools/testing/test_runner.py
@@ -417,6 +417,9 @@
                    xml_output_path)
       test_params.append("--gtest_output=xml:%s" % (xml_output_path))
 
+    # Turn off color codes in the output to make it easier to parse.
+    test_params.append("--gtest_color=no")
+
     test_params.extend(self.target_params)
     if self.dry_run:
       test_params.extend(["--gtest_list_tests"])
diff --git a/src/testing/gtest/BUILD.gn b/src/testing/gtest/BUILD.gn
index 0324adf..3d38232 100644
--- a/src/testing/gtest/BUILD.gn
+++ b/src/testing/gtest/BUILD.gn
@@ -31,6 +31,9 @@
 static_library("gtest") {
   testonly = true
   sources = [
+    "../multiprocess_func_list.cc",
+    "../multiprocess_func_list.h",
+    "../platform_test.h",
     "include/gtest/gtest-death-test.h",
     "include/gtest/gtest-message.h",
     "include/gtest/gtest-param-test.h",
@@ -58,9 +61,6 @@
     "src/gtest-test-part.cc",
     "src/gtest-typed-test.cc",
     "src/gtest.cc",
-    "../multiprocess_func_list.cc",
-    "../multiprocess_func_list.h",
-    "../platform_test.h",
   ]
 
   include_dirs = [ "." ]
@@ -74,5 +74,7 @@
   # It happens to be that all_dependent_configs get processed after default
   # configs, so the -frtti flag comes after the -fno-rtti flag on the command
   # line, so it happens to work in this case.
-  all_dependent_configs = [ "//starboard/build/config:rtti" ]
+  all_dependent_configs = [ "//build/config/compiler:rtti" ]
+
+  public_deps = [ "//starboard" ]
 }
diff --git a/src/third_party/blink/Source/bindings/scripts/utilities.py b/src/third_party/blink/Source/bindings/scripts/utilities.py
index ed8e5e9..bfcebe2 100644
--- a/src/third_party/blink/Source/bindings/scripts/utilities.py
+++ b/src/third_party/blink/Source/bindings/scripts/utilities.py
@@ -34,7 +34,8 @@
 # List of regular expressions finding tokens that would appear in a name that
 # was converted from snake_case to TitleCase, but by convention should be in
 # ALL CAPS i.e. html_html_element -> HtmlHtmlElement -> HTMLHtmlElement
-special_token_list = ['3d', 'Br', 'Cdata', 'Css', 'Dom', '^Html', 'Idl?', 'Ui(?!nt)', 'Url', 'Xml']
+special_token_list = ['3d', 'Br(?=E)', 'Cdata', 'Css', 'Dom', '^Html', 'Json',
+                      'Idl?', 'Ua', 'Ui(?!nt)', 'Url', 'Xml']
 
 
 # Regular expression to capture all of the special tokens.
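
The token list above is applied after a snake_case name has been converted to TitleCase; matching tokens are raised to ALL CAPS. An illustrative C++ analogue of that convention (not the utilities.py implementation), using a subset of the tokens:

    #include <cctype>
    #include <regex>
    #include <string>

    // Uppercase the special tokens inside a TitleCase identifier, e.g.
    // "HtmlHtmlElement" -> "HTMLHtmlElement".
    std::string UppercaseSpecialTokens(const std::string& name) {
      static const std::regex kSpecial("^Html|Ui(?!nt)|Url|Xml|Json");
      std::string result;
      std::size_t last = 0;
      for (std::sregex_iterator it(name.begin(), name.end(), kSpecial), end;
           it != end; ++it) {
        result += name.substr(last, it->position() - last);
        std::string token = it->str();
        for (char& c : token) {
          c = static_cast<char>(std::toupper(static_cast<unsigned char>(c)));
        }
        result += token;
        last = static_cast<std::size_t>(it->position() + it->length());
      }
      result += name.substr(last);
      return result;
    }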
diff --git a/src/third_party/boringssl/src/config/starboard/openssl/opensslconf.h b/src/third_party/boringssl/src/config/starboard/openssl/opensslconf.h
index 17e638c..3176038 100644
--- a/src/third_party/boringssl/src/config/starboard/openssl/opensslconf.h
+++ b/src/third_party/boringssl/src/config/starboard/openssl/opensslconf.h
@@ -233,13 +233,11 @@
 // Definitions for system calls that may need to be overridden.
 #define OPENSSL_port_abort SbSystemBreakIntoDebugger
 #define OPENSSL_port_assert(x) SB_DCHECK(x)
-#define OPENSSL_port_atoi SbStringAToI
 #define OPENSSL_port_free SbMemoryDeallocate
 #define OPENSSL_port_getenv(x) NULL
 #define OPENSSL_port_gettimeofday EzTimeValueGetNow
 #define OPENSSL_port_gmtime_r EzTimeTExplodeUTC
 #define OPENSSL_port_malloc SbMemoryAllocate
-#define OPENSSL_port_memchr (unsigned char *)SbMemoryFindByte
 #define OPENSSL_port_memcmp SbMemoryCompare
 #define OPENSSL_port_memcpy SbMemoryCopy
 #define OPENSSL_port_memmove SbMemoryMove
diff --git a/src/third_party/boringssl/src/crypto/mem_starboard.c b/src/third_party/boringssl/src/crypto/mem_starboard.c
index 78c2f12..57da207 100644
--- a/src/third_party/boringssl/src/crypto/mem_starboard.c
+++ b/src/third_party/boringssl/src/crypto/mem_starboard.c
@@ -11,6 +11,8 @@
 
 #include <openssl/mem.h>
 
+#include <string.h>
+
 #if defined(OPENSSL_WINDOWS)
 OPENSSL_MSVC_PRAGMA(warning(push, 3))
 #include <windows.h>
@@ -101,7 +103,7 @@
     return NULL;
   }
 
-  return SbMemoryFindByte(s, c, n);
+  return memchr(s, c, n);
 }
 
 int OPENSSL_memcmp(const void *s1, const void *s2, size_t n) {
diff --git a/src/third_party/crashpad/wrapper/BUILD.gn b/src/third_party/crashpad/wrapper/BUILD.gn
new file mode 100644
index 0000000..341319c
--- /dev/null
+++ b/src/third_party/crashpad/wrapper/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The common "starboard" target. Any target that depends on Starboard should
+# depend on this common target, and not any of the specific "starboard_platform"
+# targets.
+
+static_library("wrapper_stub") {
+  sources = [
+    "wrapper_stub.cc",
+    "wrapper.h",
+  ]
+}
\ No newline at end of file
diff --git a/src/third_party/freetype2/include/freetype/config/ftstdlib.h b/src/third_party/freetype2/include/freetype/config/ftstdlib.h
index c766ef0..6cbbcca 100644
--- a/src/third_party/freetype2/include/freetype/config/ftstdlib.h
+++ b/src/third_party/freetype2/include/freetype/config/ftstdlib.h
@@ -74,10 +74,12 @@
    *
    */
 #include <string.h>
+
+#define ft_memchr memchr
+
 #if defined(STARBOARD)
 #include "starboard/string.h"
 #include "starboard/memory.h"
-#define ft_memchr   SbMemoryFindByte
 #define ft_memcmp   SbMemoryCompare
 #define ft_memcpy   SbMemoryCopy
 #define ft_memmove  SbMemoryMove
@@ -90,7 +92,6 @@
 #define ft_strncpy  SbStringCopy
 #define ft_strrchr  SbStringFindLastCharacter
 #else
-#define ft_memchr   memchr
 #define ft_memcmp   memcmp
 #define ft_memcpy   memcpy
 #define ft_memmove  memmove
diff --git a/src/third_party/google_benchmark/BUILD.gn b/src/third_party/google_benchmark/BUILD.gn
new file mode 100644
index 0000000..c274b73
--- /dev/null
+++ b/src/third_party/google_benchmark/BUILD.gn
@@ -0,0 +1,42 @@
+# Copyright 2021 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+static_library("google_benchmark") {
+  testonly = true
+
+  sources = [
+    "src/benchmark.cc",
+    "src/benchmark_api_internal.cc",
+    "src/benchmark_name.cc",
+    "src/benchmark_register.cc",
+    "src/benchmark_runner.cc",
+    "src/colorprint_starboard.cc",
+    "src/commandlineflags.cc",
+    "src/complexity.cc",
+    "src/console_reporter.cc",
+    "src/counter.cc",
+    "src/csv_reporter.cc",
+    "src/json_reporter.cc",
+    "src/reporter.cc",
+    "src/sleep.cc",
+    "src/statistics.cc",
+    "src/string_util.cc",
+    "src/sysinfo.cc",
+    "src/timers.cc",
+  ]
+
+  include_dirs = [ "include" ]
+
+  public_deps = [ "//starboard/common" ]
+}
diff --git a/src/third_party/harfbuzz-ng/harfbuzz.gyp b/src/third_party/harfbuzz-ng/harfbuzz.gyp
index 68d9a6d..1e2a29a 100644
--- a/src/third_party/harfbuzz-ng/harfbuzz.gyp
+++ b/src/third_party/harfbuzz-ng/harfbuzz.gyp
@@ -7,7 +7,7 @@
     'use_system_harfbuzz%': 0,
   },
   'includes': [
-    '../../build/win_precompile.gypi',
+    '../../build_gyp/win_precompile.gypi',
   ],
   'conditions': [
     ['use_system_harfbuzz==0', {
diff --git a/src/third_party/icu/BUILD.gn b/src/third_party/icu/BUILD.gn
index 88dac45..1d15fb4 100644
--- a/src/third_party/icu/BUILD.gn
+++ b/src/third_party/icu/BUILD.gn
@@ -240,6 +240,10 @@
 
     defines = [ "U_I18N_IMPLEMENTATION" ]
     deps = icuuc_deps
+
+    if (is_starboard) {
+      check_includes = false
+    }
   }
 }
 
@@ -285,6 +289,14 @@
     }
 
     defines += [ "U_ICUDATAENTRY_IN_COMMON" ]
+
+    if (is_starboard) {
+      public_deps = [ "//starboard:starboard_headers_only" ]
+      defines += [
+        "U_HAVE_NL_LANGINFO_CODESET=0",
+        "U_HAVE_NL_LANGINFO=0"
+      ]
+    }
   }
 }
 
diff --git a/src/third_party/icu/source/i18n/currunit.cpp b/src/third_party/icu/source/i18n/currunit.cpp
index 92bcf12..ec14738 100644
--- a/src/third_party/icu/source/i18n/currunit.cpp
+++ b/src/third_party/icu/source/i18n/currunit.cpp
@@ -10,6 +10,8 @@
 * Since: ICU 3.0
 **********************************************************************
 */
+#include <string.h>
+
 #include "unicode/utypes.h"
 
 #if !UCONFIG_NO_FORMATTING
diff --git a/src/third_party/mini_chromium/base/BUILD.gn b/src/third_party/mini_chromium/base/BUILD.gn
index b1364ba..11fdf46 100644
--- a/src/third_party/mini_chromium/base/BUILD.gn
+++ b/src/third_party/mini_chromium/base/BUILD.gn
@@ -6,6 +6,9 @@
 
 config("base_public_config") {
   include_dirs = [ ".." ]
+  if (is_starboard) {
+    cflags = [ "-isystem" + rebase_path("../../..", root_build_dir) ]
+  }
 }
 
 static_library("base") {
@@ -161,4 +164,7 @@
   }
 
   public_configs = [ ":base_public_config" ]
+  if (is_starboard) {
+    configs -= [ "//starboard/build/config:include_root" ]
+  }
 }
diff --git a/src/third_party/ots/src/post.cc b/src/third_party/ots/src/post.cc
index 1a5b95c..b947d04 100644
--- a/src/third_party/ots/src/post.cc
+++ b/src/third_party/ots/src/post.cc
@@ -4,15 +4,9 @@
 
 #include "post.h"
 
-#include "maxp.h"
+#include <string.h>
 
-#if !defined(STARBOARD)
-#include <cstring>
-#define MEMCHR_POST std::memchr
-#else
-#include "starboard/memory.h"
-#define MEMCHR_POST SbMemoryFindByte
-#endif
+#include "maxp.h"
 
 // post - PostScript
 // http://www.microsoft.com/typography/otspec/post.htm
@@ -102,7 +96,7 @@
     if (strings + 1 + string_length > strings_end) {
       return Error("Bad string length %d", string_length);
     }
-    if (MEMCHR_POST(strings + 1, '\0', string_length)) {
+    if (memchr(strings + 1, '\0', string_length)) {
       return Error("Bad string of length %d", string_length);
     }
     this->names.push_back(
@@ -182,5 +176,3 @@
 }
 
 }  // namespace ots
-
-#undef MEMCHR_POST
diff --git a/src/third_party/protobuf/src/google/protobuf/stubs/stringpiece.cc b/src/third_party/protobuf/src/google/protobuf/stubs/stringpiece.cc
index b29eb01..0a5d67d 100644
--- a/src/third_party/protobuf/src/google/protobuf/stubs/stringpiece.cc
+++ b/src/third_party/protobuf/src/google/protobuf/stubs/stringpiece.cc
@@ -27,13 +27,12 @@
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "starboard/client_porting/poem/string_poem.h"
 
 #include <google/protobuf/stubs/stringpiece.h>
 
-#ifndef STARBOARD
 #include <string.h>
-#endif  // STARBOARD
+
+#include "starboard/client_porting/poem/string_poem.h"
 
 #include <algorithm>
 #include <climits>
diff --git a/src/third_party/skia/include/core/SkString.h b/src/third_party/skia/include/core/SkString.h
index c75e6b7..fef0aa1 100644
--- a/src/third_party/skia/include/core/SkString.h
+++ b/src/third_party/skia/include/core/SkString.h
@@ -11,7 +11,6 @@
 #include "include/core/SkRefCnt.h"
 #include "include/core/SkScalar.h"
 #include "include/core/SkTypes.h"
-#include "include/private/SkMacros.h"
 #include "include/private/SkMalloc.h"
 #include "include/private/SkTArray.h"
 #include "include/private/SkTo.h"
@@ -240,7 +239,7 @@
 private:
     struct Rec {
     public:
-        CONSTEXPR Rec(uint32_t len, int32_t refCnt)
+        constexpr Rec(uint32_t len, int32_t refCnt)
             : fLength(len), fRefCnt(refCnt), fBeginningOfData(0)
         { }
         static sk_sp<Rec> Make(const char text[], size_t len);
diff --git a/src/third_party/skia/include/gpu/GrTypes.h b/src/third_party/skia/include/gpu/GrTypes.h
index 3edd776..679a0fa 100644
--- a/src/third_party/skia/include/gpu/GrTypes.h
+++ b/src/third_party/skia/include/gpu/GrTypes.h
@@ -11,7 +11,6 @@
 #include "include/core/SkMath.h"
 #include "include/core/SkTypes.h"
 #include "include/gpu/GrConfig.h"
-#include "include/private/SkMacros.h"
 
 class GrBackendSemaphore;
 class SkImage;
@@ -137,7 +136,7 @@
 /**
  *  divide, rounding up
  */
-static inline CONSTEXPR int32_t GrIDivRoundUp(int x, int y) {
+static inline constexpr int32_t GrIDivRoundUp(int x, int y) {
     SkASSERT(y > 0);
     return (x + (y-1)) / y;
 }
diff --git a/src/third_party/skia/include/gpu/gl/GrGLTypes.h b/src/third_party/skia/include/gpu/gl/GrGLTypes.h
index b76f501..1dba115 100644
--- a/src/third_party/skia/include/gpu/gl/GrGLTypes.h
+++ b/src/third_party/skia/include/gpu/gl/GrGLTypes.h
@@ -165,21 +165,9 @@
  * we will internally fall back to using the base internal formats.
  */
 struct GrGLTextureInfo {
-#if defined(COBALT)
-    // In C++11, we run into issues when initializing a struct when it has
-    // initializers for non-static members. We must use a constructor instead.
-    GrGLTextureInfo() : fFormat(0) {}
-    GrGLTextureInfo(GrGLenum fTarget, GrGLuint fID, GrGLenum fFormat = 0)
-            : fTarget(fTarget), fID(fID), fFormat(fFormat) {}
-
-    GrGLenum fTarget;
-    GrGLuint fID;
-    GrGLenum fFormat;
-#else
     GrGLenum fTarget;
     GrGLuint fID;
     GrGLenum fFormat = 0;
-#endif
 
     bool operator==(const GrGLTextureInfo& that) const {
         return fTarget == that.fTarget && fID == that.fID && fFormat == that.fFormat;
diff --git a/src/third_party/skia/include/private/GrTypesPriv.h b/src/third_party/skia/include/private/GrTypesPriv.h
index 16781c3..43a129b 100644
--- a/src/third_party/skia/include/private/GrTypesPriv.h
+++ b/src/third_party/skia/include/private/GrTypesPriv.h
@@ -211,18 +211,8 @@
 };
 
 struct GrMipLevel {
-#if defined(COBALT)
-    // In C++11, we run into issues when initializing a struct when it has
-    // initializers for non-static members. We must use a constructor instead.
-    GrMipLevel(const void* fPixels = nullptr, size_t fRowBytes = 0)
-            : fPixels(fPixels), fRowBytes(fRowBytes) {}
-
-    const void* fPixels;
-    size_t fRowBytes;
-#else
     const void* fPixels = nullptr;
     size_t fRowBytes = 0;
-#endif
 };
 
 /**
@@ -279,7 +269,7 @@
     kMSAA
 };
 
-static CONSTEXPR bool GrAATypeIsHW(GrAAType type) {
+static constexpr bool GrAATypeIsHW(GrAAType type) {
     switch (type) {
         case GrAAType::kNone:
             return false;
@@ -416,7 +406,7 @@
 GR_MAKE_BITFIELD_OPS(GrShaderFlags)
 
 /** Is the shading language type float (including vectors/matrices)? */
-static CONSTEXPR bool GrSLTypeIsFloatType(GrSLType type) {
+static constexpr bool GrSLTypeIsFloatType(GrSLType type) {
     switch (type) {
         case kFloat_GrSLType:
         case kFloat2_GrSLType:
@@ -469,7 +459,7 @@
 }
 
 /** If the type represents a single value or vector return the vector length, else -1. */
-static CONSTEXPR int GrSLTypeVecLength(GrSLType type) {
+static constexpr int GrSLTypeVecLength(GrSLType type) {
     switch (type) {
         case kFloat_GrSLType:
         case kHalf_GrSLType:
@@ -556,7 +546,7 @@
     }
 }
 
-static CONSTEXPR bool GrSLTypeIsCombinedSamplerType(GrSLType type) {
+static constexpr bool GrSLTypeIsCombinedSamplerType(GrSLType type) {
     switch (type) {
         case kTexture2DSampler_GrSLType:
         case kTextureExternalSampler_GrSLType:
@@ -826,7 +816,7 @@
  * Utility functions for GrPixelConfig
  */
 
-static CONSTEXPR GrPixelConfig GrCompressionTypePixelConfig(SkImage::CompressionType compression) {
+static constexpr GrPixelConfig GrCompressionTypePixelConfig(SkImage::CompressionType compression) {
     switch (compression) {
         case SkImage::kETC1_CompressionType: return kRGB_ETC1_GrPixelConfig;
     }
@@ -837,7 +827,7 @@
  * Returns true if the pixel config is a GPU-specific compressed format
  * representation.
  */
-static CONSTEXPR bool GrPixelConfigIsCompressed(GrPixelConfig config) {
+static constexpr bool GrPixelConfigIsCompressed(GrPixelConfig config) {
     switch (config) {
         case kRGB_ETC1_GrPixelConfig:
             return true;
@@ -906,7 +896,7 @@
 
 static const int kGrColorTypeCnt = static_cast<int>(GrColorType::kLast) + 1;
 
-static CONSTEXPR SkColorType GrColorTypeToSkColorType(GrColorType ct) {
+static constexpr SkColorType GrColorTypeToSkColorType(GrColorType ct) {
     switch (ct) {
         case GrColorType::kUnknown:          return kUnknown_SkColorType;
         case GrColorType::kAlpha_8:          return kAlpha_8_SkColorType;
@@ -935,7 +925,7 @@
     SkUNREACHABLE;
 }
 
-static CONSTEXPR GrColorType SkColorTypeToGrColorType(SkColorType ct) {
+static constexpr GrColorType SkColorTypeToGrColorType(SkColorType ct) {
     switch (ct) {
         case kUnknown_SkColorType:            return GrColorType::kUnknown;
         case kAlpha_8_SkColorType:            return GrColorType::kAlpha_8;
@@ -966,7 +956,7 @@
                                               SkColorType skCT,
                                               const GrBackendFormat& format);
 
-static CONSTEXPR uint32_t GrColorTypeComponentFlags(GrColorType ct) {
+static constexpr uint32_t GrColorTypeComponentFlags(GrColorType ct) {
     switch (ct) {
         case GrColorType::kUnknown:          return 0;
         case GrColorType::kAlpha_8:          return kAlpha_SkColorTypeComponentFlag;
@@ -1016,45 +1006,45 @@
  */
 struct GrColorTypeDesc {
 public:
-    static CONSTEXPR GrColorTypeDesc MakeRGBA(int rgba, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeRGBA(int rgba, GrColorTypeEncoding e) {
         return {rgba, rgba, rgba, rgba, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeRGBA(int rgb, int a, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeRGBA(int rgb, int a, GrColorTypeEncoding e) {
         return {rgb, rgb, rgb, a, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeRGB(int rgb, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeRGB(int rgb, GrColorTypeEncoding e) {
         return {rgb, rgb, rgb, 0, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeRGB(int r, int g, int b, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeRGB(int r, int g, int b, GrColorTypeEncoding e) {
         return {r, g, b, 0, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeAlpha(int a, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeAlpha(int a, GrColorTypeEncoding e) {
         return {0, 0, 0, a, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeR(int r, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeR(int r, GrColorTypeEncoding e) {
         return {r, 0, 0, 0, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeRG(int rg, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeRG(int rg, GrColorTypeEncoding e) {
         return {rg, rg, 0, 0, 0, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeGray(int grayBits, GrColorTypeEncoding e) {
+    static constexpr GrColorTypeDesc MakeGray(int grayBits, GrColorTypeEncoding e) {
         return {0, 0, 0, 0, grayBits, e};
     }
 
-    static CONSTEXPR GrColorTypeDesc MakeInvalid() { return {}; }
+    static constexpr GrColorTypeDesc MakeInvalid() { return {}; }
 
     constexpr int r() const { return fRBits; }
     constexpr int g() const { return fGBits; }
     constexpr int b() const { return fBBits; }
     constexpr int a() const { return fABits; }
-    CONSTEXPR int operator[](int c) const {
+    constexpr int operator[](int c) const {
         switch (c) {
             case 0: return this->r();
             case 1: return this->g();
@@ -1078,7 +1068,7 @@
 
     constexpr GrColorTypeDesc() = default;
 
-    CONSTEXPR GrColorTypeDesc(int r, int g, int b, int a, int gray, GrColorTypeEncoding encoding)
+    constexpr GrColorTypeDesc(int r, int g, int b, int a, int gray, GrColorTypeEncoding encoding)
             : fRBits(r), fGBits(g), fBBits(b), fABits(a), fGrayBits(gray), fEncoding(encoding) {
         SkASSERT(r >= 0 && g >= 0 && b >= 0 && a >= 0 && gray >= 0);
         SkASSERT(!gray || (!r && !g && !b));
@@ -1086,7 +1076,7 @@
     }
 };
 
-static CONSTEXPR GrColorTypeDesc GrGetColorTypeDesc(GrColorType ct) {
+static constexpr GrColorTypeDesc GrGetColorTypeDesc(GrColorType ct) {
     switch (ct) {
         case GrColorType::kUnknown:
             return GrColorTypeDesc::MakeInvalid();
@@ -1136,7 +1126,7 @@
     SkUNREACHABLE;
 }
 
-static CONSTEXPR GrClampType GrColorTypeClampType(GrColorType colorType) {
+static constexpr GrClampType GrColorTypeClampType(GrColorType colorType) {
     if (GrGetColorTypeDesc(colorType).encoding() == GrColorTypeEncoding::kUnorm ||
         GrGetColorTypeDesc(colorType).encoding() == GrColorTypeEncoding::kSRGBUnorm) {
         return GrClampType::kAuto;
@@ -1146,7 +1136,7 @@
 
 // Consider a color type "wider" than n if it has more than n bits for any its representable
 // channels.
-static CONSTEXPR bool GrColorTypeIsWiderThan(GrColorType colorType, int n) {
+static constexpr bool GrColorTypeIsWiderThan(GrColorType colorType, int n) {
     SkASSERT(n > 0);
     auto desc = GrGetColorTypeDesc(colorType);
     return (desc.r() && desc.r() > n )||
@@ -1156,15 +1146,15 @@
            (desc.gray() && desc.gray() > n);
 }
 
-static CONSTEXPR bool GrColorTypeIsAlphaOnly(GrColorType ct) {
+static constexpr bool GrColorTypeIsAlphaOnly(GrColorType ct) {
     return kAlpha_SkColorTypeComponentFlag == GrColorTypeComponentFlags(ct);
 }
 
-static CONSTEXPR bool GrColorTypeHasAlpha(GrColorType ct) {
+static constexpr bool GrColorTypeHasAlpha(GrColorType ct) {
     return kAlpha_SkColorTypeComponentFlag & GrColorTypeComponentFlags(ct);
 }
 
-static CONSTEXPR size_t GrColorTypeBytesPerPixel(GrColorType ct) {
+static constexpr size_t GrColorTypeBytesPerPixel(GrColorType ct) {
     switch (ct) {
         case GrColorType::kUnknown:          return 0;
         case GrColorType::kAlpha_8:          return 1;
@@ -1192,7 +1182,7 @@
     SkUNREACHABLE;
 }
 
-static CONSTEXPR GrColorType GrPixelConfigToColorType(GrPixelConfig config) {
+static constexpr GrColorType GrPixelConfigToColorType(GrPixelConfig config) {
     switch (config) {
         case kUnknown_GrPixelConfig:
             return GrColorType::kUnknown;
@@ -1251,7 +1241,7 @@
     SkUNREACHABLE;
 }
 
-static CONSTEXPR GrPixelConfig GrColorTypeToPixelConfig(GrColorType colorType) {
+static constexpr GrPixelConfig GrColorTypeToPixelConfig(GrColorType colorType) {
     switch (colorType) {
         case GrColorType::kUnknown:          return kUnknown_GrPixelConfig;
         case GrColorType::kAlpha_8:          return kAlpha_8_GrPixelConfig;
@@ -1300,7 +1290,7 @@
 };
 
 #if GR_TEST_UTILS || defined(SK_ENABLE_DUMP_GPU)
-static CONSTEXPR const char* GrBackendApiToStr(GrBackendApi api) {
+static constexpr const char* GrBackendApiToStr(GrBackendApi api) {
     switch (api) {
         case GrBackendApi::kMetal:  return "Metal";
         case GrBackendApi::kDawn:   return "Dawn";
@@ -1311,7 +1301,7 @@
     SkUNREACHABLE;
 }
 
-static CONSTEXPR const char* GrColorTypeToStr(GrColorType ct) {
+static constexpr const char* GrColorTypeToStr(GrColorType ct) {
     switch (ct) {
         case GrColorType::kUnknown:          return "kUnknown";
         case GrColorType::kAlpha_8:          return "kAlpha_8";
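All of the `CONSTEXPR` → `constexpr` substitutions above are possible because C++14 relaxed the rules for `constexpr` function bodies: switch statements and multiple return statements, which C++11 rejected, are now allowed. A minimal sketch of the pattern (illustrative, not Skia code):

```cpp
// C++14 permits a switch with multiple returns in a constexpr function;
// under C++11 a constexpr body was limited to a single return statement.
enum class Channels { kNone, kRGB, kRGBA };

constexpr int ChannelCount(Channels c) {
    switch (c) {
        case Channels::kNone: return 0;
        case Channels::kRGB:  return 3;
        case Channels::kRGBA: return 4;
    }
    return -1;  // unreachable for valid enumerators
}

// Being genuinely constexpr, the function can feed a static_assert.
static_assert(ChannelCount(Channels::kRGBA) == 4, "expected 4 channels");
```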
diff --git a/src/third_party/skia/include/private/SkMacros.h b/src/third_party/skia/include/private/SkMacros.h
index 504c424..a681932 100644
--- a/src/third_party/skia/include/private/SkMacros.h
+++ b/src/third_party/skia/include/private/SkMacros.h
@@ -7,8 +7,6 @@
 #ifndef SkMacros_DEFINED
 #define SkMacros_DEFINED
 
-#include "nb/cpp14oncpp11.h"
-
 /*
  *  Usage:  SK_MACRO_CONCAT(a, b)   to construct the symbol ab
  *
diff --git a/src/third_party/skia/include/private/SkSemaphore.h b/src/third_party/skia/include/private/SkSemaphore.h
index 11816d5..29bbca6 100644
--- a/src/third_party/skia/include/private/SkSemaphore.h
+++ b/src/third_party/skia/include/private/SkSemaphore.h
@@ -9,13 +9,12 @@
 #define SkSemaphore_DEFINED
 
 #include "include/core/SkTypes.h"
-#include "include/private/SkMacros.h"
 #include "include/private/SkOnce.h"
 #include <atomic>
 
 class SkSemaphore {
 public:
-    CONSTEXPR SkSemaphore(int count = 0) : fCount(count), fOSSemaphore(nullptr) {}
+    constexpr SkSemaphore(int count = 0) : fCount(count), fOSSemaphore(nullptr) {}
 
     // Cleanup the underlying OS semaphore.
     ~SkSemaphore();
diff --git a/src/third_party/skia/include/private/SkTHash.h b/src/third_party/skia/include/private/SkTHash.h
index 8ea5691..bc563d1 100644
--- a/src/third_party/skia/include/private/SkTHash.h
+++ b/src/third_party/skia/include/private/SkTHash.h
@@ -297,7 +297,7 @@
         K key;
         V val;
         static const K& GetKey(const Pair& p) { return p.key; }
-        static auto Hash(const K& key) -> decltype(HashK()(key)) { return HashK()(key); }
+        static auto Hash(const K& key) { return HashK()(key); }
     };
 
     SkTHashTable<Pair, K> fTable;
@@ -351,7 +351,7 @@
 private:
     struct Traits {
         static const T& GetKey(const T& item) { return item; }
-        static auto Hash(const T& item) -> decltype(HashT()(item)) { return HashT()(item); }
+        static auto Hash(const T& item) { return HashT()(item); }
     };
     SkTHashTable<T, T, Traits> fTable;
 
diff --git a/src/third_party/skia/modules/skottie/src/text/RangeSelector.cpp b/src/third_party/skia/modules/skottie/src/text/RangeSelector.cpp
index f78c065..28fb50b 100644
--- a/src/third_party/skia/modules/skottie/src/text/RangeSelector.cpp
+++ b/src/third_party/skia/modules/skottie/src/text/RangeSelector.cpp
@@ -47,11 +47,11 @@
 
 template <>
 struct UnitTraits<RangeSelector::Units::kPercentage> {
-    static CONSTEXPR std::tuple<float, float, float> Defaults() {
+    static constexpr auto Defaults() {
         return std::make_tuple<float, float, float>(0, 100, 0);
     }
 
-    static std::tuple<float, float> Resolve(float s, float e, float o, size_t domain_size) {
+    static auto Resolve(float s, float e, float o, size_t domain_size) {
         return std::make_tuple(domain_size * (s + o) / 100,
                                domain_size * (e + o) / 100);
     }
@@ -59,12 +59,12 @@
 
 template <>
 struct UnitTraits<RangeSelector::Units::kIndex> {
-    static CONSTEXPR std::tuple<float, float, float> Defaults() {
+    static constexpr auto Defaults() {
         // It's OK to default fEnd to FLOAT_MAX, as it gets clamped when resolved.
         return std::make_tuple<float, float, float>(0, std::numeric_limits<float>::max(), 0);
     }
 
-    static std::tuple<float, float> Resolve(float s, float e, float o, size_t domain_size) {
+    static auto Resolve(float s, float e, float o, size_t domain_size) {
         return std::make_tuple(s + o, e + o);
     }
 };
diff --git a/src/third_party/skia/src/core/SkContourMeasure.cpp b/src/third_party/skia/src/core/SkContourMeasure.cpp
index 1958b5f..e827cc9 100644
--- a/src/third_party/skia/src/core/SkContourMeasure.cpp
+++ b/src/third_party/skia/src/core/SkContourMeasure.cpp
@@ -14,21 +14,15 @@
 
 #define kMaxTValue  0x3FFFFFFF
 
-CONSTEXPR static inline SkScalar tValue2Scalar(int t) {
+constexpr static inline SkScalar tValue2Scalar(int t) {
     SkASSERT((unsigned)t <= kMaxTValue);
     // 1/kMaxTValue can't be represented as a float, but it's close and the limits work fine.
     const SkScalar kMaxTReciprocal = 1.0f / (SkScalar)kMaxTValue;
     return t * kMaxTReciprocal;
 }
 
-// tValue2Scalar() cannot be a constexpr function in C++11 because variable
-// definitions and usage of SkASSERT() are both C++14 extensions (for constexpr
-// functions). Thus we do not use tValue2Scalar() in static asserts for this
-// situation.
-#if !defined(COBALT)
 static_assert(0.0f == tValue2Scalar(         0), "Lower limit should be exact.");
 static_assert(1.0f == tValue2Scalar(kMaxTValue), "Upper limit should be exact.");
-#endif
 
 SkScalar SkContourMeasure::Segment::getScalarT() const {
     return tValue2Scalar(fTValue);
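The `#if !defined(COBALT)` guard removed above existed because `tValue2Scalar()` could not be `constexpr` in C++11: local variable definitions and assertions in a `constexpr` body are C++14 extensions. With the guard gone, the function feeds the two `static_assert`s directly. A minimal sketch of the now-legal shape (illustrative only):

```cpp
#include <cassert>

// C++14: a constexpr body may declare locals and contain an assert, as
// long as the assertion holds during constant evaluation.
constexpr float Normalize(int t, int maxT) {
    assert(t >= 0 && t <= maxT);
    const float inv = 1.0f / static_cast<float>(maxT);  // local variable
    return t * inv;
}

static_assert(Normalize(0, 100) == 0.0f, "lower limit should be exact");
```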
diff --git a/src/third_party/skia/src/core/SkEnumerate.h b/src/third_party/skia/src/core/SkEnumerate.h
index c2d3602..54fcc0d 100644
--- a/src/third_party/skia/src/core/SkEnumerate.h
+++ b/src/third_party/skia/src/core/SkEnumerate.h
@@ -30,19 +30,11 @@
         using iterator_category = std::input_iterator_tag;
         constexpr Iterator(ptrdiff_t index, Iter it) : fIndex{index}, fIt{it} { }
         constexpr Iterator(const Iterator&) = default;
-        CONSTEXPR Iterator operator++() {
-            ++fIndex;
-            ++fIt;
-            return *this;
-        }
-        CONSTEXPR Iterator operator++(int) {
-            Iterator tmp(*this);
-            operator++();
-            return tmp;
-        }
+        constexpr Iterator operator++() { ++fIndex; ++fIt; return *this; }
+        constexpr Iterator operator++(int) { Iterator tmp(*this); operator++(); return tmp; }
         constexpr bool operator==(const Iterator& rhs) const { return fIt == rhs.fIt; }
         constexpr bool operator!=(const Iterator& rhs) const { return fIt != rhs.fIt; }
-        CONSTEXPR reference operator*() { return std::forward_as_tuple(fIndex, *fIt); }
+        constexpr reference operator*() { return std::forward_as_tuple(fIndex, *fIt); }
 
     private:
         ptrdiff_t fIndex;
@@ -56,7 +48,7 @@
             , fBegin{std::begin(fCollection)}
             , fEnd{std::end(fCollection)} { }
     constexpr SkEnumerate(const SkEnumerate& that) = default;
-    CONSTEXPR SkEnumerate& operator=(const SkEnumerate& that) {
+    constexpr SkEnumerate& operator=(const SkEnumerate& that) {
         fBegin = that.fBegin;
         fEnd = that.fEnd; return *this;
     }
diff --git a/src/third_party/skia/src/core/SkGlyph.h b/src/third_party/skia/src/core/SkGlyph.h
index 99b6714..6429c18 100644
--- a/src/third_party/skia/src/core/SkGlyph.h
+++ b/src/third_party/skia/src/core/SkGlyph.h
@@ -50,12 +50,13 @@
     constexpr explicit SkPackedGlyphID(SkGlyphID glyphID)
             : fID{glyphID} { }
 
-    CONSTEXPR SkPackedGlyphID(SkGlyphID glyphID, SkFixed x, SkFixed y)
+    constexpr SkPackedGlyphID(SkGlyphID glyphID, SkFixed x, SkFixed y)
             : fID{PackIDXY(glyphID, x, y)} {
         SkASSERT(fID != kImpossibleID);
     }
 
-    CONSTEXPR SkPackedGlyphID(SkGlyphID code, SkIPoint pt) : SkPackedGlyphID(code, pt.fX, pt.fY) {}
+    constexpr SkPackedGlyphID(SkGlyphID code, SkIPoint pt)
+        : SkPackedGlyphID(code, pt.fX, pt.fY) {}
 
     constexpr SkPackedGlyphID() : fID{kImpossibleID} {}
 
@@ -106,7 +107,7 @@
         return ((uint32_t)n >> kFixedPointSubPixelPosBits) & kSubPixelPosMask;
     }
 
-    CONSTEXPR SkFixed subToFixed(uint32_t subPixelPosBit) const {
+    constexpr SkFixed subToFixed(uint32_t subPixelPosBit) const {
         uint32_t subPixelPosition = (fID >> subPixelPosBit) & kSubPixelPosMask;
         return subPixelPosition << kFixedPointSubPixelPosBits;
     }
diff --git a/src/third_party/skia/src/core/SkSpan.h b/src/third_party/skia/src/core/SkSpan.h
index 11212b6..8db1866 100644
--- a/src/third_party/skia/src/core/SkSpan.h
+++ b/src/third_party/skia/src/core/SkSpan.h
@@ -9,7 +9,6 @@
 #define SkSpan_DEFINED
 
 #include <cstddef>
-#include "include/private/SkMacros.h"
 #include "include/private/SkTo.h"
 
 template <typename T>
@@ -20,7 +19,7 @@
     template <typename U, typename = typename std::enable_if<std::is_same<const U, T>::value>::type>
     constexpr SkSpan(const SkSpan<U>& that) : fPtr(that.data()), fSize{that.size()} {}
     constexpr SkSpan(const SkSpan& o) = default;
-    CONSTEXPR SkSpan& operator=(const SkSpan& that) {
+    constexpr SkSpan& operator=(const SkSpan& that) {
         fPtr = that.fPtr;
         fSize = that.fSize;
         return *this;
@@ -36,7 +35,7 @@
     constexpr size_t size() const { return fSize; }
     constexpr bool empty() const { return fSize == 0; }
     constexpr size_t size_bytes() const { return fSize * sizeof(T); }
-    CONSTEXPR SkSpan<T> first(size_t prefixLen) { return SkSpan<T>{fPtr, prefixLen}; }
+    constexpr SkSpan<T> first(size_t prefixLen) { return SkSpan<T>{fPtr, prefixLen}; }
 
 private:
     T* fPtr;
diff --git a/src/third_party/skia/src/core/SkVM.cpp b/src/third_party/skia/src/core/SkVM.cpp
index 6c93d1a..867ecba 100644
--- a/src/third_party/skia/src/core/SkVM.cpp
+++ b/src/third_party/skia/src/core/SkVM.cpp
@@ -744,7 +744,9 @@
     void Assembler::vcvtdq2ps (Ymm dst, Ymm x) { this->op(0,   0x0f,0x5b, dst,x); }
     void Assembler::vcvttps2dq(Ymm dst, Ymm x) { this->op(0xf3,0x0f,0x5b, dst,x); }
 
-    Assembler::Label Assembler::here() { return {{}, (int)this->size(), Label::None}; }
+    Assembler::Label Assembler::here() {
+        return { (int)this->size(), Label::None, {} };
+    }
 
     int Assembler::disp19(Label* l) {
         SkASSERT(l->kind == Label::None ||
diff --git a/src/third_party/skia/src/core/SkVM.h b/src/third_party/skia/src/core/SkVM.h
index 68f40d7..06b189a 100644
--- a/src/third_party/skia/src/core/SkVM.h
+++ b/src/third_party/skia/src/core/SkVM.h
@@ -86,24 +86,9 @@
         void vpblendvb(Ymm dst, Ymm x, Ymm y, Ymm z);
 
         struct Label {
-            enum Kind { None, ARMDisp19, X86Disp32 };
-#if defined(COBALT)
-            // In C++11, we run into issues when initializing a struct when it
-            // has initializers for non-static members. We must use a
-            // constructor instead. We must also specify the value of
-            // |references| before |offset| and |kind| because it does not
-            // have a default value.
-            Label(std::vector<int> references, int offset = 0, Kind kind = None)
-                    : references(references), offset(offset), kind(kind) {}
-
+            int                                 offset = 0;
+            enum { None, ARMDisp19, X86Disp32 } kind = None;
             std::vector<int>                    references;
-            int offset;
-            Kind kind;
-#else
-            std::vector<int> references;
-            int offset = 0;
-            Kind kind = None;
-#endif
         };
 
         Label here();
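The removed `COBALT` branch worked around a C++11 rule: a class with default member initializers (NSDMIs) is not an aggregate, so `Assembler::here()` could not brace-initialize a `Label`. C++14 lifted that restriction. A sketch of the difference (assumed shape, not the SkVM code itself):

```cpp
#include <vector>

// With NSDMIs this struct is still an aggregate in C++14,
// but was not one in C++11.
struct Label {
    int offset = 0;
    enum { None, ARMDisp19, X86Disp32 } kind = None;
    std::vector<int> references;
};

Label MakeLabel(int size) {
    // Aggregate initialization despite the NSDMIs: ill-formed in C++11,
    // accepted from C++14 onward.
    return { size, Label::None, {} };
}
```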
diff --git a/src/third_party/skia/src/core/SkZip.h b/src/third_party/skia/src/core/SkZip.h
index c5e4862..1b38b41 100644
--- a/src/third_party/skia/src/core/SkZip.h
+++ b/src/third_party/skia/src/core/SkZip.h
@@ -14,7 +14,6 @@
 #include <type_traits>
 
 #include "include/core/SkTypes.h"
-#include "include/private/SkMacros.h"
 #include "include/private/SkTemplates.h"
 #include "include/private/SkTo.h"
 #include "src/core/SkSpan.h"
@@ -34,18 +33,11 @@
         using iterator_category = std::input_iterator_tag;
         constexpr Iterator(const SkZip* zip, size_t index) : fZip{zip}, fIndex{index} { }
         constexpr Iterator(const Iterator& that) : Iterator{ that.fZip, that.fIndex } { }
-        CONSTEXPR Iterator& operator++() {
-            ++fIndex;
-            return *this;
-        }
-        CONSTEXPR Iterator operator++(int) {
-            Iterator tmp(*this);
-            operator++();
-            return tmp;
-        }
+        constexpr Iterator& operator++() { ++fIndex; return *this; }
+        constexpr Iterator operator++(int) { Iterator tmp(*this); operator++(); return tmp; }
         constexpr bool operator==(const Iterator& rhs) const { return fIndex == rhs.fIndex; }
         constexpr bool operator!=(const Iterator& rhs) const { return fIndex != rhs.fIndex; }
-        CONSTEXPR reference operator*() { return (*fZip)[fIndex]; }
+        constexpr reference operator*() { return (*fZip)[fIndex]; }
         friend constexpr difference_type operator-(Iterator lhs, Iterator rhs) {
             return lhs.fIndex - rhs.fIndex;
         }
@@ -85,12 +77,11 @@
     constexpr ReturnTuple back() const { return this->index(this->size() - 1); }
     constexpr Iterator begin() const { return Iterator{this, 0}; }
     constexpr Iterator end() const { return Iterator{this, this->size()}; }
-    template <size_t I>
-    constexpr SkSpan<typename std::tuple_element<I, std::tuple<Ts...>>::type> get() const {
+    template<size_t I> constexpr auto get() const {
         return SkMakeSpan(std::get<I>(fPointers), fSize);
     }
     constexpr std::tuple<Ts*...> data() const { return fPointers; }
-    CONSTEXPR SkZip first(size_t n) const {
+    constexpr SkZip first(size_t n) const {
         SkASSERT(n <= this->size());
         return SkZip{n, fPointers};
     }
@@ -100,7 +91,7 @@
         : fPointers{pointers}
         , fSize{n} {}
 
-    CONSTEXPR ReturnTuple index(size_t i) const {
+    constexpr ReturnTuple index(size_t i) const {
         SkASSERT(this->size() > 0);
         SkASSERT(i < this->size());
         return indexDetail(i, skstd::make_index_sequence<sizeof...(Ts)>{});
@@ -166,7 +157,9 @@
     };
 
 public:
-    template <typename... Ts> static CONSTEXPR SkZip<ValueType<Ts>...> MakeZip(Ts&&... ts) {
+    template<typename... Ts>
+    static constexpr auto MakeZip(Ts&& ... ts) {
+
         // Pick the first collection that has a size, and use that for the size.
         size_t size = PickOneSize<DecayPointerT<Ts>...>::Size(std::forward<Ts>(ts)...);
 
@@ -187,9 +180,8 @@
     }
 };
 
-template <typename... Ts>
-inline constexpr auto SkMakeZip(Ts&&... ts)
-        -> decltype(SkMakeZipDetail::MakeZip(std::forward<Ts>(ts)...)) {
+template<typename... Ts>
+inline constexpr auto SkMakeZip(Ts&& ... ts) {
     return SkMakeZipDetail::MakeZip(std::forward<Ts>(ts)...);
 }
 #endif //SkZip_DEFINED
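`SkMakeZip` above also benefits from C++14 return-type deduction: the trailing `-> decltype(...)` that repeated the whole call expression can simply be dropped. Sketch (illustrative only):

```cpp
#include <tuple>
#include <utility>

// C++14 deduces the return type from the return statement.
template <typename... Ts>
constexpr auto make_tuple_of(Ts&&... ts) {
    return std::make_tuple(std::forward<Ts>(ts)...);
}

// The C++11 spelling had to restate the body in the return type:
// template <typename... Ts>
// constexpr auto make_tuple_of(Ts&&... ts)
//     -> decltype(std::make_tuple(std::forward<Ts>(ts)...));
```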
diff --git a/src/third_party/skia/src/gpu/GrBackendTextureImageGenerator.cpp b/src/third_party/skia/src/gpu/GrBackendTextureImageGenerator.cpp
index ab0e842..bd4cacb 100644
--- a/src/third_party/skia/src/gpu/GrBackendTextureImageGenerator.cpp
+++ b/src/third_party/skia/src/gpu/GrBackendTextureImageGenerator.cpp
@@ -159,13 +159,9 @@
 
     // Must make copies of member variables to capture in the lambda since this image generator may
     // be deleted before we actually execute the lambda.
-    // Since initialized lambda captures are a C++14 extension, we need to
-    // initialize |refHelper|, |semaphore|, and |backendTexture| first.
-    RefHelper* refHelper = fRefHelper;
-    sk_sp<GrSemaphore> semaphore = fSemaphore;
-    GrBackendTexture backendTexture = fBackendTexture;
     sk_sp<GrTextureProxy> proxy = proxyProvider->createLazyProxy(
-            [refHelper, releaseProcHelper, semaphore, backendTexture, grColorType](
+            [refHelper = fRefHelper, releaseProcHelper, semaphore = fSemaphore,
+             backendTexture = fBackendTexture, grColorType](
                     GrResourceProvider* resourceProvider) -> GrSurfaceProxy::LazyCallbackResult {
                 if (semaphore) {
                     resourceProvider->priv().gpu()->waitSemaphore(semaphore);
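The hunk above replaces the C++11 workaround (copying members into named locals before the lambda) with C++14 init-captures, which initialize closure members directly at the capture site and allow moves. A minimal sketch (not Skia code):

```cpp
#include <memory>
#include <utility>

// An init-capture moves `ptr` straight into the closure. In C++11 a
// lambda could only capture existing variables by copy or reference,
// so move-only types had to be smuggled in via named locals or bind.
auto MakeReader(std::unique_ptr<int> ptr) {
    return [p = std::move(ptr)] { return *p; };
}

// Usage: auto reader = MakeReader(std::make_unique<int>(7)); reader() == 7.
```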
diff --git a/src/third_party/skia/src/gpu/GrGlyph.h b/src/third_party/skia/src/gpu/GrGlyph.h
index b99acee..76490f4 100644
--- a/src/third_party/skia/src/gpu/GrGlyph.h
+++ b/src/third_party/skia/src/gpu/GrGlyph.h
@@ -47,13 +47,6 @@
            : GrGlyph::MaskStyle::kCoverage_MaskStyle;
 	}
 
-    static inline bool Fits(const SkIRect& bounds) {
-        return SkTFitsIn<int16_t>(bounds.fLeft) &&
-               SkTFitsIn<int16_t>(bounds.fTop) &&
-               SkTFitsIn<int16_t>(bounds.fRight) &&
-               SkTFitsIn<int16_t>(bounds.fBottom);
-    }
-
     GrGlyph(const SkGlyph& skGlyph)
         : fPackedID{skGlyph.getPackedID()}
         , fMaskFormat{FormatFromSkGlyph(skGlyph.maskFormat())}
diff --git a/src/third_party/skia/src/gpu/GrPrimitiveProcessor.h b/src/third_party/skia/src/gpu/GrPrimitiveProcessor.h
index 871e453..2bcd01a 100644
--- a/src/third_party/skia/src/gpu/GrPrimitiveProcessor.h
+++ b/src/third_party/skia/src/gpu/GrPrimitiveProcessor.h
@@ -66,8 +66,8 @@
         constexpr GrVertexAttribType cpuType() const { return fCPUType; }
         constexpr GrSLType           gpuType() const { return fGPUType; }
 
-        inline CONSTEXPR size_t size() const;
-        CONSTEXPR size_t sizeAlign4() const { return SkAlign4(this->size()); }
+        inline constexpr size_t size() const;
+        constexpr size_t sizeAlign4() const { return SkAlign4(this->size()); }
 
         GrShaderVar asShaderVar() const {
             return {fName, fGPUType, GrShaderVar::kIn_TypeModifier};
@@ -294,7 +294,7 @@
  * This was moved from include/private/GrTypesPriv.h in service of Skia dependents that build
  * with C++11.
  */
-static CONSTEXPR inline size_t GrVertexAttribTypeSize(GrVertexAttribType type) {
+static constexpr inline size_t GrVertexAttribTypeSize(GrVertexAttribType type) {
     switch (type) {
         case kFloat_GrVertexAttribType:
             return sizeof(float);
@@ -362,7 +362,7 @@
     return 0;
 }
 
-CONSTEXPR size_t GrPrimitiveProcessor::Attribute::size() const {
+constexpr size_t GrPrimitiveProcessor::Attribute::size() const {
     return GrVertexAttribTypeSize(fCPUType);
 }
 
diff --git a/src/third_party/skia/src/gpu/GrSurfaceContext.cpp b/src/third_party/skia/src/gpu/GrSurfaceContext.cpp
index 5a0e3b8..4b5e0a8 100644
--- a/src/third_party/skia/src/gpu/GrSurfaceContext.cpp
+++ b/src/third_party/skia/src/gpu/GrSurfaceContext.cpp
@@ -648,16 +648,13 @@
     result.fTransferBuffer = std::move(buffer);
     auto at = this->colorInfo().alphaType();
     if (supportedRead.fColorType != dstCT || flip) {
-        // Since initialized lambda captures are a C++14 extension, we need to
-        // initialize |w| and |h| first.
-        int32_t w = rect.width();
-        int32_t h = rect.height();
-        result.fPixelConverter = [w, h, dstCT, supportedRead, at](void* dst, const void* src) {
+        result.fPixelConverter = [w = rect.width(), h = rect.height(), dstCT, supportedRead, at](
+                void* dst, const void* src) {
             GrImageInfo srcInfo(supportedRead.fColorType, at, nullptr, w, h);
             GrImageInfo dstInfo(dstCT,                    at, nullptr, w, h);
-            GrConvertPixels(dstInfo, dst, dstInfo.minRowBytes(), srcInfo, src,
-                            srcInfo.minRowBytes(),
-                            /* flipY = */ false);
+            GrConvertPixels(dstInfo, dst, dstInfo.minRowBytes(),
+                            srcInfo, src, srcInfo.minRowBytes(),
+                            /* flipY = */ false);
         };
     }
     return result;
diff --git a/src/third_party/skia/src/gpu/GrSwizzle.cpp b/src/third_party/skia/src/gpu/GrSwizzle.cpp
index f5d3cb8..f1b3b85 100644
--- a/src/third_party/skia/src/gpu/GrSwizzle.cpp
+++ b/src/third_party/skia/src/gpu/GrSwizzle.cpp
@@ -11,29 +11,6 @@
 
 void GrSwizzle::apply(SkRasterPipeline* pipeline) const {
     SkASSERT(pipeline);
-#if defined(COBALT)
-    // GrSwizzle cannot be a constant expression in C++11 because its
-    // constructor depends on CToI(), which cannot be a constant expression in
-    // C++11 either because it includes switch case statements and multiple
-    // return statements. So in situations where are still using C++11, we
-    // cannot use GrSwizzle objects in switch case statements because the case
-    // needs to be a constant value.
-    if (fKey == GrSwizzle("bgra").asKey()) {
-        pipeline->append(SkRasterPipeline::swap_rb);
-    } else if (fKey == GrSwizzle("aaa1").asKey()) {
-        pipeline->append(SkRasterPipeline::alpha_to_gray);
-    } else if (fKey == GrSwizzle("rgb1").asKey()) {
-        pipeline->append(SkRasterPipeline::force_opaque);
-    } else {
-        GR_STATIC_ASSERT(sizeof(uintptr_t) >= 4 * sizeof(char));
-        // Rather than allocate the 4 control bytes on the heap somewhere, just jam them right
-        // into a uintptr_t context.
-        uintptr_t ctx;
-        memcpy(&ctx, fSwiz, 4 * sizeof(char));
-        pipeline->append(SkRasterPipeline::swizzle, ctx);
-    }
-    return;
-#else
     switch (fKey) {
         case GrSwizzle("rgba").asKey():
             return;
@@ -56,5 +33,4 @@
             return;
         }
     }
-#endif
 }
diff --git a/src/third_party/skia/src/gpu/GrSwizzle.h b/src/third_party/skia/src/gpu/GrSwizzle.h
index e70d421..94562ff 100644
--- a/src/third_party/skia/src/gpu/GrSwizzle.h
+++ b/src/third_party/skia/src/gpu/GrSwizzle.h
@@ -16,60 +16,60 @@
 /** Represents a rgba swizzle. It can be converted either into a string or a eight bit int. */
 class GrSwizzle {
 public:
-    CONSTEXPR GrSwizzle() : GrSwizzle("rgba") {}
-    explicit CONSTEXPR GrSwizzle(const char c[4]);
+    constexpr GrSwizzle() : GrSwizzle("rgba") {}
+    explicit constexpr GrSwizzle(const char c[4]);
 
-    CONSTEXPR GrSwizzle(const GrSwizzle&);
-    CONSTEXPR GrSwizzle& operator=(const GrSwizzle& that);
+    constexpr GrSwizzle(const GrSwizzle&);
+    constexpr GrSwizzle& operator=(const GrSwizzle& that);
 
-    static CONSTEXPR GrSwizzle Concat(const GrSwizzle& a, const GrSwizzle& b);
+    static constexpr GrSwizzle Concat(const GrSwizzle& a, const GrSwizzle& b);
 
-    CONSTEXPR bool operator==(const GrSwizzle& that) const { return fKey == that.fKey; }
-    CONSTEXPR bool operator!=(const GrSwizzle& that) const { return !(*this == that); }
+    constexpr bool operator==(const GrSwizzle& that) const { return fKey == that.fKey; }
+    constexpr bool operator!=(const GrSwizzle& that) const { return !(*this == that); }
 
     /** Compact representation of the swizzle suitable for a key. */
-    CONSTEXPR uint16_t asKey() const { return fKey; }
+    constexpr uint16_t asKey() const { return fKey; }
 
     /** 4 char null terminated string consisting only of chars 'r', 'g', 'b', 'a', '0', and '1'. */
-    CONSTEXPR const char* c_str() const { return fSwiz; }
+    constexpr const char* c_str() const { return fSwiz; }
 
-    CONSTEXPR char operator[](int i) const {
+    constexpr char operator[](int i) const {
         SkASSERT(i >= 0 && i < 4);
         return fSwiz[i];
     }
 
     /** Applies this swizzle to the input color and returns the swizzled color. */
     template <SkAlphaType AlphaType>
-    CONSTEXPR SkRGBA4f<AlphaType> applyTo(const SkRGBA4f<AlphaType>& color) const;
+    constexpr SkRGBA4f<AlphaType> applyTo(const SkRGBA4f<AlphaType>& color) const;
 
     void apply(SkRasterPipeline*) const;
 
-    static CONSTEXPR GrSwizzle RGBA() { return GrSwizzle("rgba"); }
-    static CONSTEXPR GrSwizzle AAAA() { return GrSwizzle("aaaa"); }
-    static CONSTEXPR GrSwizzle RRRR() { return GrSwizzle("rrrr"); }
-    static CONSTEXPR GrSwizzle RRRA() { return GrSwizzle("rrra"); }
-    static CONSTEXPR GrSwizzle BGRA() { return GrSwizzle("bgra"); }
-    static CONSTEXPR GrSwizzle RGB1() { return GrSwizzle("rgb1"); }
+    static constexpr GrSwizzle RGBA() { return GrSwizzle("rgba"); }
+    static constexpr GrSwizzle AAAA() { return GrSwizzle("aaaa"); }
+    static constexpr GrSwizzle RRRR() { return GrSwizzle("rrrr"); }
+    static constexpr GrSwizzle RRRA() { return GrSwizzle("rrra"); }
+    static constexpr GrSwizzle BGRA() { return GrSwizzle("bgra"); }
+    static constexpr GrSwizzle RGB1() { return GrSwizzle("rgb1"); }
 
 private:
     template <SkAlphaType AlphaType>
-    static CONSTEXPR float ComponentIndexToFloat(const SkRGBA4f<AlphaType>& color, int idx);
-    static CONSTEXPR int CToI(char c);
-    static CONSTEXPR char IToC(int idx);
+    static constexpr float ComponentIndexToFloat(const SkRGBA4f<AlphaType>& color, int idx);
+    static constexpr int CToI(char c);
+    static constexpr char IToC(int idx);
 
     char fSwiz[5];
     uint16_t fKey;
 };
 
-inline CONSTEXPR GrSwizzle::GrSwizzle(const char c[4])
+constexpr GrSwizzle::GrSwizzle(const char c[4])
         : fSwiz{c[0], c[1], c[2], c[3], '\0'}
         , fKey((CToI(c[0]) << 0) | (CToI(c[1]) << 4) | (CToI(c[2]) << 8) | (CToI(c[3]) << 12)) {}
 
-inline CONSTEXPR GrSwizzle::GrSwizzle(const GrSwizzle& that)
+constexpr GrSwizzle::GrSwizzle(const GrSwizzle& that)
         : fSwiz{that.fSwiz[0], that.fSwiz[1], that.fSwiz[2], that.fSwiz[3], '\0'}
         , fKey(that.fKey) {}
 
-inline CONSTEXPR GrSwizzle& GrSwizzle::operator=(const GrSwizzle& that) {
+constexpr GrSwizzle& GrSwizzle::operator=(const GrSwizzle& that) {
     fSwiz[0] = that.fSwiz[0];
     fSwiz[1] = that.fSwiz[1];
     fSwiz[2] = that.fSwiz[2];
@@ -80,7 +80,7 @@
 }
 
 template <SkAlphaType AlphaType>
-CONSTEXPR SkRGBA4f<AlphaType> GrSwizzle::applyTo(const SkRGBA4f<AlphaType>& color) const {
+constexpr SkRGBA4f<AlphaType> GrSwizzle::applyTo(const SkRGBA4f<AlphaType>& color) const {
     uint32_t key = fKey;
     // Index of the input color that should be mapped to output r.
     int idx = (key & 15);
@@ -98,7 +98,7 @@
 }
 
 template <SkAlphaType AlphaType>
-CONSTEXPR float GrSwizzle::ComponentIndexToFloat(const SkRGBA4f<AlphaType>& color, int idx) {
+constexpr float GrSwizzle::ComponentIndexToFloat(const SkRGBA4f<AlphaType>& color, int idx) {
     if (idx <= 3) {
         return color[idx];
     }
@@ -111,7 +111,7 @@
     SkUNREACHABLE;
 }
 
-inline CONSTEXPR int GrSwizzle::CToI(char c) {
+constexpr int GrSwizzle::CToI(char c) {
     switch (c) {
         // r...a must map to 0...3 because other methods use them as indices into fSwiz.
         case 'r': return 0;
@@ -124,29 +124,7 @@
     }
 }
 
-inline CONSTEXPR char GrSwizzle::IToC(int idx) {
-#if defined(COBALT)
-    // CToI() cannot be a constexpr function in C++11 because it includes switch
-    // case statements and multiple return statements. Because it is not a
-    // constant value, it cannot be used in other switch case statements. So for
-    // situations where we are still using C++11, we need to use if-else
-    // statements instead.
-    if (idx == CToI('r')) {
-        return 'r';
-    } else if (idx == CToI('g')) {
-        return 'g';
-    } else if (idx == CToI('b')) {
-        return 'b';
-    } else if (idx == CToI('a')) {
-        return 'a';
-    } else if (idx == CToI('0')) {
-        return '0';
-    } else if (idx == CToI('1')) {
-        return '1';
-    } else {
-        SkUNREACHABLE;
-    }
-#else
+constexpr char GrSwizzle::IToC(int idx) {
     switch (idx) {
         case CToI('r'): return 'r';
         case CToI('g'): return 'g';
@@ -156,30 +134,17 @@
         case CToI('1'): return '1';
         default:        SkUNREACHABLE;
     }
-#endif
 }
 
-inline CONSTEXPR GrSwizzle GrSwizzle::Concat(const GrSwizzle& a, const GrSwizzle& b) {
+constexpr GrSwizzle GrSwizzle::Concat(const GrSwizzle& a, const GrSwizzle& b) {
     char swiz[4]{};
     for (int i = 0; i < 4; ++i) {
         int idx = (b.fKey >> (4U * i)) & 0xfU;
-#if defined(COBALT)
-        // Same reason as above in IToC() for using if-else statements instead
-        // of switch case statements when building with C++11 as in IToC().
-        if (idx == CToI('0')) {
-            swiz[i] = '0';
-        } else if (idx == CToI('1')) {
-            swiz[i] = '1';
-        } else {
-            swiz[i] = a.fSwiz[idx];
-        }
-#else
         switch (idx) {
             case CToI('0'): swiz[i] = '0';          break;
             case CToI('1'): swiz[i] = '1';          break;
             default:        swiz[i] = a.fSwiz[idx]; break;
         }
-#endif
     }
     return GrSwizzle(swiz);
 }
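The deleted if/else ladders in `IToC()` and `Concat()` existed because `CToI()` was not a true `constexpr` function under C++11 Cobalt builds, and case labels must be constant expressions. With `CToI()` genuinely `constexpr`, its calls are legal case labels. Sketch (illustrative only):

```cpp
// A constexpr function call is a valid case label once the function is
// really constexpr; C++14 allows the switch/multi-return body needed here.
constexpr int Code(char c) {
    switch (c) {
        case 'r': return 0;
        case 'g': return 1;
        default:  return -1;
    }
}

constexpr char Name(int idx) {
    switch (idx) {
        case Code('r'): return 'r';   // constexpr call as case label
        case Code('g'): return 'g';
        default:        return '?';
    }
}
```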
diff --git a/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.cpp b/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.cpp
index 1e653b9..e5e13ae 100644
--- a/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.cpp
+++ b/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.cpp
@@ -76,7 +76,8 @@
     }
 
     LazyInstantiateAtlasCallback cb = std::move(callback);
-    auto instantiate = [cb, pixelConfig, format, sampleCount](GrResourceProvider* rp) {
+    auto instantiate = [cb = std::move(cb), pixelConfig, format,
+                        sampleCount](GrResourceProvider* rp) {
         return cb(rp, pixelConfig, format, sampleCount);
     };
     sk_sp<GrTextureProxy> proxy = GrProxyProvider::MakeFullyLazyProxy(
diff --git a/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.h b/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.h
index 8bffb3f..ac7faaa 100644
--- a/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.h
+++ b/src/third_party/skia/src/gpu/ccpr/GrCCAtlas.h
@@ -54,7 +54,7 @@
         kA8_LiteralCoverage
     };
 
-    static CONSTEXPR GrColorType CoverageTypeToColorType(CoverageType coverageType) {
+    static constexpr GrColorType CoverageTypeToColorType(CoverageType coverageType) {
         switch (coverageType) {
             case CoverageType::kFP16_CoverageCount:
                 return GrColorType::kAlpha_F16;
diff --git a/src/third_party/skia/src/gpu/ccpr/GrGSCoverageProcessor.cpp b/src/third_party/skia/src/gpu/ccpr/GrGSCoverageProcessor.cpp
index b59875e..1398d5b 100644
--- a/src/third_party/skia/src/gpu/ccpr/GrGSCoverageProcessor.cpp
+++ b/src/third_party/skia/src/gpu/ccpr/GrGSCoverageProcessor.cpp
@@ -414,25 +414,15 @@
     if (4 == this->numInputPoints() || this->hasInputWeight()) {
         fInputXOrYValues =
                 {"x_or_y_values", kFloat4_GrVertexAttribType, kFloat4_GrSLType};
-// GrVertexAttribTypeSize() cannot be a constexpr function in C++11 because
-// switch-case statements + multiple return types are both C++14 extensions
-// for constexpr functions. Thus we do not use GrVertexAttribTypeSize() in
-// static asserts for this situation.
-#if !defined(COBALT)
         GR_STATIC_ASSERT(sizeof(QuadPointInstance) ==
                          2 * GrVertexAttribTypeSize(kFloat4_GrVertexAttribType));
         GR_STATIC_ASSERT(offsetof(QuadPointInstance, fY) ==
                          GrVertexAttribTypeSize(kFloat4_GrVertexAttribType));
-#endif
     } else {
         fInputXOrYValues =
                 {"x_or_y_values", kFloat3_GrVertexAttribType, kFloat3_GrSLType};
-// Same reason as above for not using GrVertexAttribTypeSize() in static
-// asserts.
-#if !defined(COBALT)
         GR_STATIC_ASSERT(sizeof(TriPointInstance) ==
                          2 * GrVertexAttribTypeSize(kFloat3_GrVertexAttribType));
-#endif
     }
 
     this->setVertexAttributes(&fInputXOrYValues, 1);
diff --git a/src/third_party/skia/src/gpu/ccpr/GrVSCoverageProcessor.cpp b/src/third_party/skia/src/gpu/ccpr/GrVSCoverageProcessor.cpp
index 548c29d..ae5cfca 100644
--- a/src/third_party/skia/src/gpu/ccpr/GrVSCoverageProcessor.cpp
+++ b/src/third_party/skia/src/gpu/ccpr/GrVSCoverageProcessor.cpp
@@ -504,25 +504,15 @@
     GrSLType xySLType;
     if (4 == this->numInputPoints() || this->hasInputWeight()) {
         GR_STATIC_ASSERT(offsetof(QuadPointInstance, fX) == 0);
-// GrVertexAttribTypeSize() cannot be a constexpr function in C++11 because
-// switch-case statements + multiple return types are both C++14 extensions
-// for constexpr functions. Thus we do not use GrVertexAttribTypeSize() in
-// static asserts for this situation.
-#if !defined(COBALT)
         GR_STATIC_ASSERT(sizeof(QuadPointInstance::fX) ==
                          GrVertexAttribTypeSize(kFloat4_GrVertexAttribType));
         GR_STATIC_ASSERT(sizeof(QuadPointInstance::fY) ==
                          GrVertexAttribTypeSize(kFloat4_GrVertexAttribType));
-#endif
         xyAttribType = kFloat4_GrVertexAttribType;
         xySLType = kFloat4_GrSLType;
     } else {
-// Same reason as above for not using GrVertexAttribTypeSize() in static
-// asserts.
-#if !defined(COBALT)
         GR_STATIC_ASSERT(sizeof(TriPointInstance) ==
                          2 * GrVertexAttribTypeSize(kFloat3_GrVertexAttribType));
-#endif
         xyAttribType = kFloat3_GrVertexAttribType;
         xySLType = kFloat3_GrSLType;
     }
diff --git a/src/third_party/skia/src/gpu/gl/GrGLGpu.h b/src/third_party/skia/src/gpu/gl/GrGLGpu.h
index 4612a2a..e49f459 100644
--- a/src/third_party/skia/src/gpu/gl/GrGLGpu.h
+++ b/src/third_party/skia/src/gpu/gl/GrGLGpu.h
@@ -543,9 +543,7 @@
         GrGLVertexArray*     fCoreProfileVertexArray;
     }                                       fHWVertexArrayState;
 
-    // Struct needs to be named so that we can specify a return type below in
-    // hwBufferState(), as C++11 does not support auto return types.
-    struct BufferState {
+    struct {
         GrGLenum                fGLTarget;
         GrGpuResource::UniqueID fBoundBufferUniqueID;
         bool                    fBufferZeroKnownBound;
@@ -556,7 +554,7 @@
         }
     }                                       fHWBufferState[kGrGpuBufferTypeCount];
 
-    BufferState* hwBufferState(GrGpuBufferType type) {
+    auto* hwBufferState(GrGpuBufferType type) {
         unsigned typeAsUInt = static_cast<unsigned>(type);
         SkASSERT(typeAsUInt < SK_ARRAY_COUNT(fHWBufferState));
         return &fHWBufferState[typeAsUInt];
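Dropping the `BufferState` name works because C++14 return-type deduction lets `hwBufferState()` return a pointer to the anonymous struct without spelling its type. Sketch (illustrative, not the GrGLGpu code):

```cpp
// C++14: `auto*` deduces the pointer type, so the struct no longer
// needs a name whose only purpose was the getter's return type.
class Gpu {
    struct {
        unsigned target = 0;
        bool     bound  = false;
    } fBufferState[2];

public:
    auto* bufferState(int i) { return &fBufferState[i]; }
};
```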
diff --git a/src/third_party/skia/src/gpu/gl/GrGLUtil.h b/src/third_party/skia/src/gpu/gl/GrGLUtil.h
index 467e0ed..35d9ad8 100644
--- a/src/third_party/skia/src/gpu/gl/GrGLUtil.h
+++ b/src/third_party/skia/src/gpu/gl/GrGLUtil.h
@@ -277,56 +277,35 @@
 // call glGetError without doing a redundant error check or logging.
 #define GR_GL_GET_ERROR(IFACE) (IFACE)->fFunctions.fGetError()
 
-    static CONSTEXPR GrGLFormat GrGLFormatFromGLEnum(GrGLenum glFormat) {
-        switch (glFormat) {
-            case GR_GL_RGBA8:
-                return GrGLFormat::kRGBA8;
-            case GR_GL_R8:
-                return GrGLFormat::kR8;
-            case GR_GL_ALPHA8:
-                return GrGLFormat::kALPHA8;
-            case GR_GL_LUMINANCE8:
-                return GrGLFormat::kLUMINANCE8;
-            case GR_GL_BGRA8:
-                return GrGLFormat::kBGRA8;
-            case GR_GL_RGB565:
-                return GrGLFormat::kRGB565;
-            case GR_GL_RGBA16F:
-                return GrGLFormat::kRGBA16F;
-            case GR_GL_LUMINANCE16F:
-                return GrGLFormat::kLUMINANCE16F;
-            case GR_GL_R16F:
-                return GrGLFormat::kR16F;
-            case GR_GL_RGB8:
-                return GrGLFormat::kRGB8;
-            case GR_GL_RG8:
-                return GrGLFormat::kRG8;
-            case GR_GL_RGB10_A2:
-                return GrGLFormat::kRGB10_A2;
-            case GR_GL_RGBA4:
-                return GrGLFormat::kRGBA4;
-            case GR_GL_SRGB8_ALPHA8:
-                return GrGLFormat::kSRGB8_ALPHA8;
-            case GR_GL_COMPRESSED_RGB8_ETC2:
-                return GrGLFormat::kCOMPRESSED_RGB8_ETC2;
-            case GR_GL_COMPRESSED_ETC1_RGB8:
-                return GrGLFormat::kCOMPRESSED_ETC1_RGB8;
-            case GR_GL_R16:
-                return GrGLFormat::kR16;
-            case GR_GL_RG16:
-                return GrGLFormat::kRG16;
-            case GR_GL_RGBA16:
-                return GrGLFormat::kRGBA16;
-            case GR_GL_RG16F:
-                return GrGLFormat::kRG16F;
+static constexpr GrGLFormat GrGLFormatFromGLEnum(GrGLenum glFormat) {
+    switch (glFormat) {
+        case GR_GL_RGBA8:                return GrGLFormat::kRGBA8;
+        case GR_GL_R8:                   return GrGLFormat::kR8;
+        case GR_GL_ALPHA8:               return GrGLFormat::kALPHA8;
+        case GR_GL_LUMINANCE8:           return GrGLFormat::kLUMINANCE8;
+        case GR_GL_BGRA8:                return GrGLFormat::kBGRA8;
+        case GR_GL_RGB565:               return GrGLFormat::kRGB565;
+        case GR_GL_RGBA16F:              return GrGLFormat::kRGBA16F;
+        case GR_GL_LUMINANCE16F:         return GrGLFormat::kLUMINANCE16F;
+        case GR_GL_R16F:                 return GrGLFormat::kR16F;
+        case GR_GL_RGB8:                 return GrGLFormat::kRGB8;
+        case GR_GL_RG8:                  return GrGLFormat::kRG8;
+        case GR_GL_RGB10_A2:             return GrGLFormat::kRGB10_A2;
+        case GR_GL_RGBA4:                return GrGLFormat::kRGBA4;
+        case GR_GL_SRGB8_ALPHA8:         return GrGLFormat::kSRGB8_ALPHA8;
+        case GR_GL_COMPRESSED_RGB8_ETC2: return GrGLFormat::kCOMPRESSED_RGB8_ETC2;
+        case GR_GL_COMPRESSED_ETC1_RGB8: return GrGLFormat::kCOMPRESSED_ETC1_RGB8;
+        case GR_GL_R16:                  return GrGLFormat::kR16;
+        case GR_GL_RG16:                 return GrGLFormat::kRG16;
+        case GR_GL_RGBA16:               return GrGLFormat::kRGBA16;
+        case GR_GL_RG16F:                return GrGLFormat::kRG16F;
 
-            default:
-                return GrGLFormat::kUnknown;
-        }
+        default:                         return GrGLFormat::kUnknown;
+    }
 }
 
 /** Returns either the sized internal format or compressed internal format of the GrGLFormat. */
-static CONSTEXPR GrGLenum GrGLFormatToEnum(GrGLFormat format) {
+static constexpr GrGLenum GrGLFormatToEnum(GrGLFormat format) {
     switch (format) {
         case GrGLFormat::kRGBA8:                return GR_GL_RGBA8;
         case GrGLFormat::kR8:                   return GR_GL_R8;
diff --git a/src/third_party/skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp b/src/third_party/skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp
index ae1bf3c..3ce3267 100644
--- a/src/third_party/skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp
+++ b/src/third_party/skia/src/gpu/gl/builders/GrGLProgramBuilder.cpp
@@ -117,7 +117,7 @@
     fInstanceAttributeCnt = primProc.numInstanceAttributes();
     fAttributes.reset(
             new GrGLProgram::Attribute[fVertexAttributeCnt + fInstanceAttributeCnt]);
-    auto addAttr = [&](int i, const GrPrimitiveProcessor::Attribute& a, size_t* stride) {
+    auto addAttr = [&](int i, const auto& a, size_t* stride) {
         fAttributes[i].fCPUType = a.cpuType();
         fAttributes[i].fGPUType = a.gpuType();
         fAttributes[i].fOffset = *stride;
@@ -129,12 +129,12 @@
     };
     fVertexStride = 0;
     int i = 0;
-    for (const GrPrimitiveProcessor::Attribute& attr : primProc.vertexAttributes()) {
+    for (const auto& attr : primProc.vertexAttributes()) {
         addAttr(i++, attr, &fVertexStride);
     }
     SkASSERT(fVertexStride == primProc.vertexStride());
     fInstanceStride = 0;
-    for (const GrPrimitiveProcessor::Attribute& attr : primProc.instanceAttributes()) {
+    for (const auto& attr : primProc.instanceAttributes()) {
         addAttr(i++, attr, &fInstanceStride);
     }
     SkASSERT(fInstanceStride == primProc.instanceStride());
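`addAttr` above becomes a C++14 generic lambda: the `auto` parameter stands in for `GrPrimitiveProcessor::Attribute` without naming it, and the range-for loops shorten to `const auto&` for the same reason. Sketch (illustrative only):

```cpp
#include <cstddef>
#include <vector>

int main() {
    size_t stride = 0;
    // Generic lambda: operator() is a template, instantiated per call.
    auto addAttr = [&](const auto& attr) { stride += attr.size(); };

    std::vector<int> a{1, 2, 3};
    std::vector<double> b{1.0};
    addAttr(a);  // instantiated for std::vector<int>
    addAttr(b);  // instantiated for std::vector<double>
    return static_cast<int>(stride);  // 4
}
```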
diff --git a/src/third_party/v8/toolchain.gypi b/src/third_party/v8/toolchain.gypi
index df598c9..c16480e 100644
--- a/src/third_party/v8/toolchain.gypi
+++ b/src/third_party/v8/toolchain.gypi
@@ -182,6 +182,13 @@
         'cflags!': [ '-Wall', '-Wextra' ],
         'cflags': [ '-Wno-return-type', '-Wno-int-in-bool-context' ],
       }],
+      ['v8_target_arch=="arm64" or v8_target_arch=="x64"', {
+        'defines': [
+          # Enables pointer compression on 64-bit platforms for Cobalt.
+          'V8_COMPRESS_POINTERS',
+          'V8_31BIT_SMIS_ON_64BIT_ARCH',
+        ],
+      }],
       ['v8_target_arch=="arm"', {
         'defines': [
           'V8_TARGET_ARCH_ARM',
diff --git a/src/third_party/v8/v8.gyp b/src/third_party/v8/v8.gyp
index efdfb66..db2a8ed 100644
--- a/src/third_party/v8/v8.gyp
+++ b/src/third_party/v8/v8.gyp
@@ -24,8 +24,6 @@
     'v8_enable_snapshot_native_code_counters%': 0,
     # Enable code-generation-time checking of types in the CodeStubAssembler.
     'v8_enable_verify_csa%': 0,
-    # Enable pointer compression (sets -dV8_COMPRESS_POINTERS).
-    'v8_enable_pointer_compression%': 0,
     'v8_enable_31bit_smis_on_64bit_arch%': 0,
     # Sets -dOBJECT_PRINT.
     'v8_enable_object_print%': 0,
@@ -248,6 +246,12 @@
     'v8_compiler_sources': ['<!@pymod_do_main(third_party.v8.gypfiles.GN-scraper "<(V8_ROOT)/BUILD.gn"  "v8_compiler_sources = ")'],
 
     'conditions': [
+      ['v8_target_arch=="arm64" or v8_target_arch=="x64"', {
+        # Enable pointer compression (sets -dV8_COMPRESS_POINTERS).
+        'v8_enable_pointer_compression%': 1,
+      }, {
+        'v8_enable_pointer_compression%': 0,
+      }],
       ['v8_enable_i18n_support', {
         'torque_files_v8_root_relative': [
           "src/objects/intl-objects.tq",
diff --git a/src/third_party/web_platform_tests/dom/events/EventTarget-constructible.any.js b/src/third_party/web_platform_tests/dom/events/EventTarget-constructible.any.js
new file mode 100644
index 0000000..1da39f6
--- /dev/null
+++ b/src/third_party/web_platform_tests/dom/events/EventTarget-constructible.any.js
@@ -0,0 +1,63 @@
+"use strict";
+
+test(() => {
+  const target = new EventTarget();
+  const event = new Event("foo", { bubbles: true, cancelable: false });
+  let callCount = 0;
+
+  function listener(e) {
+    assert_equals(e, event);
+    ++callCount;
+  }
+
+  target.addEventListener("foo", listener);
+
+  target.dispatchEvent(event);
+  assert_equals(callCount, 1);
+
+  target.dispatchEvent(event);
+  assert_equals(callCount, 2);
+
+  target.removeEventListener("foo", listener);
+  target.dispatchEvent(event);
+  assert_equals(callCount, 2);
+}, "A constructed EventTarget can be used as expected");
+
+test(() => {
+  class NicerEventTarget extends EventTarget {
+    on(...args) {
+      this.addEventListener(...args);
+    }
+
+    off(...args) {
+      this.removeEventListener(...args);
+    }
+
+    dispatch(type, detail) {
+      this.dispatchEvent(new CustomEvent(type, { detail }));
+    }
+  }
+
+  const target = new NicerEventTarget();
+  const event = new Event("foo", { bubbles: true, cancelable: false });
+  const detail = "some data";
+  let callCount = 0;
+
+  function listener(e) {
+    assert_equals(e.detail, detail);
+    ++callCount;
+  }
+
+  target.on("foo", listener);
+
+  target.dispatch("foo", detail);
+  assert_equals(callCount, 1);
+
+  target.dispatch("foo", detail);
+  assert_equals(callCount, 2);
+
+  target.off("foo", listener);
+  target.dispatch("foo", detail);
+  assert_equals(callCount, 2);
+}, "EventTarget can be subclassed");
+
diff --git a/src/third_party/zlib/BUILD.gn b/src/third_party/zlib/BUILD.gn
index 0f59c0a..f1ed18e 100644
--- a/src/third_party/zlib/BUILD.gn
+++ b/src/third_party/zlib/BUILD.gn
@@ -158,6 +158,8 @@
       "contrib/optimizations/inflate.c",
     ]
 
+    cflags_c = [ "-Wno-unused-function" ]
+
     if (use_arm_neon_optimizations && !is_debug) {
       # Here we trade better performance on newer/bigger ARMv8 cores
       # for less perf on ARMv7, per crbug.com/772870#c40
@@ -166,7 +168,9 @@
     }
   }
 
-  configs -= [ "//build/config/compiler:chromium_code" ]
+  if (!is_starboard) {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+  }
   configs += [
     ":zlib_internal_config",
     "//build/config/compiler:no_chromium_code",
@@ -224,7 +228,9 @@
     ]
   }
 
-  configs -= [ "//build/config/compiler:chromium_code" ]
+  if (!is_starboard) {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+  }
   configs += [
     ":zlib_internal_config",
     "//build/config/compiler:no_chromium_code",
@@ -275,6 +281,7 @@
 
   defines = []
   deps = []
+  cflags_c = []
 
   if (use_x86_x64_optimizations || use_arm_neon_optimizations) {
     deps += [
@@ -283,17 +290,36 @@
     ]
 
     if (use_x86_x64_optimizations) {
-      sources += [ "x86.c" ]
+      sources += [
+        "arm_stub.c",
+        "x86.c",
+      ]
+      cflags_c += [ "-Wno-missing-braces" ]
       deps += [ ":zlib_crc32_simd" ]
     } else if (use_arm_neon_optimizations) {
       sources += [ "contrib/optimizations/slide_hash_neon.h" ]
       deps += [ ":zlib_arm_crc32" ]
     }
   } else {
-    sources += [ "inflate.c" ]
+    sources += [
+      "arm_stub.c",
+      "inflate.c",
+    ]
   }
 
-  configs -= [ "//build/config/compiler:chromium_code" ]
+  if (is_starboard) {
+    sources += [ "inflate.c" ]
+    sources -= [
+      "gzclose.c",
+      "gzguts.h",
+      "gzlib.c",
+      "gzread.c",
+      "gzwrite.c",
+    ]
+    deps += [ "//starboard/common" ]
+  } else {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+  }
   configs += [
     ":zlib_internal_config",
     "//build/config/compiler:no_chromium_code",
@@ -346,7 +372,9 @@
     ":zlib",
   ]
 
-  configs -= [ "//build/config/compiler:chromium_code" ]
+  if (!is_starboard) {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+  }
   configs += [
     "//build/config/compiler:no_chromium_code",
 
@@ -357,22 +385,26 @@
   public_configs = [ ":zlib_config" ]
 }
 
-executable("zlib_bench") {
-  include_dirs = [ "." ]
+if (!is_starboard) {
+  executable("zlib_bench") {
+    include_dirs = [ "." ]
 
-  sources = [
-    "contrib/bench/zlib_bench.cc",
-  ]
+    sources = [
+      "contrib/bench/zlib_bench.cc",
+    ]
 
-  if (!is_debug) {
-    configs -= [ "//build/config/compiler:default_optimization" ]
-    configs += [ "//build/config/compiler:optimize_speed" ]
+    if (!is_debug) {
+      configs -= [ "//build/config/compiler:default_optimization" ]
+      configs += [ "//build/config/compiler:optimize_speed" ]
+    }
+
+    if (!is_starboard) {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    deps = [
+      ":zlib",
+    ]
   }
-
-  configs -= [ "//build/config/compiler:chromium_code" ]
-  configs += [ "//build/config/compiler:no_chromium_code" ]
-
-  deps = [
-    ":zlib",
-  ]
 }
diff --git a/src/tools/format_ninja.py b/src/tools/format_ninja.py
index e4e6ffc..b0d1ba0 100644
--- a/src/tools/format_ninja.py
+++ b/src/tools/format_ninja.py
@@ -90,7 +90,7 @@
     directory = entry['directory']
     command = normalize_command(entry['command'], directory)
     file_entry = relativize_path(entry['file'], directory)
-    output_entry = relativize_path(entry['output'], directory)
+    output_entry = entry['output']
     yield {'command': command, 'file': file_entry, 'output': output_entry}