Initial import of Cobalt 2.8885 2016-07-27
diff --git a/src/build/README.chromium b/src/build/README.chromium
new file mode 100644
index 0000000..012df35
--- /dev/null
+++ b/src/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+  common.vsprops
+    Not used anymore. No-op. Kept for compatibility with current projects.
+
+  debug.vsprops
+    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
+
+  external_code.vsprops
+    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
+
+  output_dll_copy.rules
+    Run to enable automatic copying of DLLs when they are used as an input file in a vcproj project.
+
+  release.vsprops
+    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.
diff --git a/src/build/all.gyp b/src/build/all.gyp
new file mode 100644
index 0000000..1b16c89
--- /dev/null
+++ b/src/build/all.gyp
@@ -0,0 +1,736 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../chrome/chrome.gyp:*',
+        '../content/content.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../media/media.gyp:*',
+        '../net/net.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../sql/sql.gyp:*',
+        '../sync/sync.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/bzip2/bzip2.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../ui/ui.gyp:*',
+        '../webkit/support/webkit_support.gyp:*',
+        'temp_gyp/googleurl.gyp:*',
+      ],
+      'conditions': [
+        ['OS!="ios"', {
+          'dependencies': [
+            '../cc/cc_tests.gyp:*',
+            '../device/device.gyp:*',
+            '../gpu/gpu.gyp:*',
+            '../gpu/tools/tools.gyp:*',
+            '../ipc/ipc.gyp:*',
+            '../jingle/jingle.gyp:*',
+            '../ppapi/ppapi.gyp:*',
+            '../ppapi/ppapi_internal.gyp:*',
+            '../printing/printing.gyp:*',
+            '../skia/skia.gyp:*',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+            '../third_party/cld/cld.gyp:*',
+            '../third_party/codesighs/codesighs.gyp:*',
+            '../third_party/ffmpeg/ffmpeg.gyp:*',
+            '../third_party/iccjpeg/iccjpeg.gyp:*',
+            '../third_party/libpng/libpng.gyp:*',
+            '../third_party/libusb/libusb.gyp:*',
+            '../third_party/libwebp/libwebp.gyp:*',
+            '../third_party/libxslt/libxslt.gyp:*',
+            '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+            '../third_party/mesa/mesa.gyp:*',
+            '../third_party/modp_b64/modp_b64.gyp:*',
+            '../third_party/npapi/npapi.gyp:*',
+            '../third_party/ots/ots.gyp:*',
+            '../third_party/qcms/qcms.gyp:*',
+            '../third_party/re2/re2.gyp:re2',
+            '../third_party/WebKit/Source/WebKit/chromium/All.gyp:*',
+            '../v8/tools/gyp/v8.gyp:*',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:*',
+            '../webkit/webkit.gyp:*',
+            '<(libjpeg_gyp_path):*',
+          ],
+        }],
+        ['os_posix==1 and OS!="android" and OS!="ios"', {
+          'dependencies': [
+            '../third_party/yasm/yasm.gyp:*#host',
+          ],
+        }],
+        ['OS=="mac" or OS=="ios" or OS=="win"', {
+          'dependencies': [
+            '../third_party/nss/nss.gyp:*',
+           ],
+        }],
+        ['OS=="lb_shell"', {
+          'dependencies!': [
+            '../third_party/modp_b64/modp_b64.gyp:*',
+          ],
+        }],
+        ['OS=="win" or OS=="ios" or OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+           ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../third_party/ocmock/ocmock.gyp:*',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../courgette/courgette.gyp:*',
+            '../dbus/dbus.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+          ],
+          'conditions': [
+            ['branding=="Chrome"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_packages_<(channel)',
+              ],
+            }],
+            ['chromeos==0', {
+              'dependencies': [
+                '../third_party/cros_dbus_cplusplus/cros_dbus_cplusplus.gyp:*',
+                '../third_party/libmtp/libmtp.gyp:*',
+                '../third_party/mtpd/mtpd.gyp:*',
+              ],
+            }],
+          ],
+        }],
+        ['toolkit_uses_gtk==1', {
+          'dependencies': [
+            '../tools/gtk_clipboard_dump/gtk_clipboard_dump.gyp:*',
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'conditions': [
+            ['win_use_allocator_shim==1', {
+              'dependencies': [
+                '../base/allocator/allocator.gyp:*',
+              ],
+            }],
+          ],
+          'dependencies': [
+            '../chrome_frame/chrome_frame.gyp:*',
+            '../cloud_print/cloud_print.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../rlz/rlz.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+            '../third_party/angle/src/build_angle.gyp:*',
+            '../third_party/bsdiff/bsdiff.gyp:*',
+            '../third_party/bspatch/bspatch.gyp:*',
+            '../third_party/gles2_book/gles2_book.gyp:*',
+            '../tools/memory_watcher/memory_watcher.gyp:*',
+          ],
+        }, {
+          'dependencies': [
+            '../third_party/libevent/libevent.gyp:*',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/views/controls/webview/webview.gyp:*',
+            '../ui/views/views.gyp:*',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ash/ash.gyp:*',
+            '../ui/aura/aura.gyp:*',
+            '../ui/oak/oak.gyp:*',
+          ],
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting.gyp:*',
+          ],
+        }],
+        ['use_openssl==0', {
+          'dependencies': [
+            '../net/third_party/nss/ssl.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All
+    {
+      'target_name': 'All_syzygy',
+      'type': 'none',
+      'conditions': [
+        ['OS=="win" and fastbuild==0 and asan!=1', {
+            'dependencies': [
+              '../chrome/installer/mini_installer_syzygy.gyp:*',
+            ],
+          },
+        ],
+      ],
+    }, # target_name: All_syzygy
+    {
+      'target_name': 'chromium_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../chrome/chrome.gyp:unit_tests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../media/media.gyp:media_unittests',
+        '../net/net.gyp:net_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../ui/ui.gyp:ui_unittests',
+        'temp_gyp/googleurl.gyp:googleurl_unittests',
+      ],
+      'conditions': [
+        ['OS!="ios"', {
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver2_tests',
+            '../chrome/chrome.gyp:chromedriver2_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../device/device.gyp:device_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:mini_installer_test',
+            # mini_installer_tests depends on mini_installer. This should be
+            # defined in installer.gyp.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+            '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../webkit/webkit.gyp:pull_in_copy_TestNetscapePlugIn',
+            '../ui/views/views.gyp:views_unittests',
+            '../webkit/webkit.gyp:test_shell_common',
+           ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+          ],
+        }],
+      ],
+    }, # target_name: chromium_builder_tests
+    {
+      'target_name': 'chromium_2010_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        'chromium_builder_tests',
+      ],
+    }, # target_name: chromium_2010_builder_tests
+  ],
+  'conditions': [
+    ['OS!="ios"', {
+      'targets': [
+        {
+          'target_name': 'chromium_builder_nacl_win_integration',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_qa', # needed for pyauto
+            'chromium_builder_tests',
+          ],
+        }, # target_name: chromium_builder_nacl_win_integration
+        {
+          'target_name': 'chromium_builder_perf',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_qa', # needed for pyauto
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:sync_performance_tests',
+          ],
+        }, # target_name: chromium_builder_perf
+        {
+          'target_name': 'chromium_gpu_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:gpu_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../content/content.gyp:content_browsertests',
+            '../gpu/gpu.gyp:gl_tests',
+          ],
+          'conditions': [
+            ['internal_gles2_conform_tests', {
+              'dependencies': [
+                '../gpu/gles2_conform_test/gles2_conform_test.gyp:gles2_conform_test',
+              ],
+            }], # internal_gles2_conform
+          ],
+        }, # target_name: chromium_gpu_builder
+        {
+          'target_name': 'chromium_gpu_debug_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:gpu_tests',
+            '../content/content.gyp:content_browsertests',
+            '../gpu/gpu.gyp:gl_tests',
+          ],
+          'conditions': [
+            ['internal_gles2_conform_tests', {
+              'dependencies': [
+                '../gpu/gles2_conform_test/gles2_conform_test.gyp:gles2_conform_test',
+              ],
+            }], # internal_gles2_conform
+          ],
+        }, # target_name: chromium_gpu_debug_builder
+        {
+          'target_name': 'chromium_builder_qa',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver2',
+            # Dependencies of pyauto_functional tests.
+            '../remoting/remoting.gyp:remoting_webapp',
+          ],
+          'conditions': [
+            # If you change this condition, make sure you also change it
+            # in chrome_tests.gypi
+            ['enable_automation==1 and (OS=="mac" or OS=="win" or (os_posix==1 and target_arch==python_arch))', {
+              'dependencies': [
+                '../chrome/chrome.gyp:pyautolib',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_me2me_host_archive',
+              ],
+            }],
+            ['OS=="win" and component != "shared_library" and wix_exists == "True" and sas_dll_exists == "True"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_host_installation',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_builder_qa
+        {
+          'target_name': 'chromium_builder_perf_av',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_qa',  # needed for perf pyauto tests
+            '../webkit/webkit.gyp:pull_in_DumpRenderTree',  # to run layout tests
+          ],
+        },  # target_name: chromium_builder_perf_av
+        {
+          'target_name': 'chromium_builder_webrtc',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_qa',  # needed for perf pyauto tests
+            '../third_party/libjingle/libjingle.gyp:peerconnection_server',
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+        },  # target_name: chromium_builder_webrtc
+      ],  # targets
+    }],
+    ['OS=="mac"', {
+      'targets': [
+        {
+          # Target to build everything plus the dmg.  We don't put the dmg
+          # in the All target because developers really don't need it.
+          'target_name': 'all_and_dmg',
+          'type': 'none',
+          'dependencies': [
+            'All',
+            '../chrome/chrome.gyp:build_app_dmg',
+          ],
+        },
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder_dbg',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ui/ui.gyp:ui_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../rlz/rlz.gyp:*',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_rel',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ui/ui.gyp:ui_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+            '../net/net.gyp:net_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_asan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../net/net.gyp:dns_fuzz_stub',
+            '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_valgrind_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ui/ui.gyp:ui_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+      ],  # targets
+    }], # OS="mac"
+    ['OS=="win"', {
+      'targets': [
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:mini_installer_test',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            # mini_installer_tests depends on mini_installer. This should be
+            # defined in installer.gyp.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+            '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ui/ui.gyp:ui_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../ui/views/views.gyp:views_unittests',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+            '../webkit/webkit.gyp:pull_in_copy_TestNetscapePlugIn',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_win_cf',
+          'type': 'none',
+          'dependencies': [
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+            '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'webkit_builder_win',
+          'type': 'none',
+          'dependencies': [
+            '../webkit/webkit.gyp:test_shell',
+            '../webkit/webkit.gyp:test_shell_tests',
+            '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+            '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+          ],
+        },
+      ],  # targets
+      'conditions': [
+        ['branding=="Chrome"', {
+          'targets': [
+            {
+              'target_name': 'chrome_official_builder',
+              'type': 'none',
+              'dependencies': [
+                '../chrome/chrome.gyp:chromedriver',
+                '../chrome/chrome.gyp:chromedriver2',
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:crash_service_win64',
+                '../chrome/chrome.gyp:performance_ui_tests',
+                '../chrome/chrome.gyp:policy_templates',
+                '../chrome/chrome.gyp:pyautolib',
+                '../chrome/chrome.gyp:reliability_tests',
+                '../chrome/chrome.gyp:automated_ui_tests',
+                '../chrome/installer/mini_installer.gyp:mini_installer',
+                '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+                '../courgette/courgette.gyp:courgette',
+                '../courgette/courgette.gyp:courgette64',
+                '../cloud_print/cloud_print.gyp:cloud_print',
+                '../remoting/remoting.gyp:remoting_webapp',
+                '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmplugin',
+              ],
+              'conditions': [
+                ['internal_pdf', {
+                  'dependencies': [
+                    '../pdf/pdf.gyp:pdf',
+                  ],
+                }], # internal_pdf
+                ['component != "shared_library" and wix_exists == "True" and \
+                    sas_dll_exists == "True"', {
+                  'dependencies': [
+                    '../remoting/remoting.gyp:remoting_host_installation',
+                  ],
+                }], # component != "shared_library"
+              ]
+            },
+          ], # targets
+        }], # branding=="Chrome"
+       ], # conditions
+    }], # OS="win"
+    ['use_aura==1', {
+      'targets': [
+        {
+          'target_name': 'aura_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../device/device.gyp:device_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../ui/aura/aura.gyp:*',
+            '../ui/compositor/compositor.gyp:*',
+            '../ui/ui.gyp:ui_unittests',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_examples_with_content_exe',
+            '../ui/views/views.gyp:views_unittests',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+            '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              # Remove this when we have the real compositor.
+              'copies': [
+                {
+                  'destination': '<(PRODUCT_DIR)',
+                  'files': ['../third_party/directxsdk/files/dlls/D3DX10d_43.dll']
+                },
+              ],
+              'dependencies': [
+                '../content/content.gyp:content_unittests',
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_shell',
+                '../ash/ash.gyp:ash_unittests',
+              ],
+            }],
+            ['OS=="linux"', {
+              # Tests that currently only work on Linux.
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../content/content.gyp:content_unittests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+              ],
+            }],
+            ['OS=="mac"', {
+              # Exclude dependencies that are not currently implemented.
+              'dependencies!': [
+                '../chrome/chrome.gyp:chrome',
+                '../chrome/chrome.gyp:unit_tests',
+                '../device/device.gyp:device_unittests',
+                '../ui/views/views.gyp:views_unittests',
+              ],
+            }],
+            ['chromeos==1', {
+              'dependencies': [
+                '../chromeos/chromeos.gyp:chromeos_unittests',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "use_aura==1"
+    ['test_isolation_mode != "noop"', {
+      'targets': [
+        {
+          'target_name': 'chromium_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../chrome/chrome.gyp:browser_tests_run',
+            '../chrome/chrome.gyp:sync_integration_tests_run',
+            '../chrome/chrome.gyp:unit_tests_run',
+            '../net/net.gyp:net_unittests_run',
+          ],
+        }, # target_name: chromium_swarm_tests
+      ],
+    }],
+  ], # conditions
+}
diff --git a/src/build/all_android.gyp b/src/build/all_android.gyp
new file mode 100644
index 0000000..a8cb2a3
--- /dev/null
+++ b/src/build/all_android.gyp
@@ -0,0 +1,128 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is all.gyp file for Android to prevent breakage in Android and other
+# platforms; it will be churning a lot in the short term and eventually be merged
+# into all.gyp.
+
+{
+  'variables': {
+    # A hook that can be overridden in other repositories to add additional
+    # compilation targets to 'All'
+    'android_app_targets%': [],
+  },
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        '../content/content.gyp:content_shell_apk',
+        '<@(android_app_targets)',
+        'android_builder_tests',
+        '../android_webview/android_webview.gyp:android_webview_apk',
+        '../chrome/chrome.gyp:chromium_testshell',
+        # TODO(nyquist) This should instead be a target for sync when all of
+        # the sync-related code for Android has been upstreamed.
+        # See http://crbug.com/159203
+        '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
+      ],
+    }, # target_name: All
+    {
+      # The current list of tests for android.  This is temporary
+      # until the full set is supported.  If adding a new test here,
+      # please also add it to build/android/run_tests.py, else the
+      # test is not run.
+      #
+      # WARNING:
+      # Do not add targets here without communicating the implications
+      # on tryserver triggers and load.  Discuss with jrg please.
+      'target_name': 'android_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../android_webview/android_webview.gyp:android_webview_unittests',
+        '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+        '../base/base.gyp:base_unittests',
+        '../cc/cc_tests.gyp:cc_perftests_apk',
+        '../cc/cc_tests.gyp:cc_unittests',
+        '../chrome/chrome.gyp:unit_tests',
+        '../content/content.gyp:content_shell_test_apk',
+        '../content/content.gyp:content_unittests',
+        '../gpu/gpu.gyp:gpu_unittests',
+        '../ipc/ipc.gyp:ipc_tests',
+        '../media/media.gyp:media_unittests',
+        '../net/net.gyp:net_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../third_party/WebKit/Source/WebKit/chromium/All.gyp:*',
+        '../tools/android/device_stats_monitor/device_stats_monitor.gyp:device_stats_monitor',
+        '../tools/android/fake_dns/fake_dns.gyp:fake_dns',
+        '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
+        '../tools/android/forwarder2/forwarder.gyp:forwarder2',
+        '../tools/android/md5sum/md5sum.gyp:md5sum',
+        '../ui/ui.gyp:ui_unittests',
+        # Required by ui_unittests.
+        # TODO(wangxianzhu): It'd be better to let ui_unittests depend on it,
+        # this would cause circular gyp dependency which needs refactoring the
+        # gyps to resolve.
+        '../chrome/chrome_resources.gyp:packed_resources',
+      ],
+      'conditions': [
+        ['linux_breakpad==1', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:breakpad_unittests',
+            # Also compile the tools needed to deal with minidumps, they are
+            # needed to run minidump tests upstream.
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:symupload#host',
+            '../breakpad/breakpad.gyp:minidump_dump#host',
+            '../breakpad/breakpad.gyp:minidump_stackwalk#host'
+          ],
+        }],
+        ['"<(gtest_target_type)"=="shared_library"', {
+          'dependencies': [
+            # The first item is simply the template.  We add as a dep
+            # to make sure it builds in ungenerated form.  TODO(jrg):
+            # once stable, transition to a test-only (optional)
+            # target.
+            '../testing/android/native_test.gyp:native_test_apk',
+            # Unit test bundles packaged as an apk.
+            '../android_webview/android_webview.gyp:android_webview_unittests_apk',
+            '../base/base.gyp:base_unittests_apk',
+            '../cc/cc_tests.gyp:cc_unittests_apk',
+            '../chrome/chrome.gyp:unit_tests_apk',
+            '../content/content.gyp:content_unittests_apk',
+            '../gpu/gpu.gyp:gpu_unittests_apk',
+            '../ipc/ipc.gyp:ipc_tests_apk',
+            '../media/media.gyp:media_unittests_apk',
+            '../net/net.gyp:net_unittests_apk',
+            '../sql/sql.gyp:sql_unittests_apk',
+            '../sync/sync.gyp:sync_unit_tests_apk',
+            '../ui/ui.gyp:ui_unittests_apk',
+            '../android_webview/android_webview.gyp:android_webview_test_apk',
+            '../chrome/chrome.gyp:chromium_testshell_test_apk',
+            '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests_apk'
+          ],
+        }],
+      ],
+    },
+    {
+      # Experimental / in-progress targets that are expected to fail
+      # but we still try to compile them on bots (turning the stage
+      # orange, not red).
+      'target_name': 'android_experimental',
+      'type': 'none',
+      'dependencies': [
+      ],
+    },
+    {
+      # In-progress targets that are expected to fail and are NOT run
+      # on any bot.
+      'target_name': 'android_in_progress',
+      'type': 'none',
+      'dependencies': [
+        '../content/content.gyp:content_browsertests',
+      ],
+    },
+  ],  # targets
+}
diff --git a/src/build/android/AndroidManifest.xml b/src/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..0822e36
--- /dev/null
+++ b/src/build/android/AndroidManifest.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This dummy manifest is passed to aapt when generating R.java in java.gypi.
+  Nothing in the manifest is used, but it is still required by aapt.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dummy.package" />
diff --git a/src/build/android/adb_chromium_testshell_command_line b/src/build/android/adb_chromium_testshell_command_line
new file mode 100755
index 0000000..8c09e3f
--- /dev/null
+++ b/src/build/android/adb_chromium_testshell_command_line
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current chromium test shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the chromium
+# test shell flags. For example:
+#   adb_chromium_testshell_command_line --enable-webgl
+#
+# To remove all chromium test shell flags, pass an empty string for the flags:
+#   adb_chromium_testshell_command_line ""
+
+CMD_LINE_FILE=/data/local/tmp/chromium-testshell-command-line
+
+if [ $# -eq 0 ] ; then
+  # If nothing specified, print the command line (stripping off "chromium_testshell")
+  tempfile=$(tempfile)
+  adb pull $CMD_LINE_FILE $tempfile 2>/dev/null
+  if [ $? -eq 0 ] ; then
+    rm $tempfile
+    adb shell cat $CMD_LINE_FILE | cut -d " " -f "2-" 2>/dev/null
+  fi
+elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+  # If given an empty string, delete the command line.
+  set -x
+  adb shell rm $CMD_LINE_FILE >/dev/null
+else
+  # Else set it.
+  set -x
+  adb shell "echo 'chromium_testshell $*' > $CMD_LINE_FILE"
+  # Prevent other apps from modifying flags -- this can create security issues.
+  adb shell chmod 0664 $CMD_LINE_FILE
+fi
+
diff --git a/src/build/android/adb_content_shell_command_line b/src/build/android/adb_content_shell_command_line
new file mode 100755
index 0000000..f3c1d4f
--- /dev/null
+++ b/src/build/android/adb_content_shell_command_line
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_content_shell_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_content_shell_command_line ""
+
+CMD_LINE_FILE=/data/local/tmp/content-shell-command-line
+
+if [ $# -eq 0 ] ; then
+  # If nothing specified, print the command line (stripping off "content_shell")
+  tempfile=$(tempfile)
+  adb pull $CMD_LINE_FILE $tempfile 2>/dev/null
+  if [ $? -eq 0 ] ; then
+    rm $tempfile
+    adb shell cat $CMD_LINE_FILE | cut -d " " -f "2-" 2>/dev/null
+  fi
+elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+  # If given an empty string, delete the command line.
+  set -x
+  adb shell rm $CMD_LINE_FILE >/dev/null
+else
+  # Else set it.
+  set -x
+  adb shell "echo 'content_shell $*' > $CMD_LINE_FILE"
+  # Prevent other apps from modifying flags -- this can create security issues.
+  adb shell chmod 0664 $CMD_LINE_FILE
+fi
+
diff --git a/src/build/android/adb_device_functions.sh b/src/build/android/adb_device_functions.sh
new file mode 100755
index 0000000..66cc32f
--- /dev/null
+++ b/src/build/android/adb_device_functions.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# A collection of functions useful for maintaining android devices
+
+
+# Run an adb command on all connected device in parallel.
+# Usage: adb_all command line to eval.  Quoting is optional.
+#
+# Examples:
+#  adb_all install Chrome.apk
+#  adb_all 'shell cat /path/to/file'
+#
+adb_all() {
+  if [[ $# == 0 ]]; then
+    echo "Usage: adb_all <adb command>.  Quoting is optional."
+    echo "Example: adb_all install Chrome.apk"
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices -b)
+  local NUM_DEVICES=$(echo $DEVICES | wc -w)
+  if (( $NUM_DEVICES > 1 )); then
+    echo "Looping over $NUM_DEVICES devices"
+  fi
+  _adb_multi "$DEVICES" "$*"
+}
+
+
+# Run a command on each connected device.  Quoting the command is suggested but
+# not required.  The script setups up variable DEVICE to correspond to the
+# current serial number.  Intended for complex one_liners that don't work in
+# adb_all
+# Usage: adb_device_loop 'command line to eval'
+adb_device_loop() {
+  if [[ $# == 0 ]]; then
+    echo "Intended for more complex one-liners that cannot be done with" \
+        "adb_all."
+    echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
+        'adb shell cat /data/local.prop)"'
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices)
+  if [[ -z $DEVICES ]]; then
+    return
+  fi
+  # Do not change DEVICE variable name - part of api
+  for DEVICE in $DEVICES; do
+    DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
+    echo "Running on $DEVICE ($DEV_TYPE)"
+    ANDROID_SERIAL=$DEVICE eval "$*"
+  done
+}
+
+# Erases data from any devices visible on adb.  To preserve a device,
+# disconnect it or:
+#  1) Reboot it into fastboot with 'adb reboot bootloader'
+#  2) Run wipe_all_devices to wipe remaining devices
+#  3) Restore device it with 'fastboot reboot'
+#
+#  Usage: wipe_all_devices [-f]
+#
+wipe_all_devices() {
+  if [[ -z $(which adb) || -z $(which fastboot) ]]; then
+    echo "aborting: adb and fastboot not in path"
+    return 1
+  elif ! $(groups | grep -q 'plugdev'); then
+    echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
+  fi
+
+  local DEVICES=$(adb_get_devices -b)
+
+  if [[ $1 != '-f' ]]; then
+    echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
+    read -p "Hit enter to continue"
+  fi
+
+  _adb_multi "$DEVICES" "reboot bootloader"
+  # Subshell to isolate job list
+  (
+  for DEVICE in $DEVICES; do
+    fastboot_erase $DEVICE &
+  done
+  wait
+  )
+
+  # Reboot devices together
+  for DEVICE in $DEVICES; do
+    fastboot -s $DEVICE reboot
+  done
+}
+
+# Wipe a device in fastboot.
+# Usage fastboot_erase [serial]
+fastboot_erase() {
+  if [[ -n $1 ]]; then
+    echo "Wiping $1"
+    local SERIAL="-s $1"
+  else
+    if [ -z $(fastboot devices) ]; then
+      echo "No devices in fastboot, aborting."
+      echo "Check out wipe_all_devices to see if sufficient"
+      echo "You can put a device in fastboot using adb reboot bootloader"
+      return 1
+    fi
+    local SERIAL=""
+  fi
+  fastboot $SERIAL erase cache
+  fastboot $SERIAL erase userdata
+}
+
+# Get list of devices connected via adb
+# Args: -b block until adb detects a device
+adb_get_devices() {
+  local DEVICES="$(adb devices | grep 'device$')"
+  if [[ -z $DEVICES && $1 == '-b' ]]; then
+    echo '- waiting for device -' >&2
+    local DEVICES="$(adb wait-for-device devices | grep 'device$')"
+  fi
+  echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
+}
+
+###################################################
+## HELPER FUNCTIONS
+###################################################
+
+# Run an adb command in parallel over a device list
+_adb_multi() {
+  local DEVICES=$1
+  local ADB_ARGS=$2
+  (
+    for DEVICE in $DEVICES; do
+      adb -s $DEVICE $ADB_ARGS &
+    done
+    wait
+  )
+}
diff --git a/src/build/android/adb_gdb b/src/build/android/adb_gdb
new file mode 100755
index 0000000..fcbbed6
--- /dev/null
+++ b/src/build/android/adb_gdb
@@ -0,0 +1,895 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell, or adb_gdb_drt
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell rm "$TARGET_GDBSERVER" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C.
+trap "clean_exit 1" INT
+
+panic () {
+  echo "ERROR: $@" >&2
+  clean_exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs
+
+# NOTE: Allow wrapper scripts to set various default through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper defaults in the --help output.
+
+# Allow wrapper scripts to set the default activity through
+# the ADB_GDB_ACTIVITY variable. Users are still able to change the
+# final activity name through --activity=<name> option.
+#
+# This is only for cosmetic reasons, i.e. to display the proper default
+# in the --help output.
+#
+DEFAULT_ACTIVITY=${ADB_GDB_ACTIVITY:-".Main"}
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ACTIVITY=$DEFAULT_ACTIVITY
+ADB=
+ANNOTATE=
+# Note: Ignore BUILDTYPE variable, because the Ninja build doesn't use it.
+BUILDTYPE=
+FORCE=
+GDBINIT=
+GDBSERVER=
+HELP=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+SANDBOXED=
+SANDBOXED_INDEX=
+START=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --activity=*)
+      ACTIVITY=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --force)
+      FORCE=true
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --sandboxed)
+      SANDBOXED=true
+      ;;
+    --sandboxed=*)
+      SANDBOXED=true
+      SANDBOXED_INDEX=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --start)
+      START=true
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    --debug)
+      BUILDTYPE=Debug
+      ;;
+    --release)
+      BUILDTYPE=Release
+      ;;
+    -*)
+      panic "Unknown option $opt, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+print_help_options () {
+  cat <<EOF
+EOF
+}
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+This can be a regular Android application process, or a sandboxed
+service, if you use the --sandboxed or --sandboxed=<num> option.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+If you have sourced Chromium's build/android/envsetup.sh, this script will
+find all of them automatically. This is the recommended way to use it.
+
+Otherwise, if you have ANDROID_NDK_ROOT defined in your environment,
+the script will use it to find the gdb and gdbserver binaries. You can
+also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/out/Release/lib/           (used by Ninja builds)
+  \$CHROMIUM_SRC/out/Debug/lib/             (used by Ninja builds)
+  \$CHROMIUM_SRC/out/Release/lib.target/    (used by Make builds)
+  \$CHROMIUM_SRC/out/Debug/lib.target/      (used by Make builds)
+
+You can restrict this search by using --release or --debug to specify the
+build type, or simply use --symbol-dir=<path> to specify the file manually.
+
+The script tries to extract the target architecture from your GYP_DEFINES,
+but if this fails, will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-lib options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and ANDROID_SERIAL is not defined.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to strongly improve the debugging experience, like allowing
+readable thread stacks and more. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --sandboxed           Debug first sandboxed process we find.
+  --sandboxed=<num>     Debug specific sandboxed process.
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --force               Kill any previous debugging session, if any.
+  --start               Start package's activity on device.
+  --activity=<name>     Activity name for --start [$DEFAULT_ACTIVITY].
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<program>       Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<program>       Specify host ADB binary.
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+  --debug               Use libraries under out/Debug.
+  --release             Use libraries under out/Release.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  if [ -z "$ANDROID_NDK_ROOT" ]; then
+    panic "Can't find NDK directory, please source \
+build/android/envsetup.sh!"
+  fi
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Find the target architecture from our $GYP_DEFINES
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  local ARCH=$(echo $GYP_DEFINES | tr ' ' '\n' | grep '^target_arch=' |\
+               cut -d= -f2)
+  case $ARCH in
+    ia32|i?86|x86) echo "x86";;
+    mips|arm) echo "$ARCH";;
+    *) echo "";
+  esac
+}
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because it uses it to find
+  # NDK-specific files (host gdb) with it.
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system tag, i.e. the name used to identify the host.
+# out: NDK system tag (e.g. 'linux-x86').
+get_ndk_host_tag () {
+  if [ -z "$NDK_HOST_TAG" ]; then
+    case $(uname -s) in
+      Linux) NDK_HOST_TAG=linux-x86;;
+      Darwin) NDK_HOST_TAG=darwin-x86;;
+      *) panic "You can't run this script on this system: $uname -a";;
+    esac
+  fi
+  echo "$NDK_HOST_TAG"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86!
+  if [ "$1" = "x86" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.6/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.4.3/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST GCC CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  GCC=
+  HOST=$(get_ndk_host_tag)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  GCC=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST/bin/$CONFIG-gcc")
+  if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST/bin/i686-android-linux-gcc")
+  fi
+  if [ -z "$GCC" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${GCC%%gcc}"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local HOST=$(get_ndk_host_tag)
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match to avoid
+# issues when both binaries do not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+GDB=$(which $ANDROID_TOOLCHAIN/*-gdb 2>/dev/null | head -1)
+if [ -z "$GDB" ]; then
+  panic "Can't find Android gdb client in your path, check your \
+--toolchain path."
+fi
+log "Host gdb client: $GDB"
+
+# Find gdbserver binary, we will later push it to /data/local/tmp
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there are more than one device connected, and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
+
+# If --force is specified, try to kill any gdbserver process started by the
+# same user on the device. Normally, these are killed automatically by the
+# script on exit, but there are a few corner cases where this would still
+# be needed.
+if [ "$FORCE" ]; then
+  GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }')
+  for GDB_PID in $GDBSERVER_PIDS; do
+    log "Killing previous gdbserver (PID=$GDB_PID)"
+    adb_shell kill -9 $GDB_PID
+  done
+fi
+
+if [ "$START" ]; then
+  log "Starting $PROGRAM_NAME on device."
+  adb_shell am start -n $PACKAGE_NAME/$ACTIVITY 2>/dev/null
+  adb_shell ps | grep -q $PACKAGE_NAME
+  fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \
+package is installed?"
+fi
+
+# Return the timestamp of a given file, as number of seconds since epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_SRC/out/
+#
+# $1: $BUILDTYPE value, can be empty
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  local SUBDIRS SUBDIR LIST DIR DIR_LIBS TSTAMP
+  # Note: Ninja places debug libraries under out/$BUILDTYPE/lib/, while
+  # Make places them under out/$BUILDTYPE/lib.target.
+  if [ "$1" ]; then
+    SUBDIRS="$1/lib $1/lib.target"
+  else
+    SUBDIRS="Release/lib Debug/lib Release/lib.target Debug/lib.target"
+  fi
+  LIST=$TMPDIR/scan-subdirs-$$.txt
+  printf "" > "$LIST"
+  for SUBDIR in $SUBDIRS; do
+    DIR=$CHROMIUM_SRC/out/$SUBDIR
+    if [ -d "$DIR" ]; then
+      # Ignore build directories that don't contain symbol versions
+      # of the shared libraries.
+      DIR_LIBS=$(ls "$DIR"/lib*.so 2>/dev/null)
+      if [ -z "$DIR_LIBS" ]; then
+        echo "No shared libs: $DIR"
+        continue
+      fi
+      TSTAMP=$(get_file_timestamp "$DIR")
+      printf "%s %s\n" "$TSTAMP" "$SUBDIR" >> "$LIST"
+    fi
+  done
+  SUBDIR=$(cat $LIST | sort -r | head -1 | cut -d" " -f2)
+  rm -f "$LIST"
+
+  if [ -z "$SUBDIR" ]; then
+    if [ -z "$1" ]; then
+      panic "Could not find any build directory under \
+$CHROMIUM_SRC/out. Please build the program first!"
+    else
+      panic "Could not find any $1 directory under \
+$CHROMIUM_SRC/out. Check your build type!"
+    fi
+  fi
+
+  SYMBOL_DIR=$CHROMIUM_SRC/out/$SUBDIR
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir "$BUILDTYPE"
+fi
+
+# Allow several concurrent debugging sessions
+TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Return the build fingerprint contained in a build.prop file.
+# $1: path to build.prop file
+get_build_fingerprint_from () {
+  cat "$1" | grep -e '^ro.build.fingerprint=' | cut -d= -f2
+}
+
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+# If --pull-libs-dir is not specified, and this is a platform build, look
+# if we can use the symbolic libraries under $ANDROID_PRODUCT_OUT/symbols/
+# directly, if the build fingerprint matches the device.
+if [ -z "$ORG_PULL_LIBS_DIR" -a \
+     "$ANDROID_PRODUCT_OUT" -a \
+     -f "$ANDROID_PRODUCT_OUT/system/build.prop" ]; then
+  ANDROID_FINGERPRINT=$(get_build_fingerprint_from \
+                        "$ANDROID_PRODUCT_OUT"/system/build.prop)
+  log "Android build fingerprint:  $ANDROID_FINGERPRINT"
+  if [ "$ANDROID_FINGERPRINT" = "$DEVICE_FINGERPRINT" ]; then
+    log "Perfect match!"
+    PULL_LIBS_DIR=$ANDROID_PRODUCT_OUT/symbols
+    HOST_FINGERPRINT=$ANDROID_FINGERPRINT
+    if [ "$PULL_LIBS" ]; then
+      log "Ignoring --pull-libs since the device and platform build \
+fingerprints match."
+      NO_PULL_LIBS=true
+    fi
+  fi
+fi
+
+# If neither --pull-libs nor --no-pull-libs was specified, check the build
+# fingerprints of the device, and the cached system libraries on the host.
+#
+if [ -z "$NO_PULL_LIBS" -a -z "$PULL_LIBS" ]; then
+  if [ ! -f "$PULL_LIBS_DIR/build.prop" ]; then
+    log "Auto-config: --pull-libs  (no cached libraries)"
+    PULL_LIBS=true
+  else
+    HOST_FINGERPRINT=$(get_build_fingerprint_from "$PULL_LIBS_DIR/build.prop")
+    log "Host build fingerprint:   $HOST_FINGERPRINT"
+    if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+      log "Auto-config: --no-pull-libs (fingerprint match)"
+      NO_PULL_LIBS=true
+    else
+      log "Auto-config: --pull-libs  (fingerprint mismatch)"
+      PULL_LIBS=true
+    fi
+  fi
+fi
+
+# Extract the system libraries from the device if necessary.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+fi
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# If requested, work for M-x gdb.  The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+GDB_ARGS=
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the first argument or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ "$SANDBOXED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX
+  elif [ "$SANDBOXED" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    if [ "$START" ]; then
+      panic "Can't find application process PID, did it crash?"
+    else
+      panic "Can't find application process PID, are you sure it is \
+running? Try using --start."
+    fi
+  fi
+  log "Found process PID: $PID"
+elif [ "$SANDBOXED" ]; then
+  echo "WARNING: --sandboxed option ignored due to use of --pid."
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+SHELL_UID=$(adb shell cat /proc/self/status | \
+            awk '$1 == "Uid:" { print $2; }')
+log "Shell UID: $SHELL_UID"
+COMMAND_PREFIX=
+if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+  log "Using run-as $PACKAGE_NAME to run without root."
+  COMMAND_PREFIX="run-as $PACKAGE_NAME"
+fi
+
+# Pull device's system libraries that are mapped by our process.
+# Pulling all system libraries takes too long, so determine which ones
+# we need by looking at /proc/$PID/maps instead
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  SYSTEM_LIBS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps | \
+                awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
+  for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Pulling device build.prop"
+  adb pull /system/build.prop $PULL_LIBS_DIR/build.prop
+  fail_panic "Could not pull device build.prop !?"
+fi
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+
+# This is a re-implementation of gdbclient, where we use compatible
+# versions of gdbserver and $GDBNAME to ensure that everything works
+# properly.
+#
+
+# Push gdbserver to the device
+log "Pushing gdbserver to $TARGET_GDBSERVER"
+adb push $GDBSERVER $TARGET_GDBSERVER &>/dev/null
+fail_panic "Could not copy gdbserver to the device!"
+
+PORT=5039
+HOST_PORT=$PORT
+TARGET_PORT=$PORT
+
+# Pull the app_process binary from the device
+GDBEXEC=app_process
+log "Pulling $GDBEXEC from device"
+adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Setup network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)"
+adb forward tcp:$HOST_PORT tcp:$TARGET_PORT
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which requires of course root privileges.
+# Maybe we should add a --root option to enable this?
+#
+log "Starting gdbserver in the background:"
+GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+--attach $PID"
+("$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+ --attach $PID > $GDBSERVER_LOG 2>&1) &
+GDBSERVER_PID=$!
+echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+log "background job pid: $GDBSERVER_PID"
+
+# Check that it is still running after a few seconds. If not, this means we
+# could not properly attach to it
+sleep 2
+log "Job control: $(jobs -l)"
+STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+if [ "$STATE" != "Running" ]; then
+  echo "ERROR: GDBServer could not attach to PID $PID!"
+  echo "Failure log (use --verbose for more information):"
+  cat $GDBSERVER_LOG
+  exit 1
+fi
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+echo -n "" > $COMMANDS
+echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS
+echo "directory $CHROMIUM_SRC" >> $COMMANDS
+echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS
+echo "set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \
+    >> $COMMANDS
+echo "echo Attaching and reading symbols, this may take a while.." \
+    >> $COMMANDS
+echo "target remote :$HOST_PORT" >> $COMMANDS
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> $COMMANDS
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat $COMMANDS
+  echo "### END $COMMANDS"
+fi
+
+log "Launching gdb client: $GDB $GDBARGS -x $COMMANDS"
+$GDB $GDBARGS -x $COMMANDS &&
+rm -f "$GDBSERVER_PIDFILE"
+
+clean_exit $?
diff --git a/src/build/android/adb_gdb_chromium_testshell b/src/build/android/adb_gdb_chromium_testshell
new file mode 100755
index 0000000..0f1b4a7
--- /dev/null
+++ b/src/build/android/adb_gdb_chromium_testshell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromiumTestShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ChromiumTestShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromiumTestShell \
+    --package-name=org.chromium.chrome.testshell \
+    "$@"
diff --git a/src/build/android/adb_gdb_content_shell b/src/build/android/adb_gdb_content_shell
new file mode 100755
index 0000000..d0b6947
--- /dev/null
+++ b/src/build/android/adb_gdb_content_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ContentShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ContentShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ContentShell \
+    --package-name=org.chromium.content_shell \
+    "$@"
diff --git a/src/build/android/adb_gdb_drt b/src/build/android/adb_gdb_drt
new file mode 100755
index 0000000..6157361
--- /dev/null
+++ b/src/build/android/adb_gdb_drt
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a DumpRenderTree process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ChromeNativeTestActivity
+"$PROGDIR"/adb_gdb \
+  --program-name=DumpRenderTree \
+  --package-name=org.chromium.native_test \
+  "$@"
diff --git a/src/build/android/adb_install_apk.py b/src/build/android/adb_install_apk.py
new file mode 100755
index 0000000..db2c62c
--- /dev/null
+++ b/src/build/android/adb_install_apk.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import multiprocessing
+import optparse
+import os
+import sys
+
+from pylib import android_commands
+from pylib import apk_info
+from pylib import constants
+from pylib import test_options_parser
+
+
+def _InstallApk(args):
+  apk_path, apk_package, device = args
+  result = android_commands.AndroidCommands(device=device).ManagedInstall(
+      apk_path, False, apk_package)
+  print '-----  Installed on %s  -----' % device
+  print result
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  test_options_parser.AddInstallAPKOption(parser)
+  options, args = parser.parse_args(argv)
+  test_options_parser.ValidateInstallAPKOption(parser, options)
+  if len(args) > 1:
+    raise Exception('Error: Unknown argument:', args[1:])
+
+  devices = android_commands.GetAttachedDevices()
+  if not devices:
+    raise Exception('Error: no connected devices')
+
+  if not options.apk_package:
+    options.apk_package = apk_info.GetPackageNameForApk(options.apk)
+
+  pool = multiprocessing.Pool(len(devices))
+  # Send a tuple (apk_path, apk_package, device) per device.
+  pool.map(_InstallApk, zip([options.apk] * len(devices),
+                            [options.apk_package] * len(devices),
+                            devices))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/adb_kill_content_shell b/src/build/android/adb_kill_content_shell
new file mode 100755
index 0000000..d24c7a9
--- /dev/null
+++ b/src/build/android/adb_kill_content_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running content shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+   echo "Not running Content shell."
+else 
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "Content shell does not appear to be running."
+   fi
+fi
diff --git a/src/build/android/adb_logcat_monitor.py b/src/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..aeaef0b
--- /dev/null
+++ b/src/build/android/adb_logcat_monitor.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns an adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error(device_id + ':   ' + line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
+    return re.findall('^(\w+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_, unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_, unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if not device_id in devices:
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except:
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print 'adb_logcat_monitor: Initializing'
+    sys.exit(main(*sys.argv[1:3]))
+
+  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/src/build/android/adb_logcat_printer.py b/src/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..5194668
--- /dev/null
+++ b/src/build/android/adb_logcat_printer.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to out, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+
+import cStringIO
+import logging
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match('^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile('^logcat_(\w+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if not device_id in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(base_dir, output_file):
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since
+    # adb_logcat_monitor.py command will have spawned more than 5 seconds
+    # prior to calling this script.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  if len(sys.argv) == 1:
+    print 'Usage: %s <base_dir>' % sys.argv[0]
+    sys.exit(1)
+  sys.exit(main(sys.argv[1], sys.stdout))
diff --git a/src/build/android/adb_run_chromium_testshell b/src/build/android/adb_run_chromium_testshell
new file mode 100755
index 0000000..b17482c
--- /dev/null
+++ b/src/build/android/adb_run_chromium_testshell
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome.testshell/.ChromiumTestShellActivity \
+  $INTENT_ARGS
diff --git a/src/build/android/adb_run_content_shell b/src/build/android/adb_run_content_shell
new file mode 100755
index 0000000..ea92f5a
--- /dev/null
+++ b/src/build/android/adb_run_content_shell
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.content_shell/.ContentShellActivity \
+  $INTENT_ARGS
diff --git a/src/build/android/ant/chromium-apk.xml b/src/build/android/ant/chromium-apk.xml
new file mode 100644
index 0000000..2e24084
--- /dev/null
+++ b/src/build/android/ant/chromium-apk.xml
@@ -0,0 +1,102 @@
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+<project default="error">
+  <property name="ant.project.name" value="${APK_NAME}"/>
+  <!--
+    Gyp will pass CONFIGURATION_NAME as the target for ant to build. These targets will call the
+    appropriate sdk tools target.
+  -->
+  <target name="Debug" depends="debug"/>
+  <target name="Release" depends="release"/>
+  <!-- __LB_SHELL__
+    Map LbShell configurations into Android build system configurations
+  -->
+  <target name="Android_Debug" depends="debug"/>
+  <target name="Android_Devel" depends="debug"/>
+  <target name="Android_QA" depends="debug"/>
+  <target name="Android_Gold" depends="release"/>
+  <target name="error">
+    <fail message="CONFIGURATION_NAME should be passed as a target to ant."/>
+  </target>
+
+  <description>
+    Building ${ant.project.name}.apk
+  </description>
+  <import file="common.xml"/>
+
+  <property-location
+      name="out.dir"
+      location="${PRODUCT_DIR}/${PACKAGE_NAME}"
+      check-exists="false"
+  />
+
+  <path id="javac.srcdirs.additional">
+    <filelist files="${ADDITIONAL_SRC_DIRS}"/>
+    <filelist files="${GENERATED_SRC_DIRS}"/>
+  </path>
+
+  <!--
+    Include additional resource folders in the apk, e.g. content/.../res.  We
+    list the res folders in project.library.res.folder.path and the
+    corresponding java packages in project.library.packages, which must be
+    semicolon-delimited while ADDITIONAL_RES_PACKAGES is space-delimited, hence
+    the javascript task.
+  -->
+
+  <path id="project.library.res.folder.path">
+    <filelist files="${ADDITIONAL_RES_DIRS}"/>
+  </path>
+  <!-- __LB_SHELL__
+    javascript isn't supported in stock Apache Ant(TM) version 1.8.2
+    so we are going to disable this section for now.
+  <script language="javascript">
+    var before = project.getProperty("ADDITIONAL_RES_PACKAGES");
+    project.setProperty("project.library.packages", before.replaceAll(" ", ";"));
+  </script>
+  -->
+  <property-value name="target.abi" value="${APP_ABI}"/>
+  <property name="resource.absolute.dir" value="${RESOURCE_DIR}"/>
+  <property-value name="gen.absolute.dir" value="${out.dir}/gen"/>
+  <property-location name="native.libs.absolute.dir" location="${out.dir}/libs"
+      check-exists="false"/>
+
+  <property-value name="version.code" value="${APP_MANIFEST_VERSION_CODE}"/>
+  <property-value name="version.name" value="${APP_MANIFEST_VERSION_NAME}"/>
+
+  <!--
+    We use the PROGUARD_ENABLED flag for enabling proguard. By default proguard is enabled for
+    Release builds if proguard.config is set. Setting proguard.config even to an empty string will
+    enable proguard. Set this property only when we have explicitly enabled proguard.
+  -->
+  <condition property="proguard.config" value="${PROGUARD_FLAGS}">
+    <istrue value="${PROGUARD_ENABLED}"/>
+  </condition>
+  <!-- TODO(shashishekhar): Enable emma and code-coverage filters. -->
+
+  <condition property="asset.absolute.dir"
+      value="${out.dir}/assets"
+      else="${ASSET_DIR}">
+    <equals arg1="${ASSET_DIR}" arg2=""/>
+  </condition>
+
+  <!-- Set the output directory for the final apk to the ${apks.dir}. -->
+  <property-location name="out.final.file"
+      location="${apks.dir}/${ant.project.name}.apk"
+      check-exists="false"/>
+
+  <!-- Classpath for javac -->
+  <path id="javac.custom.classpath">
+    <filelist files="${INPUT_JARS_PATHS}"/>
+  </path>
+
+  <path id="out.dex.jar.input.ref">
+    <path refid="javac.custom.classpath"/>
+  </path>
+
+  <import file="sdk-targets.xml"/>
+  <import file="${sdk.dir}/tools/ant/build.xml"/>
+</project>
+
diff --git a/src/build/android/ant/chromium-debug.keystore b/src/build/android/ant/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/src/build/android/ant/chromium-debug.keystore
Binary files differ
diff --git a/src/build/android/ant/chromium-jars.xml b/src/build/android/ant/chromium-jars.xml
new file mode 100644
index 0000000..d13c927
--- /dev/null
+++ b/src/build/android/ant/chromium-jars.xml
@@ -0,0 +1,116 @@
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+<project name="chromium-jars" default="dist">
+  <!--
+    Common ant build file for chromium_*.jars.
+    For creating a new chromium_*.jar :
+    1. Use build/java.gypi action.
+    The jar will be created as chromium_${PACKAGE_NAME} in
+    ${PRODUCT_DIR}/lib.java.
+  -->
+  <description>
+    Building ${PROJECT_NAME}/ java source code with ant.
+  </description>
+
+  <import file="common.xml"/>
+
+  <path id="javac.custom.classpath">
+    <filelist files="${INPUT_JARS_PATHS}"/>
+    <pathelement location="${ANDROID_SDK}/android.jar"/>
+  </path>
+
+  <path id="javac.srcdirs.additional">
+    <filelist files="${ADDITIONAL_SRC_DIRS}"/>
+    <filelist files="${GENERATED_SRC_DIRS}"/>
+  </path>
+
+  <property-value
+    name="javac.srcdir"
+    value="src:${toString:javac.srcdirs.additional}"
+  />
+
+  <property-location
+    name="dest.dir"
+    location="${PRODUCT_DIR}/java/${PACKAGE_NAME}"
+    check-exists="false"
+  />
+
+    <condition property="javac_includes_message"
+               value=""
+               else="Include filter: ${JAVAC_INCLUDES}">
+        <equals arg1="${JAVAC_INCLUDES}" arg2=""/>
+    </condition>
+
+  <target name="init">
+    <!-- Create the time stamp -->
+    <tstamp/>
+    <!-- Create the build directory structure used by compile -->
+    <mkdir dir="${dest.dir}"/>
+
+    <!-- Remove all .class files from dest.dir. This prevents inclusion of
+         incorrect .class files in the final .jar. For example, if a .java file
+         was deleted, the .jar should not contain the .class files for that
+         .java from previous builds.
+    -->
+    <delete>
+      <fileset dir="${dest.dir}" includes="**/*.class"/>
+    </delete>
+  </target>
+
+  <target name="compile" depends="init" description="Compiles source.">
+    <fail message="Error: javac.custom.classpath is not set. Please set it to
+        classpath for javac.">
+      <condition>
+        <not><isreference refid="javac.custom.classpath"/></not>
+      </condition>
+    </fail>
+
+    <echo>
+      Compiling ${javac.srcdir}, classpath: ${toString:javac.custom.classpath}
+      ${javac_includes_message}
+    </echo>
+
+    <!-- __LB_SHELL__
+      Android SDK build toolchain doesn't support Java 1.7 so we force
+      the source and target versions to 1.6. Failure to do so will result
+      in .class files that can't be parsed by the dexer.
+    -->
+    <javac
+        source="1.6"
+        target="1.6"
+        srcdir="${javac.srcdir}"
+        destdir="${dest.dir}"
+        classpathref="javac.custom.classpath"
+        debug="true"
+        includeantruntime="false"
+        includes="${JAVAC_INCLUDES}">
+      <compilerarg value="-Xlint:unchecked"/>
+    </javac>
+  </target>
+
+  <target name="dist" depends="compile"
+      description="Generate chromium_${PACKAGE_NAME}.jar.">
+    <!-- Create the distribution directory.  We exclude R.class and R$*.class
+         files since new versions of these files with the correct resource -> ID
+         mapping will be provided when we build each individual apk. -->
+    <jar
+      jarfile="${lib.java.dir}/chromium_${PACKAGE_NAME}.jar"
+      excludes="**/R.class **/R$*.class"
+      basedir="${dest.dir}"
+    />
+
+    <!-- If Gyp thinks this output is stale but Ant doesn't, the modification
+         time should still be updated. Otherwise, this target will continue to
+         be rebuilt in future builds.
+    -->
+    <touch file="${lib.java.dir}/chromium_${PACKAGE_NAME}.jar"/>
+  </target>
+
+  <target name="clean" description="clean up">
+    <!-- Delete the appropriate directory trees -->
+    <delete dir="${dest.dir}"/>
+  </target>
+</project>
diff --git a/src/build/android/ant/common.xml b/src/build/android/ant/common.xml
new file mode 100644
index 0000000..1001f19
--- /dev/null
+++ b/src/build/android/ant/common.xml
@@ -0,0 +1,90 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+<project name="chrome_common_defines">
+  <!-- Common build properties for Chrome for android. -->
+
+  <!--
+    Macro for checking that a property is correctly set. Performs checks for:
+    1. Property is set and not null.
+    2. String value of property does not contains any '$' signs.
+  -->
+  <macrodef name="check-property-value">
+    <attribute name="property"/>
+    <sequential>
+      <fail message ="Property @{property} is not set.">
+        <condition>
+          <or>
+            <not><isset property="@{property}"/></not>
+            <length string="${@{property}}" trim="true" when="less" length="1"/>
+          </or>
+        </condition>
+      </fail>
+      <!--
+        Check for $ signs. This catches errors when properties are initialized from environment
+        variables. E.g. if we have <property name="foo" value="${env.bar}" /> but env.bar is
+        not set then foo will have the literal value of '${env.bar}'.
+      -->
+      <fail message="Value checked failed for property: @{property} : ${@{property}}.
+          Property value contains an uninitialized environment variable.">
+        <condition>
+          <contains string="${@{property}}" substring="$"/>
+        </condition>
+      </fail>
+    </sequential>
+  </macrodef>
+
+  <!--
+    A safe setter for location properties. Checks that a location is not
+    empty and actually exists. For specifying output directories, location
+    check can be disabled by specifying check-exists="false".
+  -->
+  <macrodef name="property-location">
+    <attribute name="name"/>
+    <attribute name="location"/>
+    <attribute name="check-exists" default="true"/>
+    <sequential>
+      <property name="@{name}" location="@{location}"/>
+      <check-property-value property="@{name}"/>
+      <fail message="Location specified for @{name} : @{location} does not exist.">
+        <condition>
+          <and>
+            <equals arg1="@{check-exists}" arg2="true"/>
+            <not><available file="@{location}"/></not>
+          </and>
+        </condition>
+      </fail>
+    </sequential>
+  </macrodef>
+
+  <!-- A safe setter for property values -->
+  <macrodef name="property-value">
+    <attribute name="name"/>
+    <attribute name="value"/>
+    <sequential>
+      <property name="@{name}" value="@{value}"/>
+      <check-property-value property="@{name}"/>
+    </sequential>
+  </macrodef>
+
+  <!-- Common environment properties. -->
+  <property-location name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
+  <property-value name="target" value="android-${ANDROID_SDK_VERSION}"/>
+  <property name="source.dir" location="src"/>
+  <property-location name="android.gdbserver" location="${ANDROID_GDBSERVER}"/>
+  <!--
+       Common directories used by SDK Build, when making changes here
+       make sure to update gyp files and test scripts constants in
+       build/android/pylib/constants.py
+  -->
+  <!-- Common directory for chromium_*.jars. -->
+  <property-location name="lib.java.dir" location="${PRODUCT_DIR}/lib.java"/>
+  <!-- Common directory for test jars. -->
+  <property-location name="test.lib.java.dir"
+    location="${PRODUCT_DIR}/test.lib.java"/>
+  <!-- Common directory for apks. -->
+  <property-location name="apks.dir" location="${PRODUCT_DIR}/apks"/>
+</project>
diff --git a/src/build/android/ant/create-test-jar.js b/src/build/android/ant/create-test-jar.js
new file mode 100644
index 0000000..d22c003
--- /dev/null
+++ b/src/build/android/ant/create-test-jar.js
@@ -0,0 +1,70 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/**
+ * Combines classes from javac.custom.classpath property and ${out.dir}/classes
+ * into a single jar file ${ant.project.name}.jar and places the file in
+ * ${lib.java.dir}.
+ *
+ * Runs under Ant's <script language="javascript"> task (Rhino); the global
+ * `project` is the Ant Project object injected by that task, and all Ant
+ * properties/references are read through it.
+ */
+
+importClass(java.io.File);
+importClass(org.apache.tools.ant.types.Reference);
+importClass(org.apache.tools.ant.types.FileSet);
+importClass(org.apache.tools.ant.types.ZipFileSet);
+importClass(org.apache.tools.ant.taskdefs.Zip);
+
+// NOTE(review): Java classes are instantiated without `new` throughout this
+// script (File(...), ZipFileSet(), Zip.Duplicate()); this relies on Rhino
+// permitting constructor calls without `new` -- confirm against the Rhino
+// version bundled with the Ant in use.
+var echo = project.createTask("echo");
+var jarTask = project.createTask("jar");
+
+// Do not allow duplicates in the jar, the default behavior of Jar task
+// is "add" which means duplicates are allowed.
+// This can cause a class file to be included multiple times, setting the
+// duplicate to "preserve" ensures that only the first definition is included.
+
+var duplicate = Zip.Duplicate();
+duplicate.setValue("preserve");
+jarTask.setDuplicate(duplicate);
+
+// Output path: ${test.lib.java.dir}/${ant.project.name}.jar.
+var destFile = project.getProperty("ant.project.name") + ".jar";
+var destPath = File(project.getProperty("test.lib.java.dir") + "/" + destFile);
+jarTask.setDestFile(destPath);
+
+// Include all the jars in the classpath.
+var javacCustomClasspath =
+    project.getReference("javac.custom.classpath").list();
+
+for (var i in javacCustomClasspath) {
+  var fileName = javacCustomClasspath[i]
+  // NOTE(review): list() returns java.lang.String elements, so this resolves
+  // to java.lang.String.split(regex) and "\\." matches a literal '.' -- it
+  // would behave differently on a native JS string; confirm.
+  var fileExtension = fileName.split("\\.").pop();
+  if(fileExtension == "jar")
+  {
+    // Only repackage the .class entries of each dependency jar.
+    var zipFileSet = ZipFileSet();
+    zipFileSet.setIncludes("**/*.class");
+    zipFileSet.setSrc(File(fileName));
+    jarTask.addFileset(zipFileSet);
+  }
+}
+
+// Add the compiled classes in ${out.dir}/classes.
+var projectClasses = FileSet();
+projectClasses.setIncludes("**/*.class");
+projectClasses.setDir(File(project.getProperty("out.dir") + "/classes"));
+jarTask.addFileset(projectClasses);
+
+// Exclude manifest and resource classes.
+var appPackagePath =
+    (project.getProperty("project.app.package")).replace('.','/');
+var excludedClasses = ["R.class", "R$*.class", "Manifest.class",
+    "Manifest$*.class", "BuildConfig.class"]
+
+// Build a space-separated Ant excludes pattern rooted at the app package.
+var exclusionString = "";
+for (var i in excludedClasses) {
+  exclusionString += appPackagePath+ "/" + excludedClasses[i] + " ";
+}
+
+jarTask.setExcludes(exclusionString);
+echo.setMessage("Creating test jar: " +
+                jarTask.getDestFile().getAbsolutePath());
+echo.perform();
+jarTask.perform();
diff --git a/src/build/android/ant/empty/res/.keep b/src/build/android/ant/empty/res/.keep
new file mode 100644
index 0000000..1fd038b
--- /dev/null
+++ b/src/build/android/ant/empty/res/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/src/build/android/ant/sdk-targets.xml b/src/build/android/ant/sdk-targets.xml
new file mode 100644
index 0000000..91a0220
--- /dev/null
+++ b/src/build/android/ant/sdk-targets.xml
@@ -0,0 +1,289 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+<project name="chrome_sdk_overrides" >
+  <!--
+    Redefinition of targets used by SDK tools.
+    Supported version: SDK tools revision 20.
+
+    SDK tools do not allow easy way of extending classpaths
+    for aidl and javac. This file defines targets which can be used to
+    override targets used by tools.
+  -->
+  <target name="-pre-compile">
+    <!--
+      Remove all .class files from the output directory. This prevents inclusion of incorrect .class
+      files in the final apk. For example, if a .java file was deleted, the apk should not contain
+      the .class files for that .java from previous builds.
+    -->
+    <delete>
+      <fileset dir="${out.classes.absolute.dir}" includes="**/*.class"/>
+    </delete>
+  </target>
+
+  <!--
+    Override the -compile target.
+    This target requires 'javac.custom.classpath' to be set to reference
+    of classpath to be used for javac. Also accepts custom path for
+    sources: 'javac.custom.sourcepath'.
+  -->
+  <target
+      name="-compile"
+      depends="-build-setup, -pre-build, -code-gen, -pre-compile">
+    <do-only-if-manifest-hasCode elseText="hasCode = false. Skipping..." >
+      <!-- If javac.srcdirs.additional isn't set, set it to an empty path. -->
+      <if>
+        <condition>
+          <not>
+            <isreference refid="javac.srcdirs.additional"/>
+          </not>
+        </condition>
+        <then>
+          <path id="javac.srcdirs.additional"/>
+        </then>
+      </if>
+      <javac
+          bootclasspathref="project.target.class.path"
+          classpathref="javac.custom.classpath"
+          debug="true"
+          destdir="${out.classes.absolute.dir}"
+          encoding="${java.encoding}"
+          extdirs=""
+          fork="${need.javac.fork}"
+          includeantruntime="false"
+          source="${java.source}"
+          target="${java.target}"
+          verbose="${verbose}">
+        <src path="${source.absolute.dir}"/>
+        <src path="${gen.absolute.dir}"/>
+        <src>
+          <path refid="javac.srcdirs.additional"/>
+        </src>
+        <compilerarg value="-Xlint:unchecked"/>
+        <compilerarg value="-Xlint:deprecation"/>
+        <compilerarg line="${java.compilerargs}"/>
+      </javac>
+      <!--
+        If the project is instrumented, then instrument the classes
+        TODO(shashishekhar): Add option to override emma filter.
+      -->
+      <if condition="${build.is.instrumented}">
+        <then>
+          <echo level="info">
+            Instrumenting classes from ${out.absolute.dir}/classes...
+          </echo>
+          <!-- build the default filter to remove R, Manifest, BuildConfig -->
+          <getemmafilter
+              appPackage="${project.app.package}"
+              filterOut="emma.default.filter"
+              libraryPackagesRefId="project.library.packages"/>
+          <!--
+            Define where the .em file is output.
+            This may have been setup already if this is a library.
+          -->
+          <property name="emma.coverage.absolute.file"
+              location="${out.absolute.dir}/coverage.em"/>
+          <!-- It only instruments class files, not any external libs -->
+
+          <emma enabled="true">
+            <instr
+                instrpath="${out.absolute.dir}/classes"
+                metadatafile="${emma.coverage.absolute.file}"
+                mode="overwrite"
+                outdir="${out.absolute.dir}/classes"
+                verbosity="${verbosity}">
+              <filter excludes="${emma.default.filter}"/>
+              <filter value="${emma.filter}"/>
+            </instr>
+          </emma>
+        </then>
+      </if>
+      <!--
+        If the project needs a test jar then generate a jar containing
+        all compiled classes and referenced jars.
+        project.is.testapp is set by Android's ant build system based on the
+        target's manifest. It is true only for instrumentation apks.
+      -->
+      <if condition="${project.is.testapp}">
+        <then>
+          <echo level="info">Creating test jar file:
+              ${ant.project.name}.jar</echo>
+          <property-location name="create.test.jar.file"
+              location="${CHROMIUM_SRC}/build/android/ant/create-test-jar.js"/>
+          <script language="javascript" src="${create.test.jar.file}"/>
+        </then>
+      </if>
+
+    </do-only-if-manifest-hasCode>
+  </target>
+
+  <!--
+    For debug builds, the Android SDK tools create a key in ~/.android and sign the build with it.
+    This has caused all kinds of issues. Instead, the debug build should be signed with a key in
+    build/android/ant. The SDK tools do not provide any support for overriding that behavior and so
+    instead one must use the hack below.
+  -->
+
+  <!-- Disables automatic signing. -->
+  <property name="build.is.signing.debug" value="false"/>
+
+  <!-- TODO(cjhopman): Remove this property when all gyp files define the CHROMIUM_SRC property. -->
+  <!-- __LB_SHELL__
+    The path needs to be adjusted since LbShell is using a different directory
+    configuration.
+  -->
+  <property name="CHROMIUM_SRC" value="${PRODUCT_DIR}/../../../external/chromium" />
+  <property name="LBSHELL_SRC" value="${PRODUCT_DIR}/../../../lbshell" />
+  <!-- __LB_SHELL__
+      Use our custom key for signing debug packages.
+  -->
+  <property name="key.store" value="${LBSHELL_SRC}/build/lbshell-debug.keystore"/>
+  <property name="key.store.password" value="lbshell"/>
+  <property name="key.alias" value="lbshelldebugkey"/>
+  <property name="key.alias.password" value="lbshell"/>
+
+  <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
+  <property name="out.packaged.file"
+    value="${apks.dir}/${ant.project.name}-unsigned.apk" />
+  <property name="out.unaligned.file"
+    value="${apks.dir}/${ant.project.name}-unaligned.apk" />
+
+  <!-- By default, the SDK tools build only aligns the APK in the -do-debug target. -->
+  <target name="-do-debug"
+      depends="-set-debug-mode, -debug-obfuscation-check, -package, -post-package">
+    <!-- only create apk if *not* a library project -->
+    <do-only-if-not-library elseText="Library project: do not create apk..." >
+      <sequential>
+        <!-- Signs the APK -->
+        <echo level="info">Signing final apk...</echo>
+        <signapk
+          input="${out.packaged.file}"
+          output="${out.unaligned.file}"
+          keystore="${key.store}"
+          storepass="${key.store.password}"
+          alias="${key.alias}"
+          keypass="${key.alias.password}"/>
+
+        <!-- Zip aligns the APK -->
+        <zipalign-helper
+          in.package="${out.unaligned.file}"
+          out.package="${out.final.file}" />
+        <echo level="info">Release Package: ${out.final.file}</echo>
+      </sequential>
+    </do-only-if-not-library>
+    <record-build-info />
+  </target>
+
+  <path id="native.libs.gdbserver">
+    <fileset file="${android.gdbserver}"/>
+  </path>
+
+  <target name="-post-compile">
+    <!--
+      Copy gdbserver to main libs directory if building a non-instrumentation debug apk.
+    -->
+    <if>
+      <condition>
+        <and>
+          <equals arg1="${build.target}" arg2="debug"/>
+          <isfalse value="${project.is.testapp}"/>
+        </and>
+      </condition>
+      <then>
+        <echo message="Copying gdbserver to the apk to enable native debugging"/>
+        <copy todir="${out.dir}/libs/${target.abi}">
+          <path refid="native.libs.gdbserver"/>
+        </copy>
+      </then>
+    </if>
+
+    <!-- Package all the compiled .class files into a .jar. -->
+    <jar
+      jarfile="${lib.java.dir}/${JAR_NAME}"
+      basedir="${out.classes.absolute.dir}"
+    />
+  </target>
+
+  <!--
+    Override obfuscate target to pass javac.custom.classpath to Proguard. SDK tools do not provide
+    any way to pass custom class paths to Proguard.
+   -->
+  <target name="-obfuscate">
+    <if condition="${proguard.enabled}">
+      <then>
+        <property name="obfuscate.absolute.dir" location="${out.absolute.dir}/proguard"/>
+        <property name="preobfuscate.jar.file" value="${obfuscate.absolute.dir}/original.jar"/>
+        <property name="obfuscated.jar.file" value="${obfuscate.absolute.dir}/obfuscated.jar"/>
+        <!-- input for dex will be proguard's output -->
+        <property name="out.dex.input.absolute.dir" value="${obfuscated.jar.file}"/>
+
+        <!-- Add Proguard Tasks -->
+        <property name="proguard.jar" location="${android.tools.dir}/proguard/lib/proguard.jar"/>
+        <taskdef name="proguard" classname="proguard.ant.ProGuardTask" classpath="${proguard.jar}"/>
+
+        <!-- Set the android classpath Path object into a single property. It'll be
+                 all the jar files separated by a platform path-separator.
+                 Each path must be quoted if it contains spaces.
+        -->
+        <pathconvert property="project.target.classpath.value" refid="project.target.class.path">
+          <firstmatchmapper>
+            <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+            <identitymapper/>
+          </firstmatchmapper>
+        </pathconvert>
+
+        <!-- Build a path object with all the jar files that must be obfuscated.
+             This include the project compiled source code and any 3rd party jar
+             files. -->
+        <path id="project.all.classes.path">
+          <pathelement location="${preobfuscate.jar.file}"/>
+          <path refid="project.all.jars.path"/>
+          <!-- Pass javac.custom.classpath for apks. -->
+          <path refid="javac.custom.classpath"/>
+        </path>
+        <!-- Set the project jar files Path object into a single property. It'll be
+             all the jar files separated by a platform path-separator.
+             Each path must be quoted if it contains spaces.
+        -->
+        <pathconvert property="project.all.classes.value" refid="project.all.classes.path">
+          <firstmatchmapper>
+            <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+            <identitymapper/>
+          </firstmatchmapper>
+        </pathconvert>
+
+        <!-- Turn the path property ${proguard.config} from an A:B:C property
+             into a series of includes: -include A -include B -include C
+             suitable for processing by the ProGuard task. Note - this does
+             not include the leading '-include "' or the closing '"'; those
+             are added under the <proguard> call below.
+        -->
+        <path id="proguard.configpath">
+          <pathelement path="${proguard.config}"/>
+        </path>
+        <pathconvert pathsep='" -include "' property="proguard.configcmd"
+          refid="proguard.configpath"/>
+
+        <mkdir   dir="${obfuscate.absolute.dir}"/>
+        <delete file="${preobfuscate.jar.file}"/>
+        <delete file="${obfuscated.jar.file}"/>
+        <jar basedir="${out.classes.absolute.dir}"
+          destfile="${preobfuscate.jar.file}"/>
+        <proguard>
+          -include      "${proguard.configcmd}"
+          -include      "${out.absolute.dir}/proguard.txt"
+          -injars       ${project.all.classes.value}
+          -outjars      "${obfuscated.jar.file}"
+          -libraryjars  ${project.target.classpath.value}
+          -dump         "${obfuscate.absolute.dir}/dump.txt"
+          -printseeds   "${obfuscate.absolute.dir}/seeds.txt"
+          -printusage   "${obfuscate.absolute.dir}/usage.txt"
+          -printmapping "${obfuscate.absolute.dir}/mapping.txt"
+        </proguard>
+      </then>
+    </if>
+  </target>
+</project>
diff --git a/src/build/android/arm-linux-androideabi-gold/arm-linux-androideabi-ld b/src/build/android/arm-linux-androideabi-gold/arm-linux-androideabi-ld
new file mode 120000
index 0000000..5b178e9
--- /dev/null
+++ b/src/build/android/arm-linux-androideabi-gold/arm-linux-androideabi-ld
@@ -0,0 +1 @@
+../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-ld.gold
\ No newline at end of file
diff --git a/src/build/android/arm-linux-androideabi-gold/ld b/src/build/android/arm-linux-androideabi-gold/ld
new file mode 120000
index 0000000..2366dda
--- /dev/null
+++ b/src/build/android/arm-linux-androideabi-gold/ld
@@ -0,0 +1 @@
+../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/arm-linux-androideabi/bin/ld.gold
\ No newline at end of file
diff --git a/src/build/android/bb_run_sharded_steps.py b/src/build/android/bb_run_sharded_steps.py
new file mode 100755
index 0000000..9010d77
--- /dev/null
+++ b/src/build/android/bb_run_sharded_steps.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script to shard build bot steps and save results to disk.
+
+Our buildbot infrastructure requires each slave to run steps serially.
+This is sub-optimal for android, where these steps can run independently on
+multiple connected devices.
+
+The buildbots will run this script multiple times per cycle:
+- First, without params: all steps will be executed in parallel using all
+connected devices. Step results will be pickled to disk (each step has a unique
+name).
+The buildbot will treat this step as a regular step, and will not process any
+graph data.
+
+- Then, with -p STEP_NAME: at this stage, we'll simply print the file with the
+step results previously saved. The buildbot will then process the graph data
+accordingly.
+
+The JSON config is a file containing a dictionary in the format:
+{
+  'step_name_foo': 'script_to_execute foo',
+  'step_name_bar': 'script_to_execute bar'
+}
+
+Note that each script_to_execute necessarily has to take at least the following
+options:
+  --device: the serial number to be passed to all adb commands.
+  --keep_test_server_ports: indicates it's being run as a shard, and shouldn't
+  reset test server port allocation.
+"""
+
+
+import datetime
+import json
+import logging
+import multiprocessing
+import optparse
+import pexpect
+import pickle
+import os
+import signal
+import shutil
+import sys
+
+from pylib import android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import ports
+
+
+_OUTPUT_DIR = os.path.join(constants.CHROME_DIR, 'out', 'step_results')
+
+
+def _SaveResult(result):
+  with file(os.path.join(_OUTPUT_DIR, result['name']), 'w') as f:
+    f.write(pickle.dumps(result))
+
+
+def _RunStepsPerDevice(steps):
+  """Runs a list of steps sequentially, all targeting the same device.
+
+  Args:
+    steps: list of dicts, each with 'name', 'cmd' and 'device' keys.
+
+  Returns:
+    A list of result dicts ({'name', 'output', 'exit_code', 'total_time',
+    'device'}), one per step; each result is also pickled to disk via
+    _SaveResult() so it can be printed later.
+  """
+  results = []
+  for step in steps:
+    start_time = datetime.datetime.now()
+    # NOTE(review): the message reads "Starting <name>: <cmd> <start_time>
+    # at <device>" -- the time/device argument order looks swapped relative
+    # to the "at %s" wording; confirm the intended order.
+    print 'Starting %s: %s %s at %s' % (step['name'], step['cmd'],
+                                        start_time, step['device'])
+    # pexpect.run() captures the command's output and exit status; each
+    # step gets a 1800-second (30 minute) timeout.
+    output, exit_code  = pexpect.run(
+        step['cmd'], cwd=os.path.abspath(constants.CHROME_DIR),
+        withexitstatus=True, logfile=sys.stdout, timeout=1800,
+        env=os.environ)
+    end_time = datetime.datetime.now()
+    print 'Finished %s: %s %s at %s' % (step['name'], step['cmd'],
+                                        end_time, step['device'])
+    result = {'name': step['name'],
+              'output': output,
+              # A falsy exit status (None or 0) is recorded as 0.
+              'exit_code': exit_code or 0,
+              'total_time': (end_time - start_time).seconds,
+              'device': step['device']}
+    _SaveResult(result)
+    results += [result]
+  return results
+
+
+def _RunShardedSteps(steps, devices):
+  assert steps
+  assert devices, 'No devices connected?'
+  if os.path.exists(_OUTPUT_DIR):
+    assert '/step_results' in _OUTPUT_DIR
+    shutil.rmtree(_OUTPUT_DIR)
+  if not os.path.exists(_OUTPUT_DIR):
+    os.makedirs(_OUTPUT_DIR)
+  step_names = sorted(steps.keys())
+  all_params = []
+  num_devices = len(devices)
+  shard_size = (len(steps) + num_devices - 1) / num_devices
+  for i, device in enumerate(devices):
+    steps_per_device = []
+    for s in steps.keys()[i * shard_size:(i + 1) * shard_size]:
+      steps_per_device += [{'name': s,
+                            'device': device,
+                            'cmd': steps[s] + ' --device ' + device +
+                            ' --keep_test_server_ports'}]
+    all_params += [steps_per_device]
+  print 'Start sharding (note: output is not synchronized...)'
+  print '*' * 80
+  start_time = datetime.datetime.now()
+  pool = multiprocessing.Pool(processes=num_devices)
+  async_results = pool.map_async(_RunStepsPerDevice, all_params)
+  results_per_device = async_results.get(999999)
+  end_time = datetime.datetime.now()
+  print '*' * 80
+  print 'Finished sharding.'
+  print 'Summary'
+  total_time = 0
+  for results in results_per_device:
+    for result in results:
+      print('%s : exit_code=%d in %d secs at %s' %
+            (result['name'], result['exit_code'], result['total_time'],
+             result['device']))
+      total_time += result['total_time']
+  print 'Step time: %d secs' % ((end_time - start_time).seconds)
+  print 'Bots time: %d secs' % total_time
+  # No exit_code for the sharding step: the individual _PrintResults step
+  # will return the corresponding exit_code.
+  return 0
+
+
+def _PrintStepOutput(step_name):
+  file_name = os.path.join(_OUTPUT_DIR, step_name)
+  if not os.path.exists(file_name):
+    print 'File not found ', file_name
+    return 1
+  with file(file_name, 'r') as f:
+    result = pickle.loads(f.read())
+  print result['output']
+  return result['exit_code']
+
+
+def _KillPendingServers():
+  """Best-effort kill of leftover test servers (lighttpd, web-page-replay).
+
+  Failures to kill individual processes are logged and ignored.
+  """
+  # NOTE(review): the loop body never breaks on success, so each server is
+  # probed/killed up to 5 times -- presumably to catch processes that
+  # respawn or are slow to exit; confirm this is intentional.
+  for retry in range(5):
+    for server in ['lighttpd', 'web-page-replay']:
+      pids = cmd_helper.GetCmdOutput(['pgrep', '-f', server])
+      pids = [pid.strip() for pid in pids.split('\n') if pid.strip()]
+      for pid in pids:
+        try:
+          logging.warning('Killing %s %s', server, pid)
+          # SIGQUIT rather than SIGKILL -- presumably to let the server
+          # shut down on its own terms; confirm the servers handle it.
+          os.kill(int(pid), signal.SIGQUIT)
+        except Exception as e:
+          logging.warning('Failed killing %s %s %s', server, pid, e)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--steps',
+                    help='A JSON file containing all the steps to be '
+                         'sharded.')
+  parser.add_option('-p', '--print_results',
+                    help='Only prints the results for the previously '
+                         'executed step, do not run it again.')
+  options, urls = parser.parse_args(argv)
+  if options.print_results:
+    return _PrintStepOutput(options.print_results)
+
+  # At this point, we should kill everything that may have been left over from
+  # previous runs.
+  _KillPendingServers()
+
+  # Reset the test port allocation. It's important to do it before starting
+  # to dispatch any step.
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  # Sort the devices so that we'll try to always run a step in the same device.
+  devices = sorted(android_commands.GetAttachedDevices())
+  if not devices:
+    print 'You must attach a device'
+    return 1
+
+  with file(options.steps, 'r') as f:
+    steps = json.load(f)
+  return _RunShardedSteps(steps, devices)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/buildbot/bb_asan_builder.sh b/src/build/android/buildbot/bb_asan_builder.sh
new file mode 100755
index 0000000..57679e0
--- /dev/null
+++ b/src/build/android/buildbot/bb_asan_builder.sh
@@ -0,0 +1,15 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the fyi waterfall and fyi trybots.
+# Compile and zip the build.
+# NOTE(review): the header mentions the fyi waterfall but the file name says
+# ASan builder -- likely a copy/paste leftover; confirm.
+
+# Directory containing this script, and the checkout root three levels up.
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+# Source the shared bb_* buildbot helper functions.
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_asan_tests.sh b/src/build/android/buildbot/bb_asan_tests.sh
new file mode 100755
index 0000000..122fe40
--- /dev/null
+++ b/src/build/android/buildbot/bb_asan_tests.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the fyi waterfall and fyi trybots.
+# Downloads and extracts a build from the builder and runs tests.
+# NOTE(review): the header mentions the fyi waterfall but the file name says
+# ASan tests -- likely a copy/paste leftover; confirm.
+
+# Directory containing this script, and the checkout root three levels up.
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+# Source the shared bb_* buildbot helper functions.
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_unit_tests
+bb_run_instrumentation_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_clang_builder.sh b/src/build/android/buildbot/bb_clang_builder.sh
new file mode 100755
index 0000000..3bf88ae
--- /dev/null
+++ b/src/build/android/buildbot/bb_clang_builder.sh
@@ -0,0 +1,13 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for clang.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
diff --git a/src/build/android/buildbot/bb_fyi_builder.sh b/src/build/android/buildbot/bb_fyi_builder.sh
new file mode 100755
index 0000000..d76598b
--- /dev/null
+++ b/src/build/android/buildbot/bb_fyi_builder.sh
@@ -0,0 +1,18 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the fyi waterfall and fyi trybots.
+# Compile and zip the build.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_check_webview_licenses
+bb_compile
+bb_compile_experimental
+bb_run_findbugs
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_fyi_tester.sh b/src/build/android/buildbot/bb_fyi_tester.sh
new file mode 100755
index 0000000..e50a32b
--- /dev/null
+++ b/src/build/android/buildbot/bb_fyi_tester.sh
@@ -0,0 +1,21 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the fyi waterfall and fyi trybots.
+# Downloads and extracts a build from the builder and runs tests.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_unit_tests
+bb_run_instrumentation_tests
+bb_run_experimental_unit_tests
+bb_run_experimental_instrumentation_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_main_builder.sh b/src/build/android/buildbot/bb_main_builder.sh
new file mode 100755
index 0000000..13df0c4
--- /dev/null
+++ b/src/build/android/buildbot/bb_main_builder.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the main waterfall.  Compile only.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn the main waterfall android bots
+# green (preventing tree closures), uncomment the next line (and send
+# appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+bb_run_findbugs
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_main_clobber.sh b/src/build/android/buildbot/bb_main_clobber.sh
new file mode 100755
index 0000000..8c3df90
--- /dev/null
+++ b/src/build/android/buildbot/bb_main_clobber.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the main waterfall.  Compile only.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn the main waterfall android bots
+# green (preventing tree closures), uncomment the next line (and send
+# appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+
diff --git a/src/build/android/buildbot/bb_main_tester.sh b/src/build/android/buildbot/bb_main_tester.sh
new file mode 100755
index 0000000..287d281
--- /dev/null
+++ b/src/build/android/buildbot/bb_main_tester.sh
@@ -0,0 +1,23 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the main waterfall.  Tester only.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn the main waterfall android bots
+# green (preventing tree closures), uncomment the next line (and send
+# appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_unit_tests
+bb_run_instrumentation_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_perf_builder.sh b/src/build/android/buildbot/bb_perf_builder.sh
new file mode 100755
index 0000000..0907362
--- /dev/null
+++ b/src/build/android/buildbot/bb_perf_builder.sh
@@ -0,0 +1,14 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for chromium.perf.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_perf_gn_tests.sh b/src/build/android/buildbot/bb_perf_gn_tests.sh
new file mode 100755
index 0000000..ba36b29
--- /dev/null
+++ b/src/build/android/buildbot/bb_perf_gn_tests.sh
@@ -0,0 +1,17 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for chromium.perf.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_install_apk "ContentShell.apk" "org.chromium.content_shell"
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_try_builder.sh b/src/build/android/buildbot/bb_try_builder.sh
new file mode 100755
index 0000000..c26b546
--- /dev/null
+++ b/src/build/android/buildbot/bb_try_builder.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for trybots.  Compile only.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn "android_dbg" trybots green,
+# uncomment the next line (and send appropriate email out):
+## bb_force_bot_green_and_exit
+# You will also need to change buildbot_try_tester.sh
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+bb_run_findbugs
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_try_clang_builder.sh b/src/build/android/buildbot/bb_try_clang_builder.sh
new file mode 100755
index 0000000..75ff9f4
--- /dev/null
+++ b/src/build/android/buildbot/bb_try_clang_builder.sh
@@ -0,0 +1,13 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for trybots.  Compile only.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
diff --git a/src/build/android/buildbot/bb_try_fyi_builder.sh b/src/build/android/buildbot/bb_try_fyi_builder.sh
new file mode 100755
index 0000000..3041ccc
--- /dev/null
+++ b/src/build/android/buildbot/bb_try_fyi_builder.sh
@@ -0,0 +1,7 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Buildbot annotator entry for trybot mirroring fyi builder
+exec "$(dirname $0)/bb_fyi_builder.sh" "$@"
diff --git a/src/build/android/buildbot/bb_try_fyi_tester.sh b/src/build/android/buildbot/bb_try_fyi_tester.sh
new file mode 100755
index 0000000..f3ea8e0
--- /dev/null
+++ b/src/build/android/buildbot/bb_try_fyi_tester.sh
@@ -0,0 +1,7 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Buildbot annotator entry for trybot mirroring fyi tester
+exec "$(dirname $0)/bb_fyi_tester.sh" "$@"
diff --git a/src/build/android/buildbot/bb_try_tester.sh b/src/build/android/buildbot/bb_try_tester.sh
new file mode 100755
index 0000000..bfbff27
--- /dev/null
+++ b/src/build/android/buildbot/bb_try_tester.sh
@@ -0,0 +1,22 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for tester half of android trybots
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn "android" trybots green,
+# uncomment the next line (and send appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_unit_tests
+bb_run_instrumentation_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_webkit_latest_builder.sh b/src/build/android/buildbot/bb_webkit_latest_builder.sh
new file mode 100755
index 0000000..982f857
--- /dev/null
+++ b/src/build/android/buildbot/bb_webkit_latest_builder.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the WebKit builder on the Canary waterfall.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn the main waterfall android bots
+# green (preventing tree closures), uncomment the next line (and send
+# appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_compile
+bb_zip_build
diff --git a/src/build/android/buildbot/bb_webkit_latest_tester.sh b/src/build/android/buildbot/bb_webkit_latest_tester.sh
new file mode 100755
index 0000000..581fbea
--- /dev/null
+++ b/src/build/android/buildbot/bb_webkit_latest_tester.sh
@@ -0,0 +1,22 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the WebKit tester on the Canary waterfall.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+# SHERIFF: if you need to quickly turn the main waterfall android bots
+# green (preventing tree closures), uncomment the next line (and send
+# appropriate email out):
+## bb_force_bot_green_and_exit
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_unit_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/bb_webkit_latest_webkit_tester.sh b/src/build/android/buildbot/bb_webkit_latest_webkit_tester.sh
new file mode 100755
index 0000000..a49509f
--- /dev/null
+++ b/src/build/android/buildbot/bb_webkit_latest_webkit_tester.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -ex
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Buildbot annotator script for the WebKit latest WebKit tester on the
+# WebKit canary waterfall.
+
+BB_DIR="$(dirname $0)"
+BB_SRC_ROOT="$(cd  "$BB_DIR/../../.."; pwd)"
+. "$BB_DIR/buildbot_functions.sh"
+
+bb_baseline_setup "$BB_SRC_ROOT" "$@"
+bb_spawn_logcat_monitor_and_status
+bb_extract_build
+bb_reboot_phones
+bb_run_webkit_unit_tests
+bb_lint_webkit_expectation_files
+bb_run_webkit_layout_tests
+bb_print_logcat
diff --git a/src/build/android/buildbot/buildbot_functions.sh b/src/build/android/buildbot/buildbot_functions.sh
new file mode 100755
index 0000000..8efeb15
--- /dev/null
+++ b/src/build/android/buildbot/buildbot_functions.sh
@@ -0,0 +1,427 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Bash functions used by buildbot annotator scripts for the android
+# build of chromium.  Executing this script should not perform actions
+# other than setting variables and defining of functions.
+
+# Number of jobs on the compile line; e.g.  make -j"${JOBS}"
+JOBS="${JOBS:-4}"
+
+# Parse named arguments passed into the annotator script
+# and assign them global variable names.
+function bb_parse_args {
+  while [[ $1 ]]; do
+    case "$1" in
+      --factory-properties=*)
+        FACTORY_PROPERTIES="$(echo "$1" | sed 's/^[^=]*=//')"
+        BUILDTYPE=$(bb_get_json_prop "$FACTORY_PROPERTIES" target)
+        ;;
+      --build-properties=*)
+        BUILD_PROPERTIES="$(echo "$1" | sed 's/^[^=]*=//')"
+        ;;
+      *)
+        echo "@@@STEP_WARNINGS@@@"
+        echo "Warning, unparsed input argument: '$1'"
+        ;;
+    esac
+    shift
+  done
+}
+
+# Function to force-green a bot.
+function bb_force_bot_green_and_exit {
+  echo "@@@BUILD_STEP Bot forced green.@@@"
+  exit 0
+}
+
+# Basic setup for all bots to run after a source tree checkout.
+# Args:
+#   $1: source root.
+#   $2 and beyond: key value pairs which are parsed by bb_parse_args.
+function bb_baseline_setup {
+  SRC_ROOT="$1"
+  # Remove SRC_ROOT param
+  shift
+  cd $SRC_ROOT
+
+  echo "@@@BUILD_STEP Environment setup@@@"
+  bb_parse_args "$@"
+
+  local BUILDTOOL=$(bb_get_json_prop "$FACTORY_PROPERTIES" buildtool)
+  if [[ $BUILDTOOL = ninja ]]; then
+    export GYP_GENERATORS=ninja
+  fi
+  export GOMA_DIR=/b/build/goma
+  . build/android/envsetup.sh
+
+  local extra_gyp_defines="$(bb_get_json_prop "$FACTORY_PROPERTIES" \
+     extra_gyp_defines)"
+  export GYP_DEFINES+=" fastbuild=1 $extra_gyp_defines"
+  if echo $extra_gyp_defines | grep -qE 'clang|asan'; then
+    unset CXX_target
+  fi
+
+  adb kill-server
+  adb start-server
+
+  local build_path="${SRC_ROOT}/out/${BUILDTYPE}"
+  local landmines_triggered_path="$build_path/.landmines_triggered"
+  python "$SRC_ROOT/build/landmines.py"
+
+  if [[ $BUILDBOT_CLOBBER || -f "$landmines_triggered_path" ]]; then
+    echo "@@@BUILD_STEP Clobber@@@"
+
+    if [[ -z $BUILDBOT_CLOBBER ]]; then
+      echo "Clobbering due to triggered landmines: "
+      cat "$landmines_triggered_path"
+    else
+      # Also remove all the files under out/ on an explicit clobber
+      find "${SRC_ROOT}/out" -maxdepth 1 -type f -exec rm -f {} +
+    fi
+
+    # Sdk key expires, delete android folder.
+    # crbug.com/145860
+    rm -rf ~/.android
+    rm -rf "$build_path"
+    if [[ -e $build_path ]] ; then
+      echo "Clobber appeared to fail?  $build_path still exists."
+      echo "@@@STEP_WARNINGS@@@"
+    fi
+  fi
+}
+
+function bb_compile_setup {
+  bb_setup_goma_internal
+  # Should be called only after envsetup is done.
+  gclient runhooks
+}
+
+# Setup goma.  Used internally to buildbot_functions.sh.
+function bb_setup_goma_internal {
+  export GOMA_API_KEY_FILE=${GOMA_DIR}/goma.key
+  export GOMA_COMPILER_PROXY_DAEMON_MODE=true
+  export GOMA_COMPILER_PROXY_RPC_TIMEOUT_SECS=300
+
+  echo "Killing old goma processes"
+  ${GOMA_DIR}/goma_ctl.sh stop || true
+  killall -9 compiler_proxy || true
+
+  echo "Starting goma"
+  ${GOMA_DIR}/goma_ctl.sh start
+  trap bb_stop_goma_internal SIGHUP SIGINT SIGTERM
+}
+
+# Stop goma.
+function bb_stop_goma_internal {
+  echo "Stopping goma"
+  ${GOMA_DIR}/goma_ctl.sh stop
+}
+
+# $@: make args.
+# Use goma if possible; degrades to non-Goma if needed.
+function bb_goma_make {
+  if [ "${GOMA_DIR}" = "" ]; then
+    make -j${JOBS} "$@"
+    return
+  fi
+
+  HOST_CC=$GOMA_DIR/gcc
+  HOST_CXX=$GOMA_DIR/g++
+  TARGET_CC=$(/bin/ls $ANDROID_TOOLCHAIN/*-gcc | head -n1)
+  TARGET_CXX=$(/bin/ls $ANDROID_TOOLCHAIN/*-g++ | head -n1)
+  TARGET_CC="$GOMA_DIR/gomacc $TARGET_CC"
+  TARGET_CXX="$GOMA_DIR/gomacc $TARGET_CXX"
+  COMMON_JAVAC="$GOMA_DIR/gomacc /usr/bin/javac -J-Xmx512M \
+    -target 1.5 -Xmaxerrs 9999999"
+
+  command make \
+    -j100 \
+    -l20 \
+    HOST_CC="$HOST_CC" \
+    HOST_CXX="$HOST_CXX" \
+    TARGET_CC="$TARGET_CC" \
+    TARGET_CXX="$TARGET_CXX" \
+    CC.host="$HOST_CC" \
+    CXX.host="$HOST_CXX" \
+    CC.target="$TARGET_CC" \
+    CXX.target="$TARGET_CXX" \
+    LINK.target="$TARGET_CXX" \
+    COMMON_JAVAC="$COMMON_JAVAC" \
+    BUILDTYPE="$BUILDTYPE" \
+    "$@"
+
+  local make_exit_code=$?
+  return $make_exit_code
+}
+
+# Build using ninja.
+function bb_goma_ninja {
+  echo "Using ninja to build."
+  local TARGET=$1
+  ninja -C out/$BUILDTYPE -j120 -l20 $TARGET
+}
+
+# Compile step
+function bb_compile {
+  # This must be named 'compile', not 'Compile', for CQ interaction.
+  # Talk to maruel for details.
+  echo "@@@BUILD_STEP compile@@@"
+  bb_compile_setup
+
+  BUILDTOOL=$(bb_get_json_prop "$FACTORY_PROPERTIES" buildtool)
+  if [[ $BUILDTOOL = ninja ]]; then
+    bb_goma_ninja All
+  else
+    bb_goma_make
+  fi
+
+  bb_stop_goma_internal
+}
+
+# Experimental compile step; does not turn the tree red if it fails.
+function bb_compile_experimental {
+  # Linking DumpRenderTree appears to hang forever?
+  EXPERIMENTAL_TARGETS="android_experimental"
+  for target in ${EXPERIMENTAL_TARGETS} ; do
+    echo "@@@BUILD_STEP Experimental Compile $target @@@"
+    set +e
+    if [[ $BUILDTOOL = ninja ]]; then
+      bb_goma_ninja "${target}"
+    else
+      bb_goma_make -k "${target}"
+    fi
+    if [ $? -ne 0 ] ; then
+      echo "@@@STEP_WARNINGS@@@"
+    fi
+    set -e
+  done
+}
+
+# Run tests on an emulator.
+function bb_run_tests_emulator {
+  echo "@@@BUILD_STEP Run Tests on an Emulator@@@"
+  build/android/run_tests.py -e --xvfb --verbose
+}
+
+function bb_spawn_logcat_monitor_and_status {
+  python build/android/device_status_check.py
+  LOGCAT_DUMP_DIR="$CHROME_SRC/out/logcat"
+  rm -rf "$LOGCAT_DUMP_DIR"
+  python build/android/adb_logcat_monitor.py "$LOGCAT_DUMP_DIR" &
+}
+
+function bb_print_logcat {
+  echo "@@@BUILD_STEP Logcat dump@@@"
+  python build/android/adb_logcat_printer.py "$LOGCAT_DUMP_DIR"
+}
+
+# Run tests on an actual device.  (Better have one plugged in!)
+function bb_run_unit_tests {
+  build/android/run_tests.py --xvfb --verbose
+}
+
+# Run WebKit's test suites: webkit_unit_tests and TestWebKitAPI
+function bb_run_webkit_unit_tests {
+  if [[ $BUILDTYPE = Release ]]; then
+    local BUILDFLAG="--release"
+  fi
+  bb_run_step build/android/run_tests.py --xvfb --verbose $BUILDFLAG \
+      -s webkit_unit_tests
+  bb_run_step build/android/run_tests.py --xvfb --verbose $BUILDFLAG \
+      -s TestWebKitAPI
+}
+
+# Lint WebKit's TestExpectation files.
+function bb_lint_webkit_expectation_files {
+  echo "@@@BUILD_STEP webkit_lint@@@"
+  bb_run_step python webkit/tools/layout_tests/run_webkit_tests.py \
+    --lint-test-files \
+    --chromium
+}
+
+# Run layout tests on an actual device.
+function bb_run_webkit_layout_tests {
+  echo "@@@BUILD_STEP webkit_tests@@@"
+  local BUILDERNAME="$(bb_get_json_prop "$BUILD_PROPERTIES" buildername)"
+  local BUILDNUMBER="$(bb_get_json_prop "$BUILD_PROPERTIES" buildnumber)"
+  local MASTERNAME="$(bb_get_json_prop "$BUILD_PROPERTIES" mastername)"
+  local RESULTSERVER=\
+"$(bb_get_json_prop "$FACTORY_PROPERTIES" test_results_server)"
+
+  bb_run_step python webkit/tools/layout_tests/run_webkit_tests.py \
+      --no-show-results \
+      --no-new-test-results \
+      --full-results-html \
+      --clobber-old-results \
+      --exit-after-n-failures 5000 \
+      --exit-after-n-crashes-or-timeouts 100 \
+      --debug-rwt-logging \
+      --results-directory "../layout-test-results" \
+      --target "$BUILDTYPE" \
+      --builder-name "$BUILDERNAME" \
+      --build-number "$BUILDNUMBER" \
+      --master-name "$MASTERNAME" \
+      --build-name "$BUILDERNAME" \
+      --platform=chromium-android \
+      --test-results-server "$RESULTSERVER"
+}
+
+# Run experimental unittest bundles.
+function bb_run_experimental_unit_tests {
+  build/android/run_tests.py --xvfb --verbose -s android_webview_unittests
+}
+
+# Run findbugs.
+function bb_run_findbugs {
+  echo "@@@BUILD_STEP findbugs@@@"
+  if [[ $BUILDTYPE = Release ]]; then
+    local BUILDFLAG="--release-build"
+  fi
+  bb_run_step build/android/findbugs_diff.py $BUILDFLAG
+  bb_run_step tools/android/findbugs_plugin/test/run_findbugs_plugin_tests.py \
+    $BUILDFLAG
+}
+
+# Run a buildbot step and handle failure (failure will not halt build).
+function bb_run_step {
+  (
+  set +e
+  "$@"
+  if [[ $? != 0 ]]; then
+    echo "@@@STEP_FAILURE@@@"
+  fi
+  )
+}
+
+# Install a specific APK.
+# Args:
+#   $1: APK to be installed.
+#   $2: APK_PACKAGE for the APK to be installed.
+function bb_install_apk {
+  local APK=${1}
+  local APK_PACKAGE=${2}
+  if [[ $BUILDTYPE = Release ]]; then
+    local BUILDFLAG="--release"
+  fi
+
+  echo "@@@BUILD_STEP Install ${APK}@@@"
+  python build/android/adb_install_apk.py --apk ${APK} \
+      --apk_package ${APK_PACKAGE} ${BUILDFLAG}
+}
+
+# Run instrumentation tests for a specific APK.
+# Args:
+#   $1: APK to be installed.
+#   $2: APK_PACKAGE for the APK to be installed.
+#   $3: TEST_APK to run the tests against.
+#   $4: TEST_DATA in format destination:source
+function bb_run_all_instrumentation_tests_for_apk {
+  local APK=${1}
+  local APK_PACKAGE=${2}
+  local TEST_APK=${3}
+  local TEST_DATA=${4}
+
+  # Install application APK.
+  bb_install_apk ${APK} ${APK_PACKAGE}
+
+  # Run instrumentation tests. Using -I to install the test apk.
+  echo "@@@BUILD_STEP Run instrumentation tests ${TEST_APK}@@@"
+  bb_run_step python build/android/run_instrumentation_tests.py \
+      -vvv --test-apk ${TEST_APK} -I --test_data ${TEST_DATA}
+}
+
+# Run instrumentation tests for all relevant APKs on device.
+function bb_run_instrumentation_tests {
+  bb_run_all_instrumentation_tests_for_apk "ContentShell.apk" \
+      "org.chromium.content_shell" "ContentShellTest" \
+      "content:content/test/data/android/device_files"
+  bb_run_all_instrumentation_tests_for_apk "ChromiumTestShell.apk" \
+      "org.chromium.chrome.testshell" "ChromiumTestShellTest" \
+      "chrome:chrome/test/data/android/device_files"
+  bb_run_all_instrumentation_tests_for_apk "AndroidWebView.apk" \
+      "org.chromium.android_webview" "AndroidWebViewTest" \
+      "webview:android_webview/test/data/device_files"
+}
+
+# Run instrumentation tests for experimental APKs on device.
+function bb_run_experimental_instrumentation_tests {
+  echo "" # Can't have empty functions in bash.
+}
+
+# Zip and archive a build.
+function bb_zip_build {
+  echo "@@@BUILD_STEP Zip build@@@"
+  python ../../../../scripts/slave/zip_build.py \
+    --src-dir "$SRC_ROOT" \
+    --exclude-files "lib.target,gen,android_webview,jingle_unittests" \
+    --factory-properties "$FACTORY_PROPERTIES" \
+    --build-properties "$BUILD_PROPERTIES"
+}
+
+# Download and extract a build.
+function bb_extract_build {
+  echo "@@@BUILD_STEP Download and extract build@@@"
+  if [[ -z $FACTORY_PROPERTIES || -z $BUILD_PROPERTIES ]]; then
+    return 1
+  fi
+
+  # When extract_build.py downloads an unversioned build it
+  # issues a warning by exiting with a large numbered return code
+  # When it fails to download the build, it exits with return
+  # code 1.  We disable halt on error mode and return normally
+  # unless the python tool returns 1.
+  (
+  set +e
+  python ../../../../scripts/slave/extract_build.py \
+    --build-dir "$SRC_ROOT/build" \
+    --build-output-dir "../out" \
+    --factory-properties "$FACTORY_PROPERTIES" \
+    --build-properties "$BUILD_PROPERTIES"
+  local extract_exit_code=$?
+  if (( $extract_exit_code > 1 )); then
+    echo "@@@STEP_WARNINGS@@@"
+    return
+  fi
+  return $extract_exit_code
+  )
+}
+
+# Reboot all phones and wait for them to start back up
+# Does not break build if a phone fails to restart
+function bb_reboot_phones {
+  echo "@@@BUILD_STEP Rebooting phones@@@"
+  (
+  set +e
+  cd $CHROME_SRC/build/android/pylib;
+  for DEVICE in $(adb_get_devices); do
+    python -c "import android_commands;\
+        android_commands.AndroidCommands(device='$DEVICE').Reboot(True)" &
+  done
+  wait
+  )
+}
+
+# Runs the license checker for the WebView build.
+function bb_check_webview_licenses {
+  echo "@@@BUILD_STEP Check licenses for WebView@@@"
+  (
+  set +e
+  cd "${SRC_ROOT}"
+  python android_webview/tools/webview_licenses.py scan
+  if [[ $? -ne 0 ]]; then
+    echo "@@@STEP_WARNINGS@@@"
+  fi
+  return 0
+  )
+}
+
+# Retrieve a packed json property using python
+function bb_get_json_prop {
+  local JSON="$1"
+  local PROP="$2"
+
+  python -c "import json; print json.loads('$JSON').get('$PROP', '')"
+}
diff --git a/src/build/android/cpufeatures.gypi b/src/build/android/cpufeatures.gypi
new file mode 100644
index 0000000..17b262c
--- /dev/null
+++ b/src/build/android/cpufeatures.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Depend on the Android NDK's cpu feature detection. The WebView build is part
+# of the system and the library already exists; for the normal build there is a
+# gyp file in the checked-in NDK to build it.
+{
+  'conditions': [
+    ['android_build_type != 0', {
+      'libraries': [
+        'cpufeatures.a'
+      ],
+    }, {
+      'dependencies': [
+        '<(android_ndk_root)/android_tools_ndk.gyp:cpu_features',
+      ],
+    }],
+  ],
+}
diff --git a/src/build/android/device_stats_monitor.py b/src/build/android/device_stats_monitor.py
new file mode 100755
index 0000000..181c3db
--- /dev/null
+++ b/src/build/android/device_stats_monitor.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides iotop/top style profiling for android.
+
+Usage:
+  ./device_stats_monitor.py --hz=20 --duration=5 --outfile=/tmp/foo
+"""
+
+import optparse
+import os
+import sys
+import time
+
+from pylib import android_commands
+from pylib import device_stats_monitor
+from pylib import test_options_parser
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--hz', type='int', default=20,
+                           help='Number of samples/sec.')
+  option_parser.add_option('--duration', type='int', default=5,
+                           help='Seconds to monitor.')
+  option_parser.add_option('--outfile', default='/tmp/devicestatsmonitor',
+                           help='Location to start output file.')
+  test_options_parser.AddBuildTypeOption(option_parser)
+  options, args = option_parser.parse_args(argv)
+
+  monitor = device_stats_monitor.DeviceStatsMonitor(
+      android_commands.AndroidCommands(), options.hz, options.build_type)
+  monitor.Start()
+  print 'Waiting for %d seconds while profiling.' % options.duration
+  time.sleep(options.duration)
+  url = monitor.StopAndCollect(options.outfile)
+  print 'View results in browser at %s' % url
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/device_status_check.py b/src/build/android/device_status_check.py
new file mode 100755
index 0000000..3d695a2
--- /dev/null
+++ b/src/build/android/device_status_check.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to keep track of devices across builds and report state."""
+import logging
+import optparse
+import os
+import smtplib
+import sys
+
+from pylib import buildbot_report
+from pylib.android_commands import GetAttachedDevices
+from pylib.cmd_helper import GetCmdOutput
+
+
+def DeviceInfo(serial):
+  """Gathers info on a device via various adb calls.
+
+  Args:
+    serial: The serial of the attached device to construct info about.
+
+  Returns:
+    Tuple of device type, build id and report as a string.
+  """
+
+  def AdbShellCmd(cmd):
+    return GetCmdOutput('adb -s %s shell %s' % (serial, cmd),
+                        shell=True).strip()
+
+  device_type = AdbShellCmd('getprop ro.build.product')
+  device_build = AdbShellCmd('getprop ro.build.id')
+
+  report = ['Device %s (%s)' % (serial, device_type),
+            '  Build: %s (%s)' % (device_build,
+                                  AdbShellCmd('getprop ro.build.fingerprint')),
+            '  Battery: %s%%' % AdbShellCmd('dumpsys battery | grep level '
+                                            "| awk '{print $2}'"),
+            '  Battery temp: %s' % AdbShellCmd('dumpsys battery'
+                                               '| grep temp '
+                                               "| awk '{print $2}'"),
+            '  IMEI slice: %s' % AdbShellCmd('dumpsys iphonesubinfo '
+                                             '| grep Device'
+                                             "| awk '{print $4}'")[-6:],
+            '  Wifi IP: %s' % AdbShellCmd('getprop dhcp.wlan0.ipaddress'),
+            '']
+
+  return device_type, device_build, '\n'.join(report)
+
+
+def CheckForMissingDevices(options, adb_online_devs):
+  """Uses file of previous online devices to detect broken phones.
+
+  Args:
+    options: out_dir parameter of options argument is used as the base
+             directory to load and update the cache file.
+    adb_online_devs: A list of serial numbers of the currently visible
+                     and online attached devices.
+  """
+  # TODO(navabi): remove this once the bug that causes different number
+  # of devices to be detected between calls is fixed.
+  logger = logging.getLogger()
+  logger.setLevel(logging.INFO)
+
+  out_dir = os.path.abspath(options.out_dir)
+
+  def ReadDeviceList(file_name):
+    devices_path = os.path.join(out_dir, file_name)
+    devices = []
+    try:
+      with open(devices_path) as f:
+        devices = f.read().splitlines()
+    except IOError:
+      # Ignore error, file might not exist
+      pass
+    return devices
+
+  def WriteDeviceList(file_name, device_list):
+    path = os.path.join(out_dir, file_name)
+    if not os.path.exists(out_dir):
+      os.makedirs(out_dir)
+    with open(path, 'w') as f:
+      # Write devices currently visible plus devices previously seen.
+      f.write('\n'.join(set(device_list)))
+
+  last_devices_path = os.path.join(out_dir, '.last_devices')
+  last_devices = ReadDeviceList('.last_devices')
+
+  missing_devs = list(set(last_devices) - set(adb_online_devs))
+  if missing_devs:
+    from_address = 'buildbot@chromium.org'
+    to_address = 'chromium-android-device-alerts@google.com'
+    bot_name = os.environ['BUILDBOT_BUILDERNAME']
+    slave_name = os.environ['BUILDBOT_SLAVENAME']
+    num_online_devs = len(adb_online_devs)
+    subject = 'Devices offline on %s, %s (%d remaining).' % (slave_name,
+                                                             bot_name,
+                                                             num_online_devs)
+    buildbot_report.PrintWarning()
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    buildbot_report.PrintSummaryText(devices_missing_msg)
+
+    # TODO(navabi): Debug by printing both output from GetCmdOutput and
+    # GetAttachedDevices to compare results.
+    body = '\n'.join(
+        ['Current online devices: %s' % adb_online_devs,
+         '%s are no longer visible. Were they removed?\n' % missing_devs,
+         'SHERIFF: See go/chrome_device_monitor',
+         'Cache file: %s\n\n' % last_devices_path,
+         'adb devices: %s' % GetCmdOutput(['adb', 'devices']),
+         'adb devices(GetAttachedDevices): %s' % GetAttachedDevices()])
+
+    print body
+
+    # Only send email if the first time a particular device goes offline
+    last_missing = ReadDeviceList('.last_missing')
+    new_missing_devs = set(missing_devs) - set(last_missing)
+
+    if new_missing_devs:
+      msg_body = '\r\n'.join(
+          ['From: %s' % from_address,
+           'To: %s' % to_address,
+           'Subject: %s' % subject,
+           '', body])
+      try:
+        server = smtplib.SMTP('localhost')
+        server.sendmail(from_address, [to_address], msg_body)
+        server.quit()
+      except Exception as e:
+        print 'Failed to send alert email. Error: %s' % e
+  else:
+    new_devs = set(adb_online_devs) - set(last_devices)
+    if new_devs and os.path.exists(last_devices_path):
+      buildbot_report.PrintWarning()
+      buildbot_report.PrintSummaryText(
+          '%d new devices detected' % len(new_devs))
+      print ('New devices detected %s. And now back to your '
+             'regularly scheduled program.' % list(new_devs))
+  WriteDeviceList('.last_devices', (adb_online_devs + last_devices))
+  WriteDeviceList('.last_missing', missing_devs)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('', '--out-dir',
+                    help='Directory where the device path is stored',
+                    default=os.path.join(os.path.dirname(__file__), '..',
+                                         '..', 'out'))
+
+  options, args = parser.parse_args()
+  if args:
+    parser.error('Unknown options %s' % args)
+  buildbot_report.PrintNamedStep('Device Status Check')
+  devices = GetAttachedDevices()
+  types, builds, reports = [], [], []
+  if devices:
+    types, builds, reports = zip(*[DeviceInfo(dev) for dev in devices])
+
+  unique_types = list(set(types))
+  unique_builds = list(set(builds))
+
+  buildbot_report.PrintMsg('Online devices: %d. Device types %s, builds %s'
+                           % (len(devices), unique_types, unique_builds))
+  print '\n'.join(reports)
+  CheckForMissingDevices(options, devices)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/android/empty/src/.keep b/src/build/android/empty/src/.keep
new file mode 100644
index 0000000..0f710b6
--- /dev/null
+++ b/src/build/android/empty/src/.keep
@@ -0,0 +1,6 @@
+This is a file that needs to live here until http://crbug.com/158155 has
+been fixed.
+
+The ant build system requires that a src folder is always present, and for
+some of our targets that is not the case. Giving it an empty src-folder works
+nicely though.
diff --git a/src/build/android/emulator.py b/src/build/android/emulator.py
new file mode 100755
index 0000000..77c9a75
--- /dev/null
+++ b/src/build/android/emulator.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+Assumes system environment ANDROID_NDK_ROOT has been set.
+
+  Emulator: The class provides the methods to launch/shutdown the emulator with
+            the android virtual device named 'avd_armeabi' .
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import sys
+import time
+
+from pylib import android_commands
+from pylib import cmd_helper
+
+# adb_interface.py is under ../../third_party/android_testrunner/
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
+   '..', 'third_party', 'android_testrunner'))
+import adb_interface
+import errors
+import run_command
+
+class EmulatorLaunchException(Exception):
+  """Raised when the emulator fails to launch or dies during startup."""
+  pass
+
+def _KillAllEmulators():
+  """Kill all running emulators that look like ones we started.
+
+  There are odd 'sticky' cases where there can be no emulator process
+  running but a device slot is taken.  A little bot trouble and
+  we're out of room forever.
+  """
+  emulators = android_commands.GetEmulators()
+  if not emulators:
+    return
+  for emu_name in emulators:
+    cmd_helper.GetCmdOutput(['adb', '-s', emu_name, 'emu', 'kill'])
+  logging.info('Emulator killing is async; give a few seconds for all to die.')
+  # 'emu kill' is asynchronous: poll for up to 5 seconds for the slots to
+  # clear, then give up silently.
+  for i in range(5):
+    if not android_commands.GetEmulators():
+      return
+    time.sleep(1)
+
+
+def DeleteAllTempAVDs():
+  """Delete all temporary AVDs which are created for tests.
+
+  If a test exits abnormally, the temporary AVDs it created may be left
+  behind.  This removes every AVD whose name contains 'run_tests_avd'.
+  """
+  avds = android_commands.GetAVDs()
+  if not avds:
+    return
+  for avd_name in avds:
+    if 'run_tests_avd' in avd_name:
+      cmd = ['android', '-s', 'delete', 'avd', '--name', avd_name]
+      cmd_helper.GetCmdOutput(cmd)
+      logging.info('Delete AVD %s' % avd_name)
+
+
+class PortPool(object):
+  """Pool for emulator port starting position that changes over time."""
+  # Emulator console ports are the even numbers in [5554, 5584]
+  # (range() below excludes _port_max itself).
+  _port_min = 5554
+  _port_max = 5585
+  # Rotating start index into the port list; advanced on every call to
+  # port_range() so consecutive launches start from different ports.
+  _port_current_index = 0
+
+  @classmethod
+  def port_range(cls):
+    """Return a range of valid ports for emulator use.
+
+    The port must be an even number between 5554 and 5584.  Sometimes
+    a killed emulator "hangs on" to a port long enough to prevent
+    relaunch.  This is especially true on slow machines (like a bot).
+    Cycling through a port start position helps make us resilient."""
+    ports = range(cls._port_min, cls._port_max, 2)
+    n = cls._port_current_index
+    cls._port_current_index = (n + 1) % len(ports)
+    return ports[n:] + ports[:n]
+
+
+def _GetAvailablePort():
+  """Returns an available TCP port for the console.
+
+  Implicitly returns None when every port in PortPool is already in use.
+  """
+  used_ports = []
+  emulators = android_commands.GetEmulators()
+  for emulator in emulators:
+    # Emulator serials look like 'emulator-5554'; the port follows the dash.
+    used_ports.append(emulator.split('-')[1])
+  for port in PortPool.port_range():
+    if str(port) not in used_ports:
+      return port
+
+
+class Emulator(object):
+  """Provides the methods to launch/shutdown the emulator.
+
+  The emulator has the android virtual device named 'avd_armeabi'.
+
+  The emulator could use any even TCP port between 5554 and 5584 for the
+  console communication, and this port will be part of the device name like
+  'emulator-5554'. Assume it is always True, as the device name is the id of
+  emulator managed in this class.
+
+  Attributes:
+    emulator: Path of Android's emulator tool.
+    popen: Popen object of the running emulator process.
+    device: Device name of this emulator.
+  """
+
+  # Signals we listen for to kill the emulator on
+  _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+  # Time to wait for an emulator launch, in seconds.  This includes
+  # the time to launch the emulator and a wait-for-device command.
+  _LAUNCH_TIMEOUT = 120
+
+  # Timeout interval of wait-for-device command before bouncing to a
+  # process life check.
+  _WAITFORDEVICE_TIMEOUT = 5
+
+  # Time to wait for a "wait for boot complete" (property set on device).
+  _WAITFORBOOT_TIMEOUT = 300
+
+  def __init__(self, new_avd_name, fast_and_loose):
+    """Init an Emulator.
+
+    Args:
+      new_avd_name: If set, will create a new temporary AVD with this name.
+      fast_and_loose: Loosen up the rules for reliable running for speed.
+        Intended for quick testing or re-testing.
+
+    Raises:
+      KeyError: If the ANDROID_SDK_ROOT environment variable is not set.
+    """
+    try:
+      android_sdk_root = os.environ['ANDROID_SDK_ROOT']
+    except KeyError:
+      logging.critical('The ANDROID_SDK_ROOT must be set to run the test on '
+                       'emulator.')
+      raise
+    self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+    self.android = os.path.join(android_sdk_root, 'tools', 'android')
+    self.popen = None
+    self.device = None
+    self.default_avd = True
+    self.fast_and_loose = fast_and_loose
+    self.abi = 'armeabi-v7a'
+    self.avd = 'avd_armeabi'
+    # x86 emulator images need a different ABI and a matching AVD.
+    if 'x86' in os.environ.get('TARGET_PRODUCT', ''):
+      self.abi = 'x86'
+      self.avd = 'avd_x86'
+    if new_avd_name:
+      self.default_avd = False
+      self.avd = self._CreateAVD(new_avd_name)
+
+  def _DeviceName(self):
+    """Return our device name as an ('emulator-<port>', port) tuple."""
+    port = _GetAvailablePort()
+    return ('emulator-%d' % port, port)
+
+  def _CreateAVD(self, avd_name):
+    """Creates an AVD with the given name.
+
+    Return avd_name.
+    """
+    avd_command = [
+        self.android,
+        '--silent',
+        'create', 'avd',
+        '--name', avd_name,
+        '--abi', self.abi,
+        '--target', 'android-16',
+        '-c', '128M',
+        '--force',
+    ]
+    avd_process = subprocess.Popen(args=avd_command,
+                                   stdin=subprocess.PIPE,
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT)
+    # 'android create avd' prompts on stdin; we answer 'no' (presumably to
+    # the custom-hardware-profile question -- confirm against the SDK docs).
+    avd_process.stdin.write('no\n')
+    avd_process.wait()
+    logging.info('Create AVD command: %s', ' '.join(avd_command))
+    return avd_name
+
+  def _DeleteAVD(self):
+    """Delete the AVD of this emulator."""
+    avd_command = [
+        self.android,
+        '--silent',
+        'delete',
+        'avd',
+        '--name', self.avd,
+    ]
+    avd_process = subprocess.Popen(args=avd_command,
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT)
+    logging.info('Delete AVD command: %s', ' '.join(avd_command))
+    avd_process.wait()
+
+  def Launch(self, kill_all_emulators):
+    """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the
+    emulator is ready for use.
+
+    If fails, an exception will be raised.
+    """
+    if kill_all_emulators:
+      _KillAllEmulators()  # just to be sure
+    if not self.fast_and_loose:
+      self._AggressiveImageCleanup()
+    (self.device, port) = self._DeviceName()
+    emulator_command = [
+        self.emulator,
+        # Speed up emulator launch by 40%.  Really.
+        '-no-boot-anim',
+        # The default /data size is 64M.
+        # That's not enough for 8 unit test bundles and their data.
+        '-partition-size', '512',
+        # Enable GPU by default.
+        '-gpu', 'on',
+        # Use a familiar name and port.
+        '-avd', self.avd,
+        '-port', str(port)]
+    if not self.fast_and_loose:
+      emulator_command.extend([
+          # Wipe the data.  We've seen cases where an emulator
+          # gets 'stuck' if we don't do this (every thousand runs or
+          # so).
+          '-wipe-data',
+          ])
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+
+  def _AggressiveImageCleanup(self):
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER.  If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    if not os.path.exists(emulator_imagedir):
+      return
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def ConfirmLaunch(self, wait_for_boot=False):
+    """Confirm the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout.  On each
+    timeout, check the emulator process is still alive.
+    After confirming a wait-for-device can be successful, make sure
+    it returns the right answer.
+
+    Raises:
+      EmulatorLaunchException: If the emulator process dies, or the launch
+        does not complete within _LAUNCH_TIMEOUT seconds.
+    """
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wfd twice
+    adb_cmd = "adb -s %s %s" % (self.device, 'wait-for-device')
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        run_command.RunCommand(adb_cmd,
+                               timeout_time=self._WAITFORDEVICE_TIMEOUT,
+                               retry_count=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except errors.WaitForResponseTimedOutError as e:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        # Kick adb between retries -- presumably to recover from a wedged
+        # adb server; confirm before relying on it.
+        adb_cmd = "adb -s %s %s" % (self.device, 'kill-server')
+        run_command.RunCommand(adb_cmd)
+      # Poll so returncode is refreshed before the liveness check below.
+      self.popen.poll()
+      if self.popen.returncode != None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
+      a = android_commands.AndroidCommands(self.device)
+      a.WaitForSystemBootCompleted(self._WAITFORBOOT_TIMEOUT)
+
+  def Shutdown(self):
+    """Shuts down the process started by launch."""
+    if not self.default_avd:
+      self._DeleteAVD()
+    if self.popen:
+      self.popen.poll()
+      # returncode is None while the process is still running.
+      if self.popen.returncode == None:
+        self.popen.kill()
+      self.popen = None
+
+  def _ShutdownOnSignal(self, signum, frame):
+    """Signal handler: restore default handlers, shut down, then re-raise."""
+    logging.critical('emulator _ShutdownOnSignal')
+    for sig in self._SIGNALS:
+      signal.signal(sig, signal.SIG_DFL)
+    self.Shutdown()
+    raise KeyboardInterrupt  # print a stack
+
+  def _InstallKillHandler(self):
+    """Install a handler to kill the emulator when we exit unexpectedly."""
+    for sig in self._SIGNALS:
+      signal.signal(sig, self._ShutdownOnSignal)
+
+def main(argv):
+  # Smoke entry point: launch the default AVD with fast_and_loose=True after
+  # killing any emulators that are already running.
+  Emulator(None, True).Launch(True)
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/src/build/android/enable_asserts.py b/src/build/android/enable_asserts.py
new file mode 100755
index 0000000..5659e9e
--- /dev/null
+++ b/src/build/android/enable_asserts.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Enables dalvik vm asserts in the android device."""
+
+from pylib import android_commands
+import optparse
+import sys
+
+
+def main(argv):
+  """Enable or disable Dalvik VM assertions based on command-line flags."""
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--enable_asserts', dest='set_asserts',
+      action='store_true', default=None,
+      help='Sets the dalvik.vm.enableassertions property to "all"')
+  option_parser.add_option('--disable_asserts', dest='set_asserts',
+      action='store_false', default=None,
+      help='Removes the dalvik.vm.enableassertions property')
+  options, _ = option_parser.parse_args(argv)
+
+  commands = android_commands.AndroidCommands()
+  # set_asserts stays None when neither flag was given; do nothing then.
+  if options.set_asserts != None:
+    if commands.SetJavaAssertsEnabled(options.set_asserts):
+      # SetJavaAssertsEnabled presumably returns True only when the property
+      # changed; soft-reboot to apply it -- confirm against android_commands.
+      commands.Reboot(full_reboot=False)
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/src/build/android/envsetup.sh b/src/build/android/envsetup.sh
new file mode 100644
index 0000000..10d9ec8
--- /dev/null
+++ b/src/build/android/envsetup.sh
@@ -0,0 +1,138 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android.  It can either be
+# compiled with the Android tree or using the Android SDK/NDK. To build with
+# NDK/SDK: ". build/android/envsetup.sh".  Environment variable
+# ANDROID_SDK_BUILD=1 will then be defined and used in the rest of the setup to
+# specify build type.
+
+# Source functions script.  The file is in the same directory as this script.
+. "$(dirname $BASH_SOURCE)"/envsetup_functions.sh
+
+export ANDROID_SDK_BUILD=1  # Default to SDK build.
+
+process_options "$@"
+
+# When building WebView as part of Android we can't use the SDK. Other builds
+# default to using the SDK.
+if [[ "${CHROME_ANDROID_BUILD_WEBVIEW}" -eq 1 ]]; then
+  export ANDROID_SDK_BUILD=0
+fi
+
+if [[ "${ANDROID_SDK_BUILD}" -eq 1 ]]; then
+  echo "Using SDK build"
+fi
+
+# Get host architecture, and abort if it is 32-bit, unless --try-32
+# is also used.
+host_arch=$(uname -m)
+case "${host_arch}" in
+  x86_64)  # pass
+    ;;
+  i?86)
+    if [[ -z "${try_32bit_host_build}" ]]; then
+      echo "ERROR: Android build requires a 64-bit host build machine."
+      echo "If you really want to try it on this machine, use the \
+--try-32bit-host flag."
+      echo "Be warned that this may fail horribly at link time, due \
+very large binaries."
+      return 1
+    else
+      echo "WARNING: 32-bit host build enabled. Here be dragons!"
+      host_arch=x86
+    fi
+    ;;
+  *)
+    echo "ERROR: Unsupported host architecture (${host_arch})."
+    echo "Try running this script on a Linux/x86_64 machine instead."
+    return 1
+esac
+
+host_os=$(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')
+
+case "${host_os}" in
+  "linux")
+    toolchain_dir="linux-${host_arch}"
+    ;;
+  "mac")
+    toolchain_dir="darwin-${host_arch}"
+    ;;
+  *)
+    echo "Host platform ${host_os} is not supported" >& 2
+    return 1
+esac
+
+CURRENT_DIR="$(readlink -f "$(dirname $BASH_SOURCE)/../../")"
+if [[ -z "${CHROME_SRC}" ]]; then
+  # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+  export CHROME_SRC="${CURRENT_DIR}"
+fi
+
+if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+  # If current directory is not in $CHROME_SRC, it might be set for other
+  # source tree. If $CHROME_SRC was set correctly and we are in the correct
+  # directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
+  # Otherwise, it will equal to "${CURRENT_DIR}"
+  echo "Warning: Current directory is out of CHROME_SRC, it may not be \
+the one you want."
+  echo "${CHROME_SRC}"
+fi
+
+# Android sdk platform version to use
+export ANDROID_SDK_VERSION=16
+
+if [[ "${ANDROID_SDK_BUILD}" -eq 1 ]]; then
+  if [[ -z "${TARGET_ARCH}" ]]; then
+    return 1
+  fi
+  sdk_build_init
+# Sets up environment for building Chromium for Android with source. Expects
+# android environment setup and lunch.
+elif [[ -z "$ANDROID_BUILD_TOP" || \
+        -z "$ANDROID_TOOLCHAIN" || \
+        -z "$ANDROID_PRODUCT_OUT" ]]; then
+  echo "Android build environment variables must be set."
+  echo "Please cd to the root of your Android tree and do: "
+  echo "  . build/envsetup.sh"
+  echo "  lunch"
+  echo "Then try this again."
+  echo "Or did you mean NDK/SDK build. Run envsetup.sh without any arguments."
+  return 1
+elif [[ -n "$CHROME_ANDROID_BUILD_WEBVIEW" ]]; then
+  webview_build_init
+fi
+
+# Workaround for valgrind build
+if [[ -n "$CHROME_ANDROID_VALGRIND_BUILD" ]]; then
+# arm_thumb=0 is a workaround for https://bugs.kde.org/show_bug.cgi?id=270709
+  DEFINES+=" arm_thumb=0 release_extra_cflags='-fno-inline\
+ -fno-omit-frame-pointer -fno-builtin' release_valgrind_build=1\
+ release_optimize=1"
+fi
+
+# Source a bunch of helper functions
+. ${CHROME_SRC}/build/android/adb_device_functions.sh
+
+ANDROID_GOMA_WRAPPER=""
+if [[ -d $GOMA_DIR ]]; then
+  ANDROID_GOMA_WRAPPER="$GOMA_DIR/gomacc"
+fi
+export ANDROID_GOMA_WRAPPER
+
+# Declare that Android builds are cross-compiled.
+export GYP_CROSSCOMPILE=1
+
+# Performs a gyp_chromium run to convert gyp->Makefile for android code.
+android_gyp() {
+  # This is just a simple wrapper of gyp_chromium, please don't add anything
+  # in this function.
+  echo "GYP_GENERATORS set to '$GYP_GENERATORS'"
+  # Run in a subshell so any environment changes made while generating do not
+  # leak into the caller's shell.
+  (
+    "${CHROME_SRC}/build/gyp_chromium" --depth="${CHROME_SRC}" --check "$@"
+  )
+}
+
+# FLOCK needs to be null on system that has no flock
+which flock > /dev/null || export FLOCK=
diff --git a/src/build/android/envsetup_functions.sh b/src/build/android/envsetup_functions.sh
new file mode 100755
index 0000000..99eddd4
--- /dev/null
+++ b/src/build/android/envsetup_functions.sh
@@ -0,0 +1,306 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines functions for envsetup.sh which sets up environment for building
+# Chromium on Android.  The build can be either use the Android NDK/SDK or
+# android source tree.  Each has a unique init function which calls functions
+# prefixed with "common_" that is common for both environment setups.
+
+################################################################################
+# Check to make sure the toolchain exists for the NDK version.
+################################################################################
+common_check_toolchain() {
+  # Errors go to stderr; returning 1 lets callers abort the whole setup.
+  if [[ ! -d "${ANDROID_TOOLCHAIN}" ]]; then
+    echo "Can not find Android toolchain in ${ANDROID_TOOLCHAIN}." >& 2
+    echo "The NDK version might be wrong." >& 2
+    return 1
+  fi
+}
+
+################################################################################
+# Exports environment variables common to both sdk and non-sdk build (e.g. PATH)
+# based on CHROME_SRC and ANDROID_TOOLCHAIN, along with DEFINES for GYP_DEFINES.
+################################################################################
+common_vars_defines() {
+  # Set toolchain path according to product architecture.
+  case "${TARGET_ARCH}" in
+    "arm")
+      toolchain_arch="arm-linux-androideabi"
+      ;;
+    "x86")
+      toolchain_arch="x86"
+      ;;
+    *)
+      echo "TARGET_ARCH: ${TARGET_ARCH} is not supported." >& 2
+      print_usage
+      return 1
+      ;;
+  esac
+
+  toolchain_version="4.6"
+  # We directly set the gcc_version since we know what we use, and it should
+  # be set to xx instead of x.x. Refer to the output of compiler_version.py.
+  gcc_version="46"
+  toolchain_target=$(basename \
+    ${ANDROID_NDK_ROOT}/toolchains/${toolchain_arch}-${toolchain_version})
+  toolchain_path="${ANDROID_NDK_ROOT}/toolchains/${toolchain_target}"\
+"/prebuilt/${toolchain_dir}/bin/"
+
+  # Set only if not already set.
+  # Don't override ANDROID_TOOLCHAIN if set by Android configuration env.
+  export ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN:-${toolchain_path}}
+
+  common_check_toolchain
+
+  # Add Android SDK/NDK tools to system path.
+  export PATH=$PATH:${ANDROID_NDK_ROOT}
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+  # This must be set before ANDROID_TOOLCHAIN, so that clang could find the
+  # gold linker.
+  # TODO(michaelbai): Remove this path once the gold linker becomes the
+  # default linker.
+  export PATH=$PATH:${CHROME_SRC}/build/android/${toolchain_arch}-gold
+
+  # Must have tools like arm-linux-androideabi-gcc on the path for ninja
+  export PATH=$PATH:${ANDROID_TOOLCHAIN}
+
+  # Add Chromium Android development scripts to system path.
+  # Must be after CHROME_SRC is set.
+  export PATH=$PATH:${CHROME_SRC}/build/android
+
+  # TODO(beverloo): Remove these once all consumers updated to --strip-binary.
+  export OBJCOPY=$(echo ${ANDROID_TOOLCHAIN}/*-objcopy)
+  export STRIP=$(echo ${ANDROID_TOOLCHAIN}/*-strip)
+
+  # The set of GYP_DEFINES to pass to gyp. Use 'readlink -e' on directories
+  # to canonicalize them (remove double '/', remove trailing '/', etc).
+  DEFINES="OS=android"
+  DEFINES+=" host_os=${host_os}"
+  DEFINES+=" gcc_version=${gcc_version}"
+
+  if [[ -n "$CHROME_ANDROID_OFFICIAL_BUILD" ]]; then
+    DEFINES+=" branding=Chrome"
+    DEFINES+=" buildtype=Official"
+
+    # These defines are used by various chrome build scripts to tag the binary's
+    # version string as 'official' in linux builds (e.g. in
+    # chrome/trunk/src/chrome/tools/build/version.py).
+    export OFFICIAL_BUILD=1
+    export CHROMIUM_BUILD="_google_chrome"
+    export CHROME_BUILD_TYPE="_official"
+
+    # Used by chrome_version_info_posix.cc to display the channel name.
+    # Valid values: "unstable", "stable", "dev", "beta".
+    export CHROME_VERSION_EXTRA="beta"
+  fi
+
+  # The order file specifies the order of symbols in the .text section of the
+  # shared library, libchromeview.so.  The file is an order list of section
+  # names and the library is linked with option
+  # --section-ordering-file=<orderfile>. The order file is updated by profiling
+  # startup after compiling with the order_profiling=1 GYP_DEFINES flag.
+  ORDER_DEFINES="order_text_section=${CHROME_SRC}/orderfiles/orderfile.out"
+
+  # The following defines will affect ARM code generation of both C/C++ compiler
+  # and V8 mksnapshot.
+  case "${TARGET_ARCH}" in
+    "arm")
+      DEFINES+=" arm_neon=0 armv7=1 arm_thumb=1 arm_fpu=vfpv3-d16"
+      DEFINES+=" arm_neon_optional=1"  # Enable dynamic NEON support.
+      DEFINES+=" ${ORDER_DEFINES}"
+      DEFINES+=" target_arch=arm"
+      ;;
+    "x86")
+      # TODO(tedbo): The ia32 build fails on ffmpeg, so we disable it here.
+      DEFINES+=" use_libffmpeg=0"
+
+      host_arch=$(uname -m | sed -e \
+        's/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/i86pc/ia32/')
+      DEFINES+=" host_arch=${host_arch}"
+      DEFINES+=" target_arch=ia32"
+      ;;
+    *)
+      echo "TARGET_ARCH: ${TARGET_ARCH} is not supported." >& 2
+      print_usage
+      return 1
+  esac
+
+  DEFINES+=" android_gdbserver=${ANDROID_NDK_ROOT}/prebuilt/\
+android-${TARGET_ARCH}/gdbserver/gdbserver"
+}
+
+
+################################################################################
+# Exports common GYP variables based on variable DEFINES and CHROME_SRC.
+################################################################################
+common_gyp_vars() {
+  # Publish the DEFINES accumulated by the callers to gyp.
+  export GYP_DEFINES="${DEFINES}"
+
+  # Set GYP_GENERATORS to make-android if it's currently unset or null.
+  export GYP_GENERATORS="${GYP_GENERATORS:-make-android}"
+
+  # Use our All target as the default
+  export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} default_target=All"
+
+  # We want to use our version of "all" targets.
+  export CHROMIUM_GYP_FILE="${CHROME_SRC}/build/all_android.gyp"
+}
+
+
+################################################################################
+# Prints out help message on usage.
+################################################################################
+print_usage() {
+  # All usage output goes to stderr so it never pollutes captured stdout.
+  echo "usage: ${0##*/} [--target-arch=value] [--help]" >& 2
+  echo "--target-arch=value     target CPU architecture (arm=default, x86)" >& 2
+  echo "--try-32bit-host        try building a 32-bit host architecture" >&2
+  echo "--help                  this help" >& 2
+}
+
+################################################################################
+# Process command line options.
+# --target-arch=  Specifies target CPU architecture. Currently supported
+#                 architectures are "arm" (default), and "x86".
+# --help          Prints out help message.
+################################################################################
+process_options() {
+  try_32bit_host_build=
+  while [[ $1 ]]; do
+    case "$1" in
+      --target-arch=*)
+        # Strip everything up to and including the first '='.
+        target_arch="$(echo "$1" | sed 's/^[^=]*=//')"
+        ;;
+      --try-32bit-host)
+        try_32bit_host_build=true
+        ;;
+      --help)
+        print_usage
+        return 1
+        ;;
+      *)
+        # Warn about unrecognized options but keep processing the rest.
+        echo "Unknown option: $1"
+        ;;
+    esac
+    shift
+  done
+
+  # Sets TARGET_ARCH. Defaults to arm if not specified.
+  TARGET_ARCH=${target_arch:-arm}
+}
+
+################################################################################
+# Initializes environment variables for NDK/SDK build. Only Android NDK Revision
+# 7 on Linux or Mac is officially supported. To run this script, the system
+# environment ANDROID_NDK_ROOT must be set to Android NDK's root path.  The
+# ANDROID_SDK_ROOT only needs to be set to override the default SDK which is in
+# the tree under $ROOT/src/third_party/android_tools/sdk.
+# To build Chromium for Android with NDK/SDK follow the steps below:
+#  > export ANDROID_NDK_ROOT=<android ndk root>
+#  > export ANDROID_SDK_ROOT=<android sdk root> # to override the default sdk
+#  > . build/android/envsetup.sh
+#  > make
+################################################################################
+sdk_build_init() {
+  # If ANDROID_NDK_ROOT is set when envsetup is run, use the ndk pointed to by
+  # the environment variable.  Otherwise, use the default ndk from the tree.
+  if [[ -z "${ANDROID_NDK_ROOT}" || ! -d "${ANDROID_NDK_ROOT}" ]]; then
+    export ANDROID_NDK_ROOT="${CHROME_SRC}/third_party/android_tools/ndk/"
+  fi
+
+  # If ANDROID_SDK_ROOT is set when envsetup is run, and if it has the
+  # right SDK-compatible directory layout, use the sdk pointed to by the
+  # environment variable.  Otherwise, use the default sdk from the tree.
+  local sdk_suffix=platforms/android-${ANDROID_SDK_VERSION}
+  if [[ -z "${ANDROID_SDK_ROOT}" || \
+       ! -d "${ANDROID_SDK_ROOT}/${sdk_suffix}" ]]; then
+    export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+  fi
+
+  # SDK builds do not use an Android source tree checkout.
+  unset ANDROID_BUILD_TOP
+
+  # Set default target.
+  export TARGET_PRODUCT="${TARGET_PRODUCT:-trygon}"
+
+  # Unset toolchain so that it can be set based on TARGET_PRODUCT.
+  # This makes it easy to switch between architectures.
+  unset ANDROID_TOOLCHAIN
+
+  common_vars_defines
+
+  DEFINES+=" sdk_build=1"
+
+  # Sets android specific directories to NOT_SDK_COMPLIANT.  This will allow
+  # android_gyp to generate make files, but will cause errors when (and only
+  # when) building targets that depend on these directories.
+  DEFINES+=" android_src='NOT_SDK_COMPLIANT'"
+  DEFINES+=" android_product_out=${CHROME_SRC}/out/android"
+  DEFINES+=" android_lib='NOT_SDK_COMPLIANT'"
+  DEFINES+=" android_static_lib='NOT_SDK_COMPLIANT'"
+  DEFINES+=" android_sdk=${ANDROID_SDK_ROOT}/${sdk_suffix}"
+  DEFINES+=" android_sdk_root=${ANDROID_SDK_ROOT}"
+  DEFINES+=" android_sdk_tools=${ANDROID_SDK_ROOT}/platform-tools"
+  DEFINES+=" android_sdk_version=${ANDROID_SDK_VERSION}"
+  DEFINES+=" android_toolchain=${ANDROID_TOOLCHAIN}"
+
+  common_gyp_vars
+
+  if [[ -n "$CHROME_ANDROID_BUILD_WEBVIEW" ]]; then
+    # Can not build WebView with NDK/SDK because it needs the Android build
+    # system and build inside an Android source tree.
+    echo "Can not build WebView with NDK/SDK.  Requires android source tree." \
+        >& 2
+    echo "Try . build/android/envsetup.sh instead." >& 2
+    return 1
+  fi
+
+}
+
+################################################################################
+# To build WebView, we use the Android build system and build inside an Android
+# source tree. This method is called from envsetup.sh and adds to the
+# settings specified there.
+#############################################################################
+webview_build_init() {
+  # For the WebView build we always use the NDK and SDK in the Android tree,
+  # and we don't touch ANDROID_TOOLCHAIN which is already set by Android.
+  export ANDROID_NDK_ROOT=${ANDROID_BUILD_TOP}/prebuilts/ndk/8
+  export ANDROID_SDK_ROOT=${ANDROID_BUILD_TOP}/prebuilts/sdk/\
+${ANDROID_SDK_VERSION}
+
+  common_vars_defines
+
+  # We need to supply SDK paths relative to the top of the Android tree to make
+  # sure the generated Android makefiles are portable, as they will be checked
+  # into the Android tree.
+  ANDROID_SDK=$(python -c \
+      "import os.path; print os.path.relpath('${ANDROID_SDK_ROOT}', \
+      '${ANDROID_BUILD_TOP}')")
+  ANDROID_SDK_TOOLS=$(python -c \
+      "import os.path; \
+      print os.path.relpath('${ANDROID_SDK_ROOT}/../tools/linux', \
+      '${ANDROID_BUILD_TOP}')")
+  DEFINES+=" android_build_type=1"
+  DEFINES+=" sdk_build=0"
+  DEFINES+=" android_src=\$(GYP_ABS_ANDROID_TOP_DIR)"
+  DEFINES+=" android_product_out=NOT_USED_ON_WEBVIEW"
+  DEFINES+=" android_sdk=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK}"
+  DEFINES+=" android_sdk_root=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK}"
+  DEFINES+=" android_sdk_tools=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK_TOOLS}"
+  DEFINES+=" android_sdk_version=${ANDROID_SDK_VERSION}"
+  DEFINES+=" android_toolchain=${ANDROID_TOOLCHAIN}"
+  export GYP_DEFINES="${DEFINES}"
+
+  # The WebView build uses the Android makefile generator, not make-android.
+  export GYP_GENERATORS="android"
+
+  export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} default_target=All"
+  export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} limit_to_target_all=1"
+  export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} auto_regeneration=0"
+
+  export CHROMIUM_GYP_FILE="${CHROME_SRC}/android_webview/all_webview.gyp"
+}
diff --git a/src/build/android/findbugs_diff.py b/src/build/android/findbugs_diff.py
new file mode 100755
index 0000000..eb49824
--- /dev/null
+++ b/src/build/android/findbugs_diff.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs findbugs, and returns an error code if there are new warnings.
+This runs findbugs with an additional flag to exclude known bugs.
+To update the list of known bugs, do this:
+
+   findbugs_diff.py --rebaseline
+
+Note that this is separate from findbugs_exclude.xml. The "exclude" file has
+false positives that we do not plan to fix. The "known bugs" file has real
+bugs that we *do* plan to fix (but haven't done so yet).
+
+Other options
+  --only-analyze used to only analyze the classes you are interested in.
+  --release-build analyze the classes in the out/Release directory.
+  --findbugs-args used to pass in other findbugs options.
+
+Run
+  $CHROME_SRC/third_party/findbugs/bin/findbugs -textui for details.
+
+"""
+
+import optparse
+import os
+import sys
+
+from pylib import findbugs
+
+
+def main(argv):
+  """Run findbugs with the checked-in filters and return its exit status."""
+  if not findbugs.CheckEnvironment():
+    return 1
+
+  parser = findbugs.GetCommonParser()
+
+  options, _ = parser.parse_args()
+
+  # NOTE(review): the os.path.join below fails if CHROME_SRC is unset --
+  # presumably CheckEnvironment() above guarantees it; confirm.
+  chrome_src = os.getenv('CHROME_SRC')
+
+  # Default to the checked-in filter directory and to analyzing only
+  # org.chromium classes unless overridden on the command line.
+  if not options.base_dir:
+    options.base_dir = os.path.join(chrome_src, 'build', 'android',
+                                    'findbugs_filter')
+  if not options.only_analyze:
+    options.only_analyze = 'org.chromium.-'
+
+  return findbugs.Run(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/findbugs_filter/findbugs_exclude.xml b/src/build/android/findbugs_filter/findbugs_exclude.xml
new file mode 100644
index 0000000..49fa811
--- /dev/null
+++ b/src/build/android/findbugs_filter/findbugs_exclude.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+Documentation: http://findbugs.sourceforge.net/manual/filter.html
+In particular, ~ at the start of a string means it's a regex.
+-->
+<FindBugsFilter>
+  <!-- Skip the generated resource classes (including nested classes). -->
+  <Match>
+    <Class name="~org\.chromium\..*\.R(\$\w+)?" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*\.Manifest(\$\w+)?" />
+  </Match>
+  <!-- Ignore "reliance on default String encoding" warnings, as we're not multi-platform -->
+  <Bug pattern="DM_DEFAULT_ENCODING" />
+</FindBugsFilter>
diff --git a/src/build/android/findbugs_filter/findbugs_known_bugs.txt b/src/build/android/findbugs_filter/findbugs_known_bugs.txt
new file mode 100644
index 0000000..cf91422
--- /dev/null
+++ b/src/build/android/findbugs_filter/findbugs_known_bugs.txt
@@ -0,0 +1,142 @@
+H B Nm: The class name org.chromium.content.browser.test.util.TouchUtils shadows the simple name of the superclass android.test.TouchUtils  At TouchUtils.java
+H C EC: Using pointer equality to compare a JavaBridgeCoercionTest$CustomType with a JavaBridgeCoercionTest$CustomType2 in org.chromium.content.browser.JavaBridgeCoercionTest.testPassJavaObject()  At JavaBridgeCoercionTest.java
+H D RCN: Redundant nullcheck of org.chromium.content.browser.SandboxedProcessConnection.mConnectionParams, which is known to be non-null in org.chromium.content.browser.SandboxedProcessConnection.doConnectionSetup()  Redundant null check at SandboxedProcessConnection.java
+M D ST: Write to static field org.chromium.content.app.SandboxedProcessService.sContext from instance method org.chromium.content.app.SandboxedProcessService.onCreate()  At SandboxedProcessService.java
+H D ST: Write to static field org.chromium.net.test.util.TestWebServer.sInstance from instance method org.chromium.net.test.util.TestWebServer.shutdown()  At TestWebServer.java
+H V MS: org.chromium.android_webview.test.AndroidWebViewTestBase.WAIT_TIMEOUT_SECONDS isn't final but should be  At AndroidWebViewTestBase.java
+H V MS: org.chromium.android_webview.test.LoadDataWithBaseUrlTest.WAIT_TIMEOUT_SECONDS isn't final but should be  At LoadDataWithBaseUrlTest.java
+H V MS: org.chromium.content.browser.ContentViewTestBase.WAIT_TIMEOUT_SECONDS isn't final but should be  At ContentViewTestBase.java
+H V MS: org.chromium.content.browser.test.util.CallbackHelper.WAIT_TIMEOUT_SECONDS isn't final but should be  At CallbackHelper.java
+H V MS: org.chromium.content.browser.test.util.HistoryUtils.WAIT_TIMEOUT_SECONDS isn't final but should be  At HistoryUtils.java
+M B DE: org.chromium.net.X509Util.clearTestRootCertificates() might ignore java.io.IOException  At X509Util.java
+M B Nm: The method name org.chromium.base.test.util.ScalableTimeout.ScaleTimeout(long) doesn't start with a lower case letter  At ScalableTimeout.java
+M B RV: exceptional return value of java.io.File.delete() ignored in org.chromium.android_webview.test.ArchiveTest.doArchiveTest(AwContents, String, boolean, String)  At ArchiveTest.java
+M B RV: exceptional return value of java.io.File.delete() ignored in org.chromium.android_webview.test.ArchiveTest.testAutoBadPath()  At ArchiveTest.java
+M B RV: exceptional return value of java.io.File.delete() ignored in org.chromium.android_webview.test.ArchiveTest.testExplicitBadPath()  At ArchiveTest.java
+M B RV: exceptional return value of java.io.File.delete() ignored in org.chromium.android_webview.test.ArchiveTest.testExplicitGoodPath()  At ArchiveTest.java
+M B RV: exceptional return value of java.io.File.delete() ignored in org.chromium.base.test.util.TestFileUtil.deleteFile(String)  At TestFileUtil.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At HttpAuthDatabase.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeArrayCoercionTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeArrayTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeBasicsTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeChildFrameTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeCoercionTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeFieldsTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeReturnValuesTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeTestBase.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At PerfTraceEvent.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SandboxedProcessConnection.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SandboxedProcessLauncher.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SimpleSynchronizedMethod.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SimpleSynchronizedStaticMethod.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At TraceEvent.java
+M C CST: Shouldn't use synchronized(this), please narrow down the synchronization scope.  At HttpAuthDatabase.java
+M C CST: Shouldn't use synchronized(this), please narrow down the synchronization scope.  At SimpleSynchronizedThis.java
+M C IJU: TestCase org.chromium.android_webview.test.AndroidWebViewTestBase defines setUp that doesn't call super.setUp()  At AndroidWebViewTestBase.java
+M C IJU: TestCase org.chromium.android_webview.test.ArchiveTest defines setUp that doesn't call super.setUp()  At ArchiveTest.java
+M C IJU: TestCase org.chromium.android_webview.test.HttpAuthDatabaseTest defines setUp that doesn't call super.setUp()  At HttpAuthDatabaseTest.java
+M C IJU: TestCase org.chromium.android_webview.test.HttpAuthDatabaseTest defines tearDown that doesn't call super.tearDown()  At HttpAuthDatabaseTest.java
+M C IJU: TestCase org.chromium.chrome.testshell.ProviderBookmarkNodeTest defines setUp that doesn't call super.setUp()  At ProviderBookmarkNodeTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$10.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$11.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$11.method(int) defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$11.method(int, int) defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$14$1.method(int) defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$14.getInnerObject() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$15.getInnerObject() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$17.captureThreadId() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$19.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1Base.method(int) defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1InnerObject.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1Test.safe() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1Test.unsafe() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1TestObject.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$1TestReturner.getTest() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$20.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$24.allowed() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$25.allowed() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$25.disallowed() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$2Test.checkJavascriptInterfaceFoo() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$2Test.checkTestAnnotationFoo() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$3Base.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$3Test.allowed() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$3Test.blocked() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$4Base.base() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$7.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$8.method2() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C UMAC: Uncallable method org.chromium.content.browser.JavaBridgeBasicsTest$9.method() defined in anonymous class  At JavaBridgeBasicsTest.java
+M C USELESS_STRING: Invocation of toString on certChain in org.chromium.net.X509Util.verifyServerCertificates(byte[][], String)  At X509Util.java
+M D DLS: Dead store to context in org.chromium.android_webview.test.AndroidWebViewTestBase.createAwTestContainerViewOnMainSync(boolean, AwContentsClient)  At AndroidWebViewTestBase.java
+M D DLS: Dead store to eventTime in org.chromium.content.browser.LongPressDetectorTest$1.run()  At LongPressDetectorTest.java
+M D DLS: Dead store to prevEditableLength in org.chromium.content.browser.ImeAdapter$AdapterInputConnection.setEditableText(String, int, int, int, int)  At ImeAdapter.java
+M D DLS: Dead store to testUrl in org.chromium.android_webview.test.ClientOnPageFinishedTest.testOnPageFinishedNotCalledForValidSubresources()  At ClientOnPageFinishedTest.java
+M D DLS: Dead store to time in org.chromium.net.test.util.TestWebServer.setDateHeaders(HttpResponse)  At TestWebServer.java
+M D DMI: Hard coded reference to an absolute pathname in org.chromium.android_webview.test.ArchiveTest.testAutoBadPath()  At ArchiveTest.java
+M D DMI: Hard coded reference to an absolute pathname in org.chromium.android_webview.test.ArchiveTest.testExplicitBadPath()  At ArchiveTest.java
+M D ICAST: integral division result cast to double or float in org.chromium.content.browser.HandleView.setOrientation(int)  At HandleView.java
+M D REC: Exception is caught when Exception is not thrown in org.chromium.content.browser.test.util.UiUtils.findParentViewForIdAcrossActivities(int)  At UiUtils.java
+M D SF: Switch statement found in org.chromium.chrome.browser.ChromeBrowserProvider.insert(Uri, ContentValues) where one case falls through to the next case  At ChromeBrowserProvider.java
+M D SF: Switch statement found in org.chromium.chrome.browser.database.SQLiteCursor.fillWindow(int, CursorWindow) where default case is missing  At SQLiteCursor.java
+M D SF: Switch statement found in org.chromium.content.browser.ContentSettings$EventHandler$1.handleMessage(Message) where default case is missing  At ContentSettings.java
+M D SF: Switch statement found in org.chromium.content.browser.HandleView.onTouchEvent(MotionEvent) where default case is missing  At HandleView.java
+M D SF: Switch statement found in org.chromium.content.browser.ImeAdapter$AdapterInputConnection.performEditorAction(int) where default case is missing  At ImeAdapter.java
+M D ST: Write to static field org.chromium.net.test.util.TestWebServer.sInstance from instance method new org.chromium.net.test.util.TestWebServer(boolean)  At TestWebServer.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeBasicsTest$21.field  At JavaBridgeBasicsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.booleanField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.byteField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.charField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.customTypeField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.doubleField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.floatField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.intField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.longField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.objectField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.shortField  At JavaBridgeFieldsTest.java
+M D UrF: Unread public/protected field: org.chromium.content.browser.JavaBridgeFieldsTest$TestObject.stringField  At JavaBridgeFieldsTest.java
+M D UuF: Unused public or protected field: org.chromium.content.browser.JavaBridgeBasicsTest$19.field  In JavaBridgeBasicsTest.java
+M M IS: Inconsistent synchronization of org.chromium.content.browser.SandboxedProcessConnection.mPID; locked 66% of time  Unsynchronized access at SandboxedProcessConnection.java
+M M IS: Inconsistent synchronization of org.chromium.content.browser.SandboxedProcessConnection.mService; locked 55% of time  Unsynchronized access at SandboxedProcessConnection.java
+M M IS: Inconsistent synchronization of org.chromium.content.browser.SandboxedProcessConnection.mServiceConnectComplete; locked 60% of time  Unsynchronized access at SandboxedProcessConnection.java
+M M LI: Incorrect lazy initialization and update of static field org.chromium.base.SystemMonitor.sInstance in org.chromium.base.SystemMonitor.create(Context)  At SystemMonitor.java
+M M LI: Incorrect lazy initialization and update of static field org.chromium.content.browser.ContentVideoView.sContentVideoView in org.chromium.content.browser.ContentVideoView.createContentVideoView(int)  At ContentVideoView.java
+M M LI: Incorrect lazy initialization and update of static field org.chromium.net.test.util.TestWebServer.sReasons in org.chromium.net.test.util.TestWebServer.createResponse(int)  At TestWebServer.java
+M M LI: Incorrect lazy initialization of static field org.chromium.net.NetworkChangeNotifier.sInstance in org.chromium.net.NetworkChangeNotifier.init(Context)  At NetworkChangeNotifier.java
+M M UG: org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.getBooleanValue() is unsynchronized, org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.setBooleanValue(boolean) is synchronized  At JavaBridgeReturnValuesTest.java
+M M UG: org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.getStringValue() is unsynchronized, org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.setStringValue(String) is synchronized  At JavaBridgeReturnValuesTest.java
+M P SIC: Should org.chromium.android_webview.test.TestAwContentsClient$AddMessageToConsoleHelper be a _static_ inner class?  At TestAwContentsClient.java
+M P SIC: Should org.chromium.android_webview.test.TestContentProvider$ProviderStateCursor be a _static_ inner class?  At TestContentProvider.java
+M P SIC: Should org.chromium.content.browser.ContentViewGestureHandlerTest$GestureRecordingMotionEventDelegate$GestureEvent be a _static_ inner class?  At ContentViewGestureHandlerTest.java
+M P SIC: Should org.chromium.content.browser.JavaBridgeArrayCoercionTest$CustomType be a _static_ inner class?  At JavaBridgeArrayCoercionTest.java
+M P SIC: Should org.chromium.content.browser.JavaBridgeFieldsTest$CustomType be a _static_ inner class?  At JavaBridgeFieldsTest.java
+M P SIC: Should org.chromium.content.browser.JavaBridgeReturnValuesTest$CustomType be a _static_ inner class?  At JavaBridgeReturnValuesTest.java
+M P SIC: Should org.chromium.content.browser.PopupZoomerTest$CustomCanvasPopupZoomer be a _static_ inner class?  At PopupZoomerTest.java
+M P SS: Unread field: org.chromium.android_webview.test.util.ImagePageGenerator.IMAGE_PREFIX; should this field be static?  At ImagePageGenerator.java
+M P SS: Unread field: org.chromium.android_webview.test.util.ImagePageGenerator.IMAGE_SUFFIX; should this field be static?  At ImagePageGenerator.java
+M P SS: Unread field: org.chromium.native_test.ChromeNativeTestActivity.EXTRA_RUN_IN_SUB_THREAD; should this field be static?  At ChromeNativeTestActivity.java
+M P SS: Unread field: org.chromium.native_test.ChromeNativeTestActivity.TAG; should this field be static?  At ChromeNativeTestActivity.java
+M P UrF: Unread field: org.chromium.content.browser.ContentViewGestureHandlerTest$MockListener.mLastFling2  At ContentViewGestureHandlerTest.java
+M P UrF: Unread field: org.chromium.content.browser.ContentViewGestureHandlerTest$MockListener.mLastScroll1  At ContentViewGestureHandlerTest.java
+M P UrF: Unread field: org.chromium.content.browser.ContentViewGestureHandlerTest$MockListener.mLastScroll2  At ContentViewGestureHandlerTest.java
+M P UrF: Unread field: org.chromium.content.browser.ContentViewGestureHandlerTest$MockListener.mLastScrollDistanceX  At ContentViewGestureHandlerTest.java
+M P UrF: Unread field: org.chromium.content.browser.ContentViewGestureHandlerTest$MockListener.mLastScrollDistanceY  At ContentViewGestureHandlerTest.java
+M P UrF: Unread field: org.chromium.content.browser.HandleView.mHeight  At HandleView.java
+M P UuF: Unused field: org.chromium.content.browser.HandleView.mLongPressCallback  In HandleView.java
+M P UuF: Unused field: org.chromium.content.browser.JavaBridgeBasicsTest$19.privateField  In JavaBridgeBasicsTest.java
+M P UuF: Unused field: org.chromium.content.browser.JavaBridgeBasicsTest$23.field  In JavaBridgeBasicsTest.java
+M V EI2: new org.chromium.chrome.browser.FindMatchRectsDetails(int, RectF[], RectF) may expose internal representation by storing an externally mutable object into FindMatchRectsDetails.rects  At FindMatchRectsDetails.java
+M V EI2: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.setFavicon(byte[]) may expose internal representation by storing an externally mutable object into ChromeBrowserProvider$BookmarkNode.mFavicon  At ChromeBrowserProvider.java
+M V EI2: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.setThumbnail(byte[]) may expose internal representation by storing an externally mutable object into ChromeBrowserProvider$BookmarkNode.mThumbnail  At ChromeBrowserProvider.java
+M V EI2: org.chromium.content.browser.LoadUrlParams.setPostData(byte[]) may expose internal representation by storing an externally mutable object into LoadUrlParams.mPostData  At LoadUrlParams.java
+M V EI: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.favicon() may expose internal representation by returning ChromeBrowserProvider$BookmarkNode.mFavicon  At ChromeBrowserProvider.java
+M V EI: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.thumbnail() may expose internal representation by returning ChromeBrowserProvider$BookmarkNode.mThumbnail  At ChromeBrowserProvider.java
+M V MS: org.chromium.android_webview.AwResource.RAW_LOAD_ERROR isn't final and can't be protected from malicious code   In AwResource.java
+M V MS: org.chromium.android_webview.AwResource.RAW_NO_DOMAIN isn't final and can't be protected from malicious code   In AwResource.java
+M V MS: org.chromium.android_webview.AwResource.STRING_DEFAULT_TEXT_ENCODING isn't final and can't be protected from malicious code   In AwResource.java
+M V MS: org.chromium.content.browser.LoadUrlParams.LOAD_TYPE_BROWSER_INITIATED_HTTP_POST should be package protected  In LoadUrlParams.java
+M V MS: org.chromium.content.browser.LoadUrlParams.LOAD_TYPE_DATA isn't final and can't be protected from malicious code   In LoadUrlParams.java
+M V MS: org.chromium.content.browser.LoadUrlParams.LOAD_TYPE_DEFAULT should be package protected  In LoadUrlParams.java
+M V MS: org.chromium.content.browser.LoadUrlParams.UA_OVERRIDE_INHERIT should be package protected  In LoadUrlParams.java
+M V MS: org.chromium.content.browser.LoadUrlParams.UA_OVERRIDE_TRUE should be package protected  In LoadUrlParams.java
+M C RCN: Nullcheck of GestureDetector.mVelocityTracker at line 630 of value previously dereferenced in org.chromium.content.browser.third_party.GestureDetector.onTouchEvent(MotionEvent)  At GestureDetector.java
+M D SF: Switch statement found in org.chromium.content.browser.third_party.GestureDetector.onTouchEvent(MotionEvent) where default case is missing  At GestureDetector.java
+M D ST: Write to static field org.chromium.content.browser.ContentSettings.sAppCachePathIsSet from instance method org.chromium.content.browser.ContentSettings.setAppCachePath(String)  At ContentSettings.java
diff --git a/src/build/android/gdb_apk b/src/build/android/gdb_apk
new file mode 100755
index 0000000..7e657d6
--- /dev/null
+++ b/src/build/android/gdb_apk
@@ -0,0 +1,171 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach gdb to a running android application.  Similar to ndk-gdb.
+# Run with --annotate=3 if running under emacs (M-x gdb).
+#
+# By default it is used to debug content shell, if it is used to
+# debug other pieces, the '-p' and '-l' options are needed.
+# For *unittests_apk (like base_unittests_apk), run with:
+#  "gdb_apk -p org.chromium.native_test -l out/Release/lib.target -r"
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# args: command to run
+# Prints the command's stdout on stdout
+# Returns the command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must be '%%<code>'
+  RET=$(echo "$LASTLINE" | awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result in an empty line
+  LASTLINE=$(echo "$LASTLINE" | awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code
+  sed -e '$d' $TMPOUT && echo -n "$LASTLINE"
+  # Remove temp file
+  rm -f $TMPOUT
+  # Exit with the appropriate status
+  return $RET
+}
+
+adb=$(which adb)
+if [[ "$adb" = "" ]] ; then
+  echo "Need adb in your path"
+  exit 1
+fi
+
+usage() {
+  echo "usage: ${0##*/} [-p package_name] [-l shared_lib_dir] [-g gdb] [-r]"
+  echo "-p package_name     the android APK package to be debugged"
+  echo "-l shared_lib_dir   directory containing the native shared library"
+  echo "-g gdb_args         arguments for gdb, e.g.: -g '-n -write'"
+  echo "-r                  the target device is rooted"
+}
+
+process_options() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":p:l:g:r" OPTNAME; do
+    case "$OPTNAME" in
+      p)
+        package_name="$OPTARG"
+        ;;
+      l)
+        shared_lib_dir="$OPTARG"
+        ;;
+      g)
+        gdb_args="$OPTARG"
+        ;;
+      r)
+        rooted_phone=1
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+rooted_phone=0
+
+root=$(dirname $0)/../..
+package_name=org.chromium.content_shell
+shared_lib_dir=$root/out/${BUILDTYPE:-Debug}/lib.target
+gdb_args=''
+
+#process options
+process_options "$@"
+echo "Debug package $package_name"
+echo "Assume native shared library is under $shared_lib_dir"
+
+data_dir=/data/data/$package_name
+gdb_server_on_device=$data_dir/lib/gdbserver
+
+# Kill any running gdbserver
+pid=$(adb shell ps | awk '/gdbserver/ {print $2}')
+if [[ "$pid" != "" ]] ; then
+  if [[ $rooted_phone -eq 1 ]] ; then
+    adb shell kill $pid
+  else
+    adb shell run-as $package_name kill $pid
+  fi
+fi
+
+pid=$(adb_shell ps | awk "/$package_name$/ {print \$2}")
+if [[ "$pid" = "" ]] ; then
+  echo "No $package_name running?"
+  echo "Try this: adb shell am start -a android.intent.action.VIEW " \
+    "-n $package_name/.SomethingActivity (Something might be ContentShell)"
+  exit 2
+fi
+
+no_gdb_server=$(adb shell ls $gdb_server_on_device | grep 'No such file')
+if [[ "$no_gdb_server" != "" ]] ; then
+  echo "No gdb server on device at $gdb_server_on_device"
+  echo "Please install a debug build."
+  exit 3
+fi
+
+if [[ $rooted_phone -eq 1 ]] ; then
+  adb shell $gdb_server_on_device :4321 --attach $pid &
+  adb forward tcp:4321 tcp:4321
+else
+  adb shell run-as $package_name lib/gdbserver +debug-socket --attach $pid &
+  adb forward tcp:4321 localfilesystem:$data_dir/debug-socket
+fi
+sleep 2
+
+# Pull app_process and C libraries from device if needed
+app_process=${shared_lib_dir}/app_process
+if [[ ! -f ${app_process} ]] ; then
+  adb pull /system/bin/app_process ${app_process}
+  adb pull /system/lib/libc.so ${shared_lib_dir}
+fi
+
+# gdb commands
+cmdfile=$(mktemp /tmp/gdb_android_XXXXXXXX)
+cat >$cmdfile<<EOF
+# set solib-absolute-prefix null
+set solib-search-path ${shared_lib_dir}
+file ${app_process}
+target remote :4321
+EOF
+
+gdb=$(echo $ANDROID_TOOLCHAIN/../../linux-x86/bin/*gdb)
+if [[ ! -f ${gdb} ]] ; then
+  echo "Wow no gdb in env var ANDROID_TOOLCHAIN which is $ANDROID_TOOLCHAIN"
+  exit 4
+else
+  echo Using $gdb
+fi
+
+# ${gdb} -x $cmdfile $* $app_process
+${gdb} -x $cmdfile $gdb_args
+rm $cmdfile
diff --git a/src/build/android/gdb_content_shell b/src/build/android/gdb_content_shell
new file mode 100755
index 0000000..c8cb88f
--- /dev/null
+++ b/src/build/android/gdb_content_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach gdb to a running content shell. Redirect to the shell gdb_apk
+
+ROOT=$(cd "$(dirname $0)"; pwd)
+echo "WARNING: This script is deprecated, consider using \
+adb_gdb_content_shell instead!"
+if [ $# -gt 0 ]; then
+  exec ${ROOT}/gdb_apk -r -g "$*"
+else
+  exec ${ROOT}/gdb_apk -r -p org.chromium.content_shell
+fi
diff --git a/src/build/android/gtest_filter/base_unittests_disabled b/src/build/android/gtest_filter/base_unittests_disabled
new file mode 100644
index 0000000..37bf65b
--- /dev/null
+++ b/src/build/android/gtest_filter/base_unittests_disabled
@@ -0,0 +1,27 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+ProcessUtilTest.GetAppOutputRestrictedSIGPIPE
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# TODO(zhenghao): Fail from build 6102 r123270. http://crbug.com/115612
+StackContainer.BufferAlignment
+# Failed on bot since it was first introduced.
+FileUtilProxyTest.Touch
+# Flaky?
+ScopedJavaRefTest.RefCounts
+# Death tests are not supported with apks.
+*DeathTest*
+
diff --git a/src/build/android/gtest_filter/base_unittests_emulator_additional_disabled b/src/build/android/gtest_filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..85e8fd6
--- /dev/null
+++ b/src/build/android/gtest_filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from the emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/src/build/android/gtest_filter/breakpad_unittests_disabled b/src/build/android/gtest_filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..32b07c9
--- /dev/null
+++ b/src/build/android/gtest_filter/breakpad_unittests_disabled
@@ -0,0 +1,5 @@
+FileIDStripTest.StripSelf
+# crbug.com/165916
+ExceptionHandlerTest.InstructionPointerMemory
+ExceptionHandlerTest.InstructionPointerMemoryMinBound
+ExceptionHandlerTest.InstructionPointerMemoryMaxBound
diff --git a/src/build/android/gtest_filter/content_unittests_disabled b/src/build/android/gtest_filter/content_unittests_disabled
new file mode 100644
index 0000000..61f0627
--- /dev/null
+++ b/src/build/android/gtest_filter/content_unittests_disabled
@@ -0,0 +1,21 @@
+# List of suppressions
+
+AudioRendererHostTest.CreateAndClose
+AudioRendererHostTest.CreateAndShutdown
+AudioRendererHostTest.CreatePlayAndClose
+AudioRendererHostTest.CreatePlayPauseAndClose
+AudioRendererHostTest.SetVolume
+AudioRendererHostTest.CreatePlayAndShutdown
+AudioRendererHostTest.CreatePlayPauseAndShutdown
+AudioRendererHostTest.SimulateError
+AudioRendererHostTest.SimulateErrorAndClose
+# crbug.com/104950
+DeviceOrientationProviderTest.ObserverNotRemoved
+DeviceOrientationProviderTest.StartFailing
+# crbug.com/139095
+RenderWidgetTest.OnMsgPaintAtSize
+# crbug.com/147549
+GamepadProviderTest.PollingAccess
+PepperGamepadHostTest.WaitForReply
+# crbug.com/159234
+WebContentsVideoCaptureDeviceTest.*
diff --git a/src/build/android/gtest_filter/ipc_tests_disabled b/src/build/android/gtest_filter/ipc_tests_disabled
new file mode 100644
index 0000000..e6d5f2d
--- /dev/null
+++ b/src/build/android/gtest_filter/ipc_tests_disabled
@@ -0,0 +1,15 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
+
+# MultiProcessTest related failures. These tests fail if DCHECK is enabled.
+IPCChannelPosixTest.AdvancedConnected
+IPCChannelPosixTest.ResetState
+IPCChannelPosixTest.MultiConnection
+IPCFuzzingTest.SanityTest
+IPCFuzzingTest.MsgBadPayloadArgs
+IPCFuzzingTest.MsgBadPayloadShort
+IPCChannelTest.DescriptorTest
+IPCChannelTest.ChannelTest
+IPCChannelTest.ChannelProxyTest
+IPCChannelTest.SendMessageInChannelConnected
+SyncSocketTest.SanityTest
diff --git a/src/build/android/gtest_filter/media_unittests_disabled b/src/build/android/gtest_filter/media_unittests_disabled
new file mode 100644
index 0000000..2690683
--- /dev/null
+++ b/src/build/android/gtest_filter/media_unittests_disabled
@@ -0,0 +1,11 @@
+# List of suppressions
+
+# Death tests are not supported on APK
+# http://crbug.com/138855
+CompositeFilterDeathTest.*
+
+# http://crbug.com/138833
+AesDecryptorTest.*
+
+# crbug.com/138930
+SkCanvasVideoRendererTest.*
diff --git a/src/build/android/gtest_filter/net_unittests_disabled b/src/build/android/gtest_filter/net_unittests_disabled
new file mode 100644
index 0000000..7828aec
--- /dev/null
+++ b/src/build/android/gtest_filter/net_unittests_disabled
@@ -0,0 +1,51 @@
+# List of suppressions.
+CertVerifyProcTest.ExtraneousMD5RootCert
+CertVerifyProcTest.IntermediateCARequireExplicitPolicy
+CertVerifyProcTest.PublicKeyHashes
+CertVerifyProcTest.RejectWeakKeys
+CertVerifyProcTest.VerifyReturnChainBasic
+CertVerifyProcTest.VerifyReturnChainFiltersUnrelatedCerts
+CertVerifyProcTest.VerifyReturnChainProperlyOrdered
+HTTPSCRLSetTest.ExpiredCRLSet
+HTTPSEVCRLSetTest.FreshCRLSet
+HTTPSRequestTest.ClientAuthTest
+HTTPSRequestTest.ResumeTest
+HTTPSRequestTest.SSLSessionCacheShardTest
+PythonUtils.PythonRunTime
+TransportSecurityStateTest.ValidPinsHeadersSHA1
+TransportSecurityStateTest.ValidPinsHeadersSHA256
+URLRequestTestHTTP.ProcessSTS
+URLRequestTestHTTP.ProcessSTSOnce
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/0
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/1
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/2
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/0
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/1
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/2
+# Fail only on bots.
+CertVerifyProcTest.TestKnownRoot
+CertVerifyProcTest.WithoutRevocationChecking
+HttpCache.RangeGET_Cancel
+HttpCache.RangeGET_Cancel2
+HttpCache.RangeGET_OK
+HttpCache.RangeGET_Previous200
+HttpCache.RangeGET_Revalidate2
+HttpCache.RangeGET_SyncOK
+HttpCache.TypicalGET_ConditionalRequest
+# Death tests are not supported with apks.
+*DeathTest*
+# These are death tests and thus also disabled.
+PrioritizedDispatcherTest.CancelNull
+PrioritizedDispatcherTest.CancelMissing
diff --git a/src/build/android/gtest_filter/sync_unit_tests_disabled b/src/build/android/gtest_filter/sync_unit_tests_disabled
new file mode 100644
index 0000000..cc4b72d
--- /dev/null
+++ b/src/build/android/gtest_filter/sync_unit_tests_disabled
@@ -0,0 +1,4 @@
+SyncHttpBridgeTest.*
+
+# crbug.com/144422
+OnDiskSyncableDirectory.FailInitialWrite
diff --git a/src/build/android/gtest_filter/ui_unittests_disabled b/src/build/android/gtest_filter/ui_unittests_disabled
new file mode 100644
index 0000000..6515b19
--- /dev/null
+++ b/src/build/android/gtest_filter/ui_unittests_disabled
@@ -0,0 +1,40 @@
+# List of suppressions
+# This file was automatically generated by build/android/run_tests.py
+CanvasTest.StringSizeEmptyString
+CanvasTest.StringWidth
+ClipboardTest.RTFTest
+FontListTest.FontDescString_FromFont
+FontListTest.FontDescString_FromFontVector
+FontListTest.FontDescString_FromFontWithNonNormalStyle
+FontListTest.Fonts_DeriveFontList
+FontListTest.Fonts_DeriveFontListWithSize
+FontListTest.Fonts_DescStringWithStyleInFlexibleFormat_RoundTrip
+FontListTest.Fonts_FontVector_RoundTrip
+FontListTest.Fonts_FromDescString
+FontListTest.Fonts_FromDescStringInFlexibleFormat
+FontListTest.Fonts_FromDescStringWithStyleInFlexibleFormat
+FontListTest.Fonts_FromFont
+FontListTest.Fonts_FromFontVector
+FontListTest.Fonts_FromFontWithNonNormalStyle
+FontListTest.Fonts_GetStyle
+FontTest.Ascent
+FontTest.AvgCharWidth
+FontTest.AvgWidths
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+FontTest.Widths
+ResourceBundleTest.DelegateGetFont
+TextEliderTest.ElideEmail
+TextEliderTest.ElideEmailMoreSpace
+TextEliderTest.ElideRectangleText
+TextEliderTest.ElideRectangleTextLongWords
+TextEliderTest.ElideRectangleTextPunctuation
+TextEliderTest.ElideTextLongStrings
+TextEliderTest.ElideTextSurrogatePairs
+TextEliderTest.ElideTextTruncate
+TextEliderTest.TestFileURLEliding
+TextEliderTest.TestFilenameEliding
+TextEliderTest.TestGeneralEliding
+TextEliderTest.TestMoreEliding
+TextEliderTest.TestTrailingEllipsisSlashEllipsisHack
diff --git a/src/build/android/gtest_filter/unit_tests_disabled b/src/build/android/gtest_filter/unit_tests_disabled
new file mode 100644
index 0000000..9228542
--- /dev/null
+++ b/src/build/android/gtest_filter/unit_tests_disabled
@@ -0,0 +1,121 @@
+# List of suppressions
+
+# crbug.com/139429
+BrowserMainTest.WarmConnectionFieldTrial_Invalid
+BrowserMainTest.WarmConnectionFieldTrial_Random
+BrowserMainTest.WarmConnectionFieldTrial_WarmestSocket
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+WebsiteSettingsTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# Extension support is limited on Android.
+# Some of these can be enabled if we register extension related prefs in
+# browser_prefs.cc
+ExtensionTest.*
+ExtensionAPI.*
+ExtensionFileUtil.*
+ExtensionPermissionsTest.*
+ExtensionUnpackerTest.*
+ActiveTabTest.*
+ExtensionAppsPromo.*
+ComponentLoaderTest.*
+ExtensionFromUserScript.*
+ExtensionFromWebApp.*
+ExtensionIconManagerTest.*
+ExtensionServiceTest.*
+ExtensionServiceTestSimple.*
+ExtensionSourcePriorityTest.*
+ExtensionSpecialStoragePolicyTest.*
+ExternalPolicyProviderTest.*
+MenuManagerTest.*
+PageActionControllerTest.*
+PermissionsUpdaterTest.*
+ImageLoaderTest.*
+ImageLoadingTrackerTest.*
+ScriptBadgeControllerTest.*
+ExtensionSettingsFrontendTest.*
+ExtensionSettingsSyncTest.*
+ExtensionUpdaterTest.*
+UserScriptListenerTest.*
+WebApplicationTest.GetShortcutInfoForTab
+ExtensionActionIconFactoryTest.*
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/138275
+PrerenderTest.*
+RenderWidgetTest.OnMsgPaintAtSize
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/144227
+ExtensionIconImageTest.*
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/src/build/android/gtest_filter/webkit_unit_tests_disabled b/src/build/android/gtest_filter/webkit_unit_tests_disabled
new file mode 100644
index 0000000..a0864d3
--- /dev/null
+++ b/src/build/android/gtest_filter/webkit_unit_tests_disabled
@@ -0,0 +1,8 @@
+# List of suppressions
+
+# crbug.com/159935
+ScrollingCoordinatorChromiumTest.nonFastScrollableRegion
+WebCompositorInputHandlerImplTest.gestureFlingAnimates
+WebCompositorInputHandlerImplTest.gestureFlingTransferResets
+WebPageSerializerTest.HTMLNodes
+
diff --git a/src/build/android/java_cpp_template.gypi b/src/build/android/java_cpp_template.gypi
new file mode 100644
index 0000000..3c5704e
--- /dev/null
+++ b/src/build/android/java_cpp_template.gypi
@@ -0,0 +1,70 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate Java source files from templates that are processed
+# through the host C pre-processor.
+#
+# This assumes a GNU-compatible pre-processor installed as 'cpp'.
+# Only tested on Linux.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_net_java_constants',
+#    'type': 'none',
+#    'sources': [
+#      'net/android/NetError.template',
+#    ],
+#    'variables': {
+#      'package_name': 'org.chromium.net',
+#      'template_deps': ['net/base/certificate_mime_type_list.h'],
+#    },
+#    'includes': [ '../build/android/java_cpp_template.gypi' ],
+#  },
+#
+# The 'sources' entry should only list the template file. The template file
+# itself should use the 'ClassName.template' format, and will generate
+# 'gen/templates/<package-name>/ClassName.java'. The files which the template
+# depends on, and which are typically included by the template, should be
+# listed in the template_deps variable. Any change to them will force a
+# rebuild of the template, and hence of any source that depends on it.
+#
+
+{
+  # Location where all generated Java sources will be placed.
+  'variables': {
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(package_name)'
+  },
+  # Ensure that the output directory is used in the class path
+  # when building targets that depend on this one.
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+    },
+  },
+  # Define a single rule that will be applied to each .template file
+  # listed in 'sources'.
+  'rules': [
+    {
+      'rule_name': 'generate_java_constants',
+      'extension': 'template',
+      # Set template_deps as additional dependencies.
+      'inputs': ['<@(template_deps)'],
+      'outputs': [
+        '<(output_dir)/<(RULE_INPUT_ROOT).java'
+      ],
+      'action': [
+        'cpp',                 # invoke host pre-processor.
+        '-x', 'c-header',      # treat sources as C header files
+        '-P',                  # disable line markers, i.e. '#line 309'
+        '-I', '<(DEPTH)',      # Add project top-level to include path
+        '-o', '<@(_outputs)',  # Specify output file
+        '<(RULE_INPUT_PATH)',  # Specify input file
+      ],
+      'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)',
+    }
+  ],
+}
diff --git a/src/build/android/lighttpd_server.py b/src/build/android/lighttpd_server.py
new file mode 100755
index 0000000..11ae794
--- /dev/null
+++ b/src/build/android/lighttpd_server.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  def _GetRandomPort(self):
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or not 'in use' in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  def _KillProcessListeningOnPort(self, port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  def _GetDefaultBaseConfig(self):
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/prepare_library_for_apk b/src/build/android/prepare_library_for_apk
new file mode 100755
index 0000000..ce414b3
--- /dev/null
+++ b/src/build/android/prepare_library_for_apk
@@ -0,0 +1,19 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a stripped copy of a library for inclusion in an apk.
+
+if [[ $# -ne 3 ]]
+then
+  echo "Usage: prepare_library_for_apk android_strip path/to/library stripped/library/output/path"
+  exit 1
+fi
+
+ANDROID_STRIP=$1
+LIBRARY=$2
+STRIPPED=$3
+
+set -ex
+$ANDROID_STRIP --strip-unneeded $LIBRARY -o $STRIPPED
diff --git a/src/build/android/process_resources.py b/src/build/android/process_resources.py
new file mode 100755
index 0000000..8a46e5c
--- /dev/null
+++ b/src/build/android/process_resources.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android library resources to generate R.java and crunched images."""
+
+import optparse
+import os
+import subprocess
+
+
+BUILD_ANDROID_DIR = os.path.dirname(__file__)
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK platform tools folder')
+  parser.add_option('--R-package', help='Java package for generated R.java')
+  parser.add_option('--R-dir', help='directory to hold generated R.java')
+  parser.add_option('--res-dir', help='directory containing resources')
+  parser.add_option('--crunched-res-dir',
+                    help='directory to hold crunched resources')
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('android_sdk', 'android_sdk_tools', 'R_package',
+                      'R_dir', 'res_dir', 'crunched_res_dir')
+  for option_name in required_options:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+  return options
+
+
+def main():
+  options = ParseArgs()
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = os.path.join(options.android_sdk_tools, 'aapt')
+  dummy_manifest = os.path.join(BUILD_ANDROID_DIR, 'AndroidManifest.xml')
+
+  # Generate R.java. This R.java contains non-final constants and is used only
+  # while compiling the library jar (e.g. chromium_content.jar). When building
+  # an apk, a new R.java file with the correct resource -> ID mappings will be
+  # generated by merging the resources from all libraries and the main apk
+  # project.
+  subprocess.check_call([aapt,
+                         'package',
+                         '-m',
+                         '--non-constant-id',
+                         '--custom-package', options.R_package,
+                         '-M', dummy_manifest,
+                         '-S', options.res_dir,
+                         '-I', android_jar,
+                         '-J', options.R_dir])
+
+  # Crunch image resources. This shrinks png files and is necessary for 9-patch
+  # images to display correctly.
+  subprocess.check_call([aapt,
+                         'crunch',
+                         '-S', options.res_dir,
+                         '-C', options.crunched_res_dir])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/src/build/android/pylib/__init__.py b/src/build/android/pylib/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/src/build/android/pylib/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/src/build/android/pylib/android_commands.py b/src/build/android/pylib/android_commands.py
new file mode 100644
index 0000000..66f705f
--- /dev/null
+++ b/src/build/android/pylib/android_commands.py
@@ -0,0 +1,1121 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+"""
+
+import collections
+import datetime
+import logging
+import os
+import re
+import shlex
+import subprocess
+import sys
+import tempfile
+import time
+
+import io_stats_parser
+try:
+  import pexpect
+except:
+  pexpect = None
+
+CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+sys.path.append(os.path.join(CHROME_SRC, 'third_party', 'android_testrunner'))
+import adb_interface
+
+import cmd_helper
+import errors  # errors is under ../../../third_party/android_testrunner/errors.py
+
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start end with pexpect,
+# see http://www.noah.org/python/pexpect/#doc for explanation why.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+MEMORY_INFO_RE = re.compile('^(?P<key>\w+):\s+(?P<usage_kb>\d+) kB$')
+NVIDIA_MEMORY_INFO_RE = re.compile('^\s*(?P<user>\S+)\s*(?P<name>\S+)\s*'
+                                   '(?P<pid>\d+)\s*(?P<usage_bytes>\d+)$')
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
+MD5SUM_DEVICE_PATH = '/data/local/tmp/md5sum_bin'
+
+def GetEmulators():
+  """Returns a list of emulators.  Does not filter by status (e.g. offline).
+
+  Both devices starting with 'emulator' in the output below will be returned:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+    emulator-5558   device
+  """
+  re_device = re.compile('^emulator-[0-9]+', re.MULTILINE)
+  devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+  return devices
+
+
+def GetAVDs():
+  """Returns a list of AVDs."""
+  re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE)
+  avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd']))
+  return avds
+
+
+def GetAttachedDevices():
+  """Returns a list of attached, online android devices.
+
+  If a preferred device has been set with ANDROID_SERIAL, it will be first in
+  the returned list.
+
+  Example output:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+  """
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+  devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+  preferred_device = os.environ.get('ANDROID_SERIAL')
+  if preferred_device in devices:
+    devices.remove(preferred_device)
+    devices.insert(0, preferred_device)
+  return devices
+
+def IsDeviceAttached(device):
+  return device in GetAttachedDevices()
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+  """Gets a list of files from `ls` command output.
+
+  Python's os.walk isn't used because it doesn't work over adb shell.
+
+  Args:
+    path: The path to list.
+    ls_output: A list of lines returned by an `ls -lR` command.
+    re_file: A compiled regular expression which parses a line into named groups
+        consisting of at minimum "filename", "date", "time", "size" and
+        optionally "timezone".
+    utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+        2-digit string giving the number of UTC offset hours, and MM is a
+        2-digit string giving the number of UTC offset minutes. If the input
+        utc_offset is None, will try to look for the value of "timezone" if it
+        is specified in re_file.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...} where:
+      name: The file name relative to |path|'s directory.
+      size: The file size in bytes (0 for directories).
+      lastmod: The file last modification date in UTC.
+  """
+  re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+  path_dir = os.path.dirname(path)
+
+  current_dir = ''
+  files = {}
+  for line in ls_output:
+    directory_match = re_directory.match(line)
+    if directory_match:
+      current_dir = directory_match.group('dir')
+      continue
+    file_match = re_file.match(line)
+    if file_match:
+      filename = os.path.join(current_dir, file_match.group('filename'))
+      if filename.startswith(path_dir):
+        filename = filename[len(path_dir)+1:]
+      lastmod = datetime.datetime.strptime(
+          file_match.group('date') + ' ' + file_match.group('time')[:5],
+          '%Y-%m-%d %H:%M')
+      if not utc_offset and 'timezone' in re_file.groupindex:
+        utc_offset = file_match.group('timezone')
+      if isinstance(utc_offset, str) and len(utc_offset) == 5:
+        utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+                                       minutes=int(utc_offset[3:5]))
+        if utc_offset[0:1] == '-':
+          utc_delta = -utc_delta
+        lastmod -= utc_delta
+      files[filename] = (int(file_match.group('size')), lastmod)
+  return files
+
+def _ComputeFileListHash(md5sum_output):
+  """Returns a list of MD5 strings from the provided md5sum output."""
+  return [line.split('  ')[0] for line in md5sum_output]
+
+def _HasAdbPushSucceeded(command_output):
+  """Returns whether adb push has succeeded from the provided output."""
+  if not command_output:
+    return False
+  # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+  # Errors look like this: "failed to copy  ... "
+  if not re.search('^[0-9]', command_output.splitlines()[-1]):
+    logging.critical('PUSH FAILED: ' + command_output)
+    return False
+  return True
+
+def GetLogTimestamp(log_line, year):
+  """Returns the timestamp of the given |log_line| in the given year."""
+  try:
+    return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]),
+                                      '%Y-%m-%d %H:%M:%S.%f')
+  except (ValueError, IndexError):
+    logging.critical('Error reading timestamp from ' + log_line)
+    return None
+
+
+class AndroidCommands(object):
+  """Helper class for communicating with Android device via adb.
+
+  Args:
+    device: If given, adb commands are only sent to the device of this ID.
+        Otherwise commands are sent to all attached devices.
+  """
+
+  def __init__(self, device=None):
+    self._adb = adb_interface.AdbInterface()
+    if device:
+      self._adb.SetTargetSerial(device)
+    self._device = device
+    self._logcat = None
+    self.logcat_process = None
+    self._pushed_files = []
+    self._device_utc_offset = self.RunShellCommand('date +%z')[0]
+    self._md5sum_path = ''
+    self._external_storage = ''
+
+  def Adb(self):
+    """Returns our AdbInterface to avoid us wrapping all its methods."""
+    return self._adb
+
+  def IsRootEnabled(self):
+    """Checks if root is enabled on the device."""
+    root_test_output = self.RunShellCommand('ls /root') or ['']
+    return not 'Permission denied' in root_test_output[0]
+
+  def EnableAdbRoot(self):
+    """Enables adb root on the device.
+
+    Returns:
+      True: if output from executing adb root was as expected.
+      False: otherwise.
+    """
+    return_value = self._adb.EnableAdbRoot()
+    # EnableAdbRoot inserts a call for wait-for-device only when adb logcat
+    # output matches what is expected. Just to be safe add a call to
+    # wait-for-device.
+    self._adb.SendCommand('wait-for-device')
+    return return_value
+
+  def GetDeviceYear(self):
+    """Returns the year information of the date on device."""
+    return self.RunShellCommand('date +%Y')[0]
+
+  def GetExternalStorage(self):
+    if not self._external_storage:
+      self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0]
+      assert self._external_storage, 'Unable to find $EXTERNAL_STORAGE'
+    return self._external_storage
+
+  def WaitForDevicePm(self):
+    """Blocks until the device's package manager is available.
+
+    To workaround http://b/5201039, we restart the shell and retry if the
+    package manager isn't back after 120 seconds.
+
+    Raises:
+      errors.WaitForResponseTimedOutError after max retries reached.
+    """
+    last_err = None
+    retries = 3
+    while retries:
+      try:
+        self._adb.WaitForDevicePm()
+        return  # Success
+      except errors.WaitForResponseTimedOutError as e:
+        last_err = e
+        logging.warning('Restarting and retrying after timeout: %s', e)
+        retries -= 1
+        self.RestartShell()
+    raise last_err  # Only reached after max retries, re-raise the last error.
+
+  def RestartShell(self):
+    """Restarts the shell on the device. Does not block for it to return."""
+    self.RunShellCommand('stop')
+    self.RunShellCommand('start')
+
+  def Reboot(self, full_reboot=True):
+    """Reboots the device and waits for the package manager to return.
+
+    Args:
+      full_reboot: Whether to fully reboot the device or just restart the shell.
+    """
+    # TODO(torne): hive can't reboot the device either way without breaking the
+    # connection; work out if we can handle this better
+    if os.environ.get('USING_HIVE'):
+      logging.warning('Ignoring reboot request as we are on hive')
+      return
+    if full_reboot or not self.IsRootEnabled():
+      self._adb.SendCommand('reboot')
+      timeout = 300
+    else:
+      self.RestartShell()
+      timeout = 120
+    # To run tests we need at least the package manager and the sd card (or
+    # other external storage) to be ready.
+    self.WaitForDevicePm()
+    self.WaitForSdCardReady(timeout)
+
+  def Uninstall(self, package):
+    """Uninstalls the specified package from the device.
+
+    Args:
+      package: Name of the package to remove.
+
+    Returns:
+      A status string returned by adb uninstall
+    """
+    uninstall_command = 'uninstall %s' % package
+
+    logging.info('>>> $' + uninstall_command)
+    return self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+  def Install(self, package_file_path, reinstall=False):
+    """Installs the specified package to the device.
+
+    Args:
+      package_file_path: Path to .apk file to install.
+      reinstall: Reinstall an existing apk, keeping the data.
+
+    Returns:
+      A status string returned by adb install
+    """
+    assert os.path.isfile(package_file_path), ('<%s> is not file' %
+                                               package_file_path)
+
+    install_cmd = ['install']
+
+    if reinstall:
+      install_cmd.append('-r')
+
+    install_cmd.append(package_file_path)
+    install_cmd = ' '.join(install_cmd)
+
+    logging.info('>>> $' + install_cmd)
+    return self._adb.SendCommand(install_cmd, timeout_time=2*60, retry_count=0)
+
+  def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
+                     reboots_on_failure=2):
+    """Installs specified package and reboots device on timeouts.
+
+    Args:
+      apk_path: Path to .apk file to install.
+      keep_data: Reinstalls instead of uninstalling first, preserving the
+        application data.
+      package_name: Package name (only needed if keep_data=False).
+      reboots_on_failure: number of time to reboot if package manager is frozen.
+
+    Returns:
+      A status string returned by adb install
+    """
+    reboots_left = reboots_on_failure
+    while True:
+      try:
+        if not keep_data:
+          assert package_name
+          self.Uninstall(package_name)
+        install_status = self.Install(apk_path, reinstall=keep_data)
+        if 'Success' in install_status:
+          return install_status
+      except errors.WaitForResponseTimedOutError:
+        print '@@@STEP_WARNINGS@@@'
+        logging.info('Timeout on installing %s' % apk_path)
+
+      if reboots_left <= 0:
+        raise Exception('Install failure')
+
+      # Force a hard reboot on last attempt
+      self.Reboot(full_reboot=(reboots_left == 1))
+      reboots_left -= 1
+
+  def MakeSystemFolderWritable(self):
+    """Remounts the /system folder rw."""
+    out = self._adb.SendCommand('remount')
+    if out.strip() != 'remount succeeded':
+      raise errors.MsgException('Remount failed: %s' % out)
+
+  def RestartAdbServer(self):
+    """Restart the adb server."""
+    self.KillAdbServer()
+    self.StartAdbServer()
+
+  def KillAdbServer(self):
+    """Kill adb server."""
+    adb_cmd = ['adb', 'kill-server']
+    return cmd_helper.RunCmd(adb_cmd)
+
+  def StartAdbServer(self):
+    """Start adb server."""
+    adb_cmd = ['adb', 'start-server']
+    return cmd_helper.RunCmd(adb_cmd)
+
+  def WaitForSystemBootCompleted(self, wait_time):
+    """Waits for targeted system's boot_completed flag to be set.
+
+    Args:
+      wait_time: time in seconds to wait
+
+    Raises:
+      WaitForResponseTimedOutError if wait_time elapses and flag still not
+      set.
+    """
+    logging.info('Waiting for system boot completed...')
+    self._adb.SendCommand('wait-for-device')
+    # Now the device is there, but system not boot completed.
+    # Query the sys.boot_completed flag with a basic command
+    boot_completed = False
+    attempts = 0
+    wait_period = 5
+    while not boot_completed and (attempts * wait_period) < wait_time:
+      output = self._adb.SendShellCommand('getprop sys.boot_completed',
+                                          retry_count=1)
+      output = output.strip()
+      if output == '1':
+        boot_completed = True
+      else:
+        # If 'error: xxx' returned when querying the flag, it means
+        # adb server lost the connection to the emulator, so restart the adb
+        # server.
+        if 'error:' in output:
+          self.RestartAdbServer()
+        time.sleep(wait_period)
+        attempts += 1
+    if not boot_completed:
+      raise errors.WaitForResponseTimedOutError(
+          'sys.boot_completed flag was not set after %s seconds' % wait_time)
+
+  def WaitForSdCardReady(self, timeout_time):
+    """Wait for the SD card ready before pushing data into it."""
+    logging.info('Waiting for SD card ready...')
+    sdcard_ready = False
+    attempts = 0
+    wait_period = 5
+    external_storage = self.GetExternalStorage()
+    while not sdcard_ready and attempts * wait_period < timeout_time:
+      output = self.RunShellCommand('ls ' + external_storage)
+      if output:
+        sdcard_ready = True
+      else:
+        time.sleep(wait_period)
+        attempts += 1
+    if not sdcard_ready:
+      raise errors.WaitForResponseTimedOutError(
+          'SD card not ready after %s seconds' % timeout_time)
+
+  # It is tempting to turn this function into a generator, however this is not
+  # possible without using a private (local) adb_shell instance (to ensure no
+  # other command interleaves usage of it), which would defeat the main aim of
+  # being able to reuse the adb shell instance across commands.
+  def RunShellCommand(self, command, timeout_time=20, log_result=False):
+    """Send a command to the adb shell and return the result.
+
+    Args:
+      command: String containing the shell command to send. Must not include
+               the single quotes as we use them to escape the whole command.
+      timeout_time: Number of seconds to wait for command to respond before
+        retrying, used by AdbInterface.SendShellCommand.
+      log_result: Boolean to indicate whether we should log the result of the
+                  shell command.
+
+    Returns:
+      list containing the lines of output received from running the command
+    """
+    logging.info('>>> $' + command)
+    if "'" in command: logging.warning(command + " contains ' quotes")
+    result = self._adb.SendShellCommand(
+        "'%s'" % command, timeout_time).splitlines()
+    if ['error: device not found'] == result:
+      raise errors.DeviceUnresponsiveError('device not found')
+    if log_result:
+      logging.info('\n>>> '.join(result))
+    return result
+
+  def GetShellCommandStatusAndOutput(self, command, timeout_time=20,
+                                     log_result=False):
+    """See RunShellCommand() above.
+
+    Returns:
+      The tuple (exit code, list of output lines).
+    """
+    lines = self.RunShellCommand(
+        command + '; echo %$?', timeout_time, log_result)
+    last_line = lines[-1]
+    status_pos = last_line.rfind('%')
+    assert status_pos >= 0
+    status = int(last_line[status_pos + 1:])
+    if status_pos == 0:
+      lines = lines[:-1]
+    else:
+      lines = lines[:-1] + last_line[:status_pos]
+    return (status, lines)
+
+  def KillAll(self, process):
+    """Android version of killall, connected via adb.
+
+    Args:
+      process: name of the process to kill off
+
+    Returns:
+      the number of processes killed
+    """
+    pids = self.ExtractPid(process)
+    if pids:
+      self.RunShellCommand('kill ' + ' '.join(pids))
+    return len(pids)
+
+  def KillAllBlocking(self, process, timeout_sec):
+    """Blocking version of killall, connected via adb.
+
+    This waits until no process matching the corresponding name appears in ps'
+    output anymore.
+
+    Args:
+      process: name of the process to kill off
+      timeout_sec: the timeout in seconds
+
+    Returns:
+      the number of processes killed
+    """
+    processes_killed = self.KillAll(process)
+    if processes_killed:
+      elapsed = 0
+      wait_period = 0.1
+      # Note that this doesn't take into account the time spent in ExtractPid().
+      while self.ExtractPid(process) and elapsed < timeout_sec:
+        time.sleep(wait_period)
+        elapsed += wait_period
+      if elapsed >= timeout_sec:
+        return 0
+    return processes_killed
+
+  def StartActivity(self, package, activity, wait_for_completion=False,
+                    action='android.intent.action.VIEW',
+                    category=None, data=None,
+                    extras=None, trace_file_name=None,
+                    force_stop=False):
+    """Starts |package|'s activity on the device.
+
+    Args:
+      package: Name of package to start (e.g. 'com.google.android.apps.chrome').
+      activity: Name of activity (e.g. '.Main' or
+        'com.google.android.apps.chrome.Main').
+      wait_for_completion: wait for the activity to finish launching (-W flag).
+      action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
+      category: string (e.g. "android.intent.category.HOME")
+      data: Data string to pass to activity (e.g. 'http://www.example.com/').
+      extras: Dict of extras to pass to activity. Values are significant.
+      trace_file_name: If used, turns on and saves the trace to this file name.
+      force_stop: force stop the target app before starting the activity (-S
+        flag).
+    """
+    cmd = 'am start -a %s' % action
+    if force_stop:
+      cmd += ' -S'
+    if wait_for_completion:
+      cmd += ' -W'
+    if category:
+      cmd += ' -c %s' % category
+    if package and activity:
+      cmd += ' -n %s/%s' % (package, activity)
+    if data:
+      cmd += ' -d "%s"' % data
+    if extras:
+      for key in extras:
+        value = extras[key]
+        if isinstance(value, str):
+          cmd += ' --es'
+        elif isinstance(value, bool):
+          cmd += ' --ez'
+        elif isinstance(value, int):
+          cmd += ' --ei'
+        else:
+          raise NotImplementedError(
+              'Need to teach StartActivity how to pass %s extras' % type(value))
+        cmd += ' %s %s' % (key, value)
+    if trace_file_name:
+      cmd += ' --start-profiler ' + trace_file_name
+    self.RunShellCommand(cmd)
+
+  def GoHome(self):
+    """Tell the device to return to the home screen. Blocks until completion."""
+    self.RunShellCommand('am start -W '
+        '-a android.intent.action.MAIN -c android.intent.category.HOME')
+
+  def CloseApplication(self, package):
+    """Attempt to close down the application, using increasing violence.
+
+    Args:
+      package: Name of the process to kill off, e.g.
+      com.google.android.apps.chrome
+    """
+    self.RunShellCommand('am force-stop ' + package)
+
+  def ClearApplicationState(self, package):
+    """Closes and clears all state for the given |package|."""
+    self.CloseApplication(package)
+    self.RunShellCommand('rm -r /data/data/%s/app_*' % package)
+    self.RunShellCommand('rm -r /data/data/%s/cache/*' % package)
+    self.RunShellCommand('rm -r /data/data/%s/files/*' % package)
+    self.RunShellCommand('rm -r /data/data/%s/shared_prefs/*' % package)
+
+  def SendKeyEvent(self, keycode):
+    """Sends keycode to the device.
+
+    Args:
+      keycode: Numeric keycode to send (see "enum" at top of file).
+    """
+    self.RunShellCommand('input keyevent %d' % keycode)
+
+  def PushIfNeeded(self, local_path, device_path):
+    """Pushes |local_path| to |device_path|.
+
+    Works for files and directories. This method skips copying any paths
+    that already exist on the device with the same hash.
+
+    All pushed files can be removed by calling RemovePushedFiles().
+    """
+    assert os.path.exists(local_path), 'Local path not found %s' % local_path
+
+    if not self._md5sum_path:
+      default_build_type = os.environ.get('BUILD_TYPE', 'Debug')
+      md5sum_path = '%s/%s/md5sum_bin' % (cmd_helper.OutDirectory.get(),
+          default_build_type)
+      if not os.path.exists(md5sum_path):
+        md5sum_path = '%s/Release/md5sum_bin' % cmd_helper.OutDirectory.get()
+        if not os.path.exists(md5sum_path):
+          print >> sys.stderr, 'Please build md5sum.'
+          sys.exit(1)
+      command = 'push %s %s' % (md5sum_path, MD5SUM_DEVICE_PATH)
+      assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+      self._md5sum_path = md5sum_path
+
+    self._pushed_files.append(device_path)
+    hashes_on_device = _ComputeFileListHash(
+        self.RunShellCommand(MD5SUM_DEVICE_PATH + ' ' + device_path))
+    assert os.path.exists(local_path), 'Local path not found %s' % local_path
+    hashes_on_host = _ComputeFileListHash(
+        subprocess.Popen(
+            '%s_host %s' % (self._md5sum_path, local_path),
+            stdout=subprocess.PIPE, shell=True).stdout)
+    if hashes_on_device == hashes_on_host:
+      return
+
+    # They don't match, so remove everything first and then create it.
+    if os.path.isdir(local_path):
+      self.RunShellCommand('rm -r %s' % device_path, timeout_time=2*60)
+      self.RunShellCommand('mkdir -p %s' % device_path)
+
+    # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout of
+    # 60 seconds which isn't sufficient for a lot of users of this method.
+    push_command = 'push %s %s' % (local_path, device_path)
+    logging.info('>>> $' + push_command)
+    output = self._adb.SendCommand(push_command, timeout_time=30*60)
+    assert _HasAdbPushSucceeded(output)
+
+
+  def GetFileContents(self, filename, log_result=False):
+    """Gets contents from the file specified by |filename|."""
+    return self.RunShellCommand('if [ -f "' + filename + '" ]; then cat "' +
+                                filename + '"; fi', log_result=log_result)
+
+  def SetFileContents(self, filename, contents):
+    """Writes |contents| to the file specified by |filename|."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(contents)
+      f.flush()
+      self._adb.Push(f.name, filename)
+
+  def RemovePushedFiles(self):
+    """Removes all files pushed with PushIfNeeded() from the device."""
+    for p in self._pushed_files:
+      self.RunShellCommand('rm -r %s' % p, timeout_time=2*60)
+
+  def ListPathContents(self, path):
+    """Lists files in all subdirectories of |path|.
+
+    Args:
+      path: The path to list.
+
+    Returns:
+      A dict of {"name": (size, lastmod), ...}.
+    """
+    # Example output:
+    # /foo/bar:
+    # -rw-r----- 1 user group   102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+    re_file = re.compile('^-(?P<perms>[^\s]+)\s+'
+                         '(?P<user>[^\s]+)\s+'
+                         '(?P<group>[^\s]+)\s+'
+                         '(?P<size>[^\s]+)\s+'
+                         '(?P<date>[^\s]+)\s+'
+                         '(?P<time>[^\s]+)\s+'
+                         '(?P<filename>[^\s]+)$')
+    return _GetFilesFromRecursiveLsOutput(
+        path, self.RunShellCommand('ls -lR %s' % path), re_file,
+        self._device_utc_offset)
+
+  def SetJavaAssertsEnabled(self, enable):
+    """Sets or removes the device java assertions property.
+
+    Args:
+      enable: If True the property will be set.
+
+    Returns:
+      True if the file was modified (reboot is required for it to take effect).
+    """
+    # First ensure the desired property is persisted.
+    temp_props_file = tempfile.NamedTemporaryFile()
+    properties = ''
+    if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+      properties = file(temp_props_file.name).read()
+    re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                           r'\s*=\s*all\s*$', re.MULTILINE)
+    if enable != bool(re.search(re_search, properties)):
+      re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                              r'\s*=\s*\w+\s*$', re.MULTILINE)
+      properties = re.sub(re_replace, '', properties)
+      if enable:
+        properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+      file(temp_props_file.name, 'w').write(properties)
+      self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    was_set = 'all' in self.RunShellCommand('getprop ' + JAVA_ASSERT_PROPERTY)
+    if was_set == enable:
+      return False
+
+    self.RunShellCommand('setprop %s "%s"' % (JAVA_ASSERT_PROPERTY,
+                                              enable and 'all' or ''))
+    return True
+
+  def GetBuildId(self):
+    """Returns the build ID of the system (e.g. JRM79C)."""
+    build_id = self.RunShellCommand('getprop ro.build.id')[0]
+    assert build_id
+    return build_id
+
+  def GetBuildType(self):
+    """Returns the build type of the system (e.g. eng)."""
+    build_type = self.RunShellCommand('getprop ro.build.type')[0]
+    assert build_type
+    return build_type
+
+  def StartMonitoringLogcat(self, clear=True, timeout=10, logfile=None,
+                            filters=None):
+    """Starts monitoring the output of logcat, for use with WaitForLogMatch.
+
+    Args:
+      clear: If True the existing logcat output will be cleared, to avoid
+             matching historical output lurking in the log.
+      timeout: How long WaitForLogMatch will wait for the given match
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self.RunShellCommand('logcat -c')
+    args = []
+    if self._adb._target_arg:
+      args += shlex.split(self._adb._target_arg)
+    args += ['logcat', '-v', 'threadtime']
+    if filters:
+      args.extend(filters)
+    else:
+      args.append('*:v')
+
+    if logfile:
+      logfile = NewLineNormalizer(logfile)
+
+    # Spawn logcat and synchronize with it.
+    for _ in range(4):
+      self._logcat = pexpect.spawn('adb', args, timeout=timeout,
+                                   logfile=logfile)
+      self.RunShellCommand('log startup_sync')
+      if self._logcat.expect(['startup_sync', pexpect.EOF,
+                              pexpect.TIMEOUT]) == 0:
+        break
+      self._logcat.close(force=True)
+    else:
+      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
+      sys.exit(1)
+
+  def GetMonitoredLogCat(self):
+    """Returns an "adb logcat" command as created by pexpect.spawn."""
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    return self._logcat
+
+  def WaitForLogMatch(self, success_re, error_re, clear=False):
+    """Blocks until a matching line is logged or a timeout occurs.
+
+    Args:
+      success_re: A compiled re to search each line for.
+      error_re: A compiled re which, if found, terminates the search for
+          |success_re|. If None is given, no error condition will be detected.
+      clear: If True the existing logcat output will be cleared, defaults to
+          false.
+
+    Raises:
+      pexpect.TIMEOUT upon the timeout specified by StartMonitoringLogcat().
+
+    Returns:
+      The re match object if |success_re| is matched first or None if |error_re|
+      is matched first.
+    """
+    logging.info('<<< Waiting for logcat:' + str(success_re.pattern))
+    t0 = time.time()
+    while True:
+      if not self._logcat:
+        self.StartMonitoringLogcat(clear)
+      try:
+        while True:
+          # Note this will block for up to the timeout _per log line_, so we need
+          # to calculate the overall timeout remaining since t0.
+          time_remaining = t0 + self._logcat.timeout - time.time()
+          if time_remaining < 0: raise pexpect.TIMEOUT(self._logcat)
+          self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
+          line = self._logcat.match.group(1)
+          if error_re:
+            error_match = error_re.search(line)
+            if error_match:
+              return None
+          success_match = success_re.search(line)
+          if success_match:
+            return success_match
+          logging.info('<<< Skipped Logcat Line:' + str(line))
+      except pexpect.TIMEOUT:
+        raise pexpect.TIMEOUT(
+            'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
+            'to debug)' %
+            (self._logcat.timeout, success_re.pattern))
+      except pexpect.EOF:
+        # It seems that sometimes logcat can end unexpectedly. This seems
+        # to happen during Chrome startup after a reboot followed by a cache
+        # clean. I don't understand why this happens, but this code deals with
+        # getting EOF in logcat.
+        logging.critical('Found EOF in adb logcat. Restarting...')
+        # Rerun spawn with original arguments. Note that self._logcat.args[0] is
+        # the path of adb, so we don't want it in the arguments.
+        self._logcat = pexpect.spawn('adb',
+                                     self._logcat.args[1:],
+                                     timeout=self._logcat.timeout,
+                                     logfile=self._logcat.logfile)
+
+  def StartRecordingLogcat(self, clear=True, filters=['*:v']):
+    """Starts recording logcat output to eventually be saved as a string.
+
+    This call should come before some series of tests are run, with either
+    StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self._adb.SendCommand('logcat -c')
+    logcat_command = 'adb %s logcat -v threadtime %s' % (self._adb._target_arg,
+                                                         ' '.join(filters))
+    self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+                                           stdout=subprocess.PIPE)
+
+  def StopRecordingLogcat(self):
+    """Stops an existing logcat recording subprocess and returns output.
+
+    Returns:
+      The logcat output as a string or an empty string if logcat was not
+      being recorded at the time.
+    """
+    if not self.logcat_process:
+      return ''
+    # Cannot evaluate directly as 0 is a possible value.
+    # Better to read the self.logcat_process.stdout before killing it,
+    # Otherwise the communicate may return incomplete output due to pipe break.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    (output, _) = self.logcat_process.communicate()
+    self.logcat_process = None
+    return output
+
+  def SearchLogcatRecord(self, record, message, thread_id=None, proc_id=None,
+                         log_level=None, component=None):
+    """Searches the specified logcat output and returns results.
+
+    This method searches through the logcat output specified by record for a
+    certain message, narrowing results by matching them against any other
+    specified criteria.  It returns all matching lines as described below.
+
+    Args:
+      record: A string generated by Start/StopRecordingLogcat to search.
+      message: An output string to search for.
+      thread_id: The thread id that is the origin of the message.
+      proc_id: The process that is the origin of the message.
+      log_level: The log level of the message.
+      component: The name of the component that would create the message.
+
+    Returns:
+      A list of dictionaries represeting matching entries, each containing keys
+      thread_id, proc_id, log_level, component, and message.
+    """
+    if thread_id:
+      thread_id = str(thread_id)
+    if proc_id:
+      proc_id = str(proc_id)
+    results = []
+    reg = re.compile('(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+                     re.MULTILINE)
+    log_list = reg.findall(record)
+    for (tid, pid, log_lev, comp, msg) in log_list:
+      if ((not thread_id or thread_id == tid) and
+          (not proc_id or proc_id == pid) and
+          (not log_level or log_level == log_lev) and
+          (not component or component == comp) and msg.find(message) > -1):
+        match = dict({'thread_id': tid, 'proc_id': pid,
+                      'log_level': log_lev, 'component': comp,
+                      'message': msg})
+        results.append(match)
+    return results
+
+  def ExtractPid(self, process_name):
+    """Extracts Process Ids for a given process name from Android Shell.
+
+    Args:
+      process_name: name of the process on the device.
+
+    Returns:
+      List of all the process ids (as strings) that match the given name.
+      If the name of a process exactly matches the given name, the pid of
+      that process will be inserted to the front of the pid list.
+    """
+    pids = []
+    for line in self.RunShellCommand('ps', log_result=False):
+      data = line.split()
+      try:
+        if process_name in data[-1]:  # name is in the last column
+          if process_name == data[-1]:
+            pids.insert(0, data[1])  # PID is in the second column
+          else:
+            pids.append(data[1])
+      except IndexError:
+        pass
+    return pids
+
+  def GetIoStats(self):
+    """Gets cumulative disk IO stats since boot (for all processes).
+
+    Returns:
+      Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+      was an error.
+    """
+    for line in self.GetFileContents('/proc/diskstats', log_result=False):
+      stats = io_stats_parser.ParseIoStatsLine(line)
+      if stats.device == 'mmcblk0':
+        return {
+            'num_reads': stats.num_reads_issued,
+            'num_writes': stats.num_writes_completed,
+            'read_ms': stats.ms_spent_reading,
+            'write_ms': stats.ms_spent_writing,
+        }
+    logging.warning('Could not find disk IO stats.')
+    return None
+
  def GetMemoryUsageForPid(self, pid):
    """Returns the memory usage for given pid.

    Args:
      pid: The pid number of the specific process running on device.
        NOTE(review): the nvmap match below compares match.group('pid') (a
        string) against |pid| with ==, so callers should pass the pid as a
        string (as ExtractPid returns) -- an int would never match. Confirm.

    Returns:
      A tuple containing:
      [0]: Dict of {metric:usage_kb}, for the process which has specified pid.
      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
      Shared_Dirty, Private_Clean, Private_Dirty, Referenced, Swap,
      KernelPageSize, MMUPageSize, Nvidia (tablet only).
      [1]: Detailed /proc/[PID]/smaps information, keyed by mapping name.
    """
    usage_dict = collections.defaultdict(int)
    smaps = collections.defaultdict(dict)
    # Name of the mapping the metric lines that follow belong to; header
    # lines in smaps precede their metric lines.
    current_smap = ''
    for line in self.GetFileContents('/proc/%s/smaps' % pid, log_result=False):
      items = line.split()
      # See man 5 proc for more details. The format is:
      # address perms offset dev inode pathname
      if len(items) > 5:
        current_smap = ' '.join(items[5:])
      elif len(items) > 3:
        current_smap = ' '.join(items[3:])
      # Metric lines (e.g. "Rss: 4 kB") are accumulated both into the
      # process-wide totals and into the per-mapping breakdown.
      match = re.match(MEMORY_INFO_RE, line)
      if match:
        key = match.group('key')
        usage_kb = int(match.group('usage_kb'))
        usage_dict[key] += usage_kb
        if key not in smaps[current_smap]:
          smaps[current_smap][key] = 0
        smaps[current_smap][key] += usage_kb
    if not usage_dict or not any(usage_dict.values()):
      # Presumably the process died between ps and calling this method.
      logging.warning('Could not find memory usage for pid ' + str(pid))

    # GPU allocations exposed via the nvmap debugfs file (present on Nvidia
    # tablets per the docstring); on other devices the loop yields nothing.
    for line in self.GetFileContents('/d/nvmap/generic-0/clients',
                                     log_result=False):
      match = re.match(NVIDIA_MEMORY_INFO_RE, line)
      if match and match.group('pid') == pid:
        usage_bytes = int(match.group('usage_bytes'))
        usage_dict['Nvidia'] = int(round(usage_bytes / 1000.0))  # kB
        break

    return (usage_dict, smaps)
+
+  def GetMemoryUsageForPackage(self, package):
+    """Returns the memory usage for all processes whose name contains |pacakge|.
+
+    Args:
+      package: A string holding process name to lookup pid list for.
+
+    Returns:
+      A tuple containg:
+      [0]: Dict of {metric:usage_kb}, summed over all pids associated with
+           |name|.
+      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
+      Shared_Dirty, Private_Clean, Private_Dirty, Referenced, Swap,
+      KernelPageSize, MMUPageSize, Nvidia (tablet only).
+      [1]: a list with detailed /proc/[PID]/smaps information.
+    """
+    usage_dict = collections.defaultdict(int)
+    pid_list = self.ExtractPid(package)
+    smaps = collections.defaultdict(dict)
+
+    for pid in pid_list:
+      usage_dict_per_pid, smaps_per_pid = self.GetMemoryUsageForPid(pid)
+      smaps[pid] = smaps_per_pid
+      for (key, value) in usage_dict_per_pid.items():
+        usage_dict[key] += value
+
+    return usage_dict, smaps
+
+  def ProcessesUsingDevicePort(self, device_port):
+    """Lists processes using the specified device port on loopback interface.
+
+    Args:
+      device_port: Port on device we want to check.
+
+    Returns:
+      A list of (pid, process_name) tuples using the specified port.
+    """
+    tcp_results = self.RunShellCommand('cat /proc/net/tcp', log_result=False)
+    tcp_address = '0100007F:%04X' % device_port
+    pids = []
+    for single_connect in tcp_results:
+      connect_results = single_connect.split()
+      # Column 1 is the TCP port, and Column 9 is the inode of the socket
+      if connect_results[1] == tcp_address:
+        socket_inode = connect_results[9]
+        socket_name = 'socket:[%s]' % socket_inode
+        lsof_results = self.RunShellCommand('lsof', log_result=False)
+        for single_process in lsof_results:
+          process_results = single_process.split()
+          # Ignore the line if it has less than nine columns in it, which may
+          # be the case when a process stops while lsof is executing.
+          if len(process_results) <= 8:
+            continue
+          # Column 0 is the executable name
+          # Column 1 is the pid
+          # Column 8 is the Inode in use
+          if process_results[8] == socket_name:
+            pids.append((int(process_results[1]), process_results[0]))
+        break
+    logging.info('PidsUsingDevicePort: %s', pids)
+    return pids
+
+  def FileExistsOnDevice(self, file_name):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      file_name: Full path of file to check.
+
+    Returns:
+      True if the file exists, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -e "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+
+      status = self._adb.SendShellCommand(
+          '\'ls "%s" >/dev/null 2>&1; echo $?\'' % (file_name))
+      return int(status) == 0
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  def TakeScreenshot(self, host_file):
+    """Saves a screenshot image to |host_file| on the host.
+
+    Args:
+      host_file: Absolute path to the image file to store on the host.
+    """
+    host_dir = os.path.dirname(host_file)
+    if not os.path.exists(host_dir):
+      os.makedirs(host_dir)
+    device_file = '%s/screenshot.png' % self.GetExternalStorage()
+    self.RunShellCommand('/system/bin/screencap -p %s' % device_file)
+    assert self._adb.Pull(device_file, host_file)
+    assert os.path.exists(host_file)
+
+
class NewLineNormalizer(object):
  """File-like wrapper that rewrites pseudo-tty line endings to '\n'.

  Pexpect runs adb within a pseudo-tty device (see
  http://www.noah.org/wiki/pexpect), which turns every '\n' printed by adb
  into '\r\n' in the logfile. Since adb already terminates its lines with
  '\r\n', each line ends up suffixed with '\r\r\n'. Writes routed through
  this wrapper collapse that sequence back to a single '\n' before handing
  the data to the wrapped stream.
  """

  def __init__(self, output):
    # The underlying stream that receives the normalized data.
    self._output = output

  def write(self, data):
    self._output.write(data.replace('\r\r\n', '\n'))

  def flush(self):
    self._output.flush()
diff --git a/src/build/android/pylib/apk_info.py b/src/build/android/pylib/apk_info.py
new file mode 100644
index 0000000..00b30dd
--- /dev/null
+++ b/src/build/android/pylib/apk_info.py
@@ -0,0 +1,186 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gathers information about APKs."""
+
+import collections
+import logging
+import os
+import pickle
+import re
+
+import cmd_helper
+
+# If you change the cached output of proguard, increment this number
+PICKLE_FORMAT_VERSION = 1
+
def GetPackageNameForApk(apk_path):
  """Returns the package name of the apk file."""
  # `aapt dump badging` prints a line like: package: name='com.foo' ...
  badging_lines = cmd_helper.GetCmdOutput(
      ['aapt', 'dump', 'badging', apk_path]).split('\n')
  package_name_re = re.compile(r'package: .*name=\'(\S*)\'')
  for badging_line in badging_lines:
    match = package_name_re.match(badging_line)
    if match:
      return match.group(1)
  raise Exception('Failed to determine package name of %s' % apk_path)
+
+
class ApkInfo(object):
  """Helper class for inspecting APKs.

  Test classes, methods and annotations are discovered by dumping the test
  jar with proguard. Because that is slow, the parsed result is cached in a
  pickle file next to the jar and reused while it is newer than the jar.
  """

  def __init__(self, apk_path, jar_path):
    """Initializes the helper and loads (or regenerates) the proguard data.

    Args:
      apk_path: Path to the APK to inspect.
      jar_path: Path to the jar containing the APK's classes.

    Raises:
      Exception: If apk_path or jar_path does not exist.
    """
    # Prefer the SDK's proguard; fall back to the one in the Android tree.
    self._PROGUARD_PATH = os.path.join(os.environ['ANDROID_SDK_ROOT'],
                                       'tools/proguard/bin/proguard.sh')
    if not os.path.exists(self._PROGUARD_PATH):
      self._PROGUARD_PATH = os.path.join(os.environ['ANDROID_BUILD_TOP'],
                                         'external/proguard/bin/proguard.sh')
    # Patterns matching the relevant lines of `proguard -dump` output.
    self._PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
    self._PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
    self._PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
    self._PROGUARD_ANNOTATION_CONST_RE = (
        re.compile(r'\s*?- Constant element value.*$'))
    self._PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')

    if not os.path.exists(apk_path):
      raise Exception('%s not found, please build it' % apk_path)
    self._apk_path = apk_path
    if not os.path.exists(jar_path):
      raise Exception('%s not found, please build it' % jar_path)
    self._jar_path = jar_path
    # Maps 'Class#method' -> list of annotations ('Name' or 'Name:value').
    self._annotation_map = collections.defaultdict(list)
    self._pickled_proguard_name = self._jar_path + '-proguard.pickle'
    # All discovered 'Class#testMethod' identifiers.
    self._test_methods = []
    self._Initialize()

  def _Initialize(self):
    # Regenerate with proguard whenever the cache is missing or unusable.
    if not self._GetCachedProguardData():
      self._GetProguardData()

  def _GetCachedProguardData(self):
    """Loads the cached proguard output if it is still valid.

    Returns:
      True if a usable cache (newer than the jar, matching format version)
      was loaded, False otherwise.
    """
    if (os.path.exists(self._pickled_proguard_name) and
        (os.path.getmtime(self._pickled_proguard_name) >
         os.path.getmtime(self._jar_path))):
      logging.info('Loading cached proguard output from %s',
                   self._pickled_proguard_name)
      try:
        with open(self._pickled_proguard_name, 'r') as r:
          d = pickle.loads(r.read())
        if d['VERSION'] == PICKLE_FORMAT_VERSION:
          self._annotation_map = d['ANNOTATION_MAP']
          self._test_methods = d['TEST_METHODS']
          return True
      except Exception:
        # Was a bare `except:` that swallowed even KeyboardInterrupt and
        # always blamed a version change; any unpickling failure simply
        # invalidates the cache.
        logging.warning('Failed to load cached proguard output, '
                        'regenerating cache')
    return False

  def _GetProguardData(self):
    """Runs proguard on the jar and parses its dump into the maps."""
    proguard_output = cmd_helper.GetCmdOutput([self._PROGUARD_PATH,
                                               '-injars', self._jar_path,
                                               '-dontshrink',
                                               '-dontoptimize',
                                               '-dontobfuscate',
                                               '-dontpreverify',
                                               '-dump',
                                              ]).split('\n')
    clazz = None
    method = None
    annotation = None
    has_value = False
    qualified_method = None
    for line in proguard_output:
      m = self._PROGUARD_CLASS_RE.match(line)
      if m:
        clazz = m.group(1).replace('/', '.')  # Change package delim.
        annotation = None
        continue

      m = self._PROGUARD_METHOD_RE.match(line)
      if m:
        method = m.group(1)
        annotation = None
        qualified_method = clazz + '#' + method
        if method.startswith('test') and clazz.endswith('Test'):
          self._test_methods += [qualified_method]
        continue

      if not qualified_method:
        # Ignore non-method annotations.
        continue

      m = self._PROGUARD_ANNOTATION_RE.match(line)
      if m:
        annotation = m.group(1).split('/')[-1]  # Ignore the annotation package.
        self._annotation_map[qualified_method].append(annotation)
        has_value = False
        continue
      if annotation:
        # Annotations with values span two lines: a "Constant element value"
        # marker followed by the value itself.
        if not has_value:
          m = self._PROGUARD_ANNOTATION_CONST_RE.match(line)
          if m:
            has_value = True
        else:
          m = self._PROGUARD_ANNOTATION_VALUE_RE.match(line)
          if m:
            value = m.group(1)
            self._annotation_map[qualified_method].append(
                annotation + ':' + value)
            has_value = False

    logging.info('Storing proguard output to %s', self._pickled_proguard_name)
    d = {'VERSION': PICKLE_FORMAT_VERSION,
         'ANNOTATION_MAP': self._annotation_map,
         'TEST_METHODS': self._test_methods}
    with open(self._pickled_proguard_name, 'w') as f:
      f.write(pickle.dumps(d))

  def _GetAnnotationMap(self):
    # Internal accessor; the map is populated by _Initialize().
    return self._annotation_map

  def _IsTestMethod(self, test):
    """True for 'SomeTest#testSomething' style identifiers."""
    class_name, method = test.split('#')
    return class_name.endswith('Test') and method.startswith('test')

  def GetApkPath(self):
    return self._apk_path

  def GetPackageName(self):
    """Returns the package name of this APK."""
    return GetPackageNameForApk(self._apk_path)

  def GetTestAnnotations(self, test):
    """Returns a list of all annotations for the given |test|. May be empty."""
    if not self._IsTestMethod(test):
      return []
    return self._GetAnnotationMap()[test]

  def _AnnotationsMatchFilters(self, annotation_filter_list, annotations):
    """Checks if annotations match any of the filters.

    A filter is either a plain annotation name or 'Name=value1,value2,...',
    which matches when any 'Name:value' pair is present in |annotations|.
    An empty/None filter list matches everything.
    """
    if not annotation_filter_list:
      return True
    for annotation_filter in annotation_filter_list:
      filters = annotation_filter.split('=')
      if len(filters) == 2:
        key = filters[0]
        value_list = filters[1].split(',')
        for value in value_list:
          if key + ':' + value in annotations:
            return True
      elif annotation_filter in annotations:
        return True
    return False

  def GetAnnotatedTests(self, annotation_filter_list):
    """Returns a list of all tests that match the given annotation filters."""
    # items() (rather than Python-2-only iteritems()) keeps this usable from
    # both Python 2 and 3.
    return [test for test, annotations in self._GetAnnotationMap().items()
            if self._IsTestMethod(test) and self._AnnotationsMatchFilters(
                annotation_filter_list, annotations)]

  def GetTestMethods(self):
    """Returns a list of all test methods in this apk as Class#testMethod."""
    return self._test_methods

  @staticmethod
  def IsPythonDrivenTest(test):
    return 'pythonDrivenTests' in test
diff --git a/src/build/android/pylib/base_test_runner.py b/src/build/android/pylib/base_test_runner.py
new file mode 100644
index 0000000..5355633
--- /dev/null
+++ b/src/build/android/pylib/base_test_runner.py
@@ -0,0 +1,210 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import httplib
+import logging
+import os
+import tempfile
+import time
+
+import android_commands
+import constants
+from chrome_test_server_spawner import SpawningServer
+import constants
+from flag_changer import FlagChanger
+from forwarder import Forwarder
+import lighttpd_server
+import ports
+from valgrind_tools import CreateTool
+
+
+# A file on device to store ports of net test server. The format of the file is
+# test-spawner-server-port:test-server-port
+NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
class BaseTestRunner(object):
  """Base class for running tests on a single device.

  A subclass should implement RunTests() with no parameter, so that calling
  the Run() method will set up tests, run them and tear them down.
  """

  def __init__(self, device, tool, shard_index, build_type):
    """
      Args:
        device: Tests will run on the device of this ID.
        tool: Name of the wrapper tool to create via valgrind_tools.CreateTool.
        shard_index: Index number of the shard on which the test suite will run.
        build_type: 'Release' or 'Debug'.
    """
    self.device = device
    self.adb = android_commands.AndroidCommands(device=device)
    self.tool = CreateTool(tool, self.adb)
    self._http_server = None
    self._forwarder = None
    # Fixed device-side port that gets forwarded to the host http server.
    self._forwarder_device_port = 8000
    self.forwarder_base_url = ('http://localhost:%d' %
        self._forwarder_device_port)
    self.flags = FlagChanger(self.adb)
    self.shard_index = shard_index
    # NOTE(review): '--disable-fre' presumably skips the first-run
    # experience so tests start non-interactively -- confirm.
    self.flags.AddFlags(['--disable-fre'])
    self._spawning_server = None
    self._spawner_forwarder = None
    # We will allocate port for test server spawner when calling method
    # LaunchChromeTestServerSpawner and allocate port for test server when
    # starting it in TestServerThread.
    self.test_server_spawner_port = 0
    self.test_server_port = 0
    self.build_type = build_type

  def _PushTestServerPortInfoToDevice(self):
    """Pushes the latest port information to device."""
    # Written in the "spawner-port:server-port" format described by
    # NET_TEST_SERVER_PORT_INFO_FILE.
    self.adb.SetFileContents(self.adb.GetExternalStorage() + '/' +
                             NET_TEST_SERVER_PORT_INFO_FILE,
                             '%d:%d' % (self.test_server_spawner_port,
                                        self.test_server_port))

  def Run(self):
    """Calls subclass functions to set up tests, run them and tear them down.

    Returns:
      Test results returned from RunTests(), or True when the suite has no
      tests to run.
    """
    if not self.HasTests():
      return True
    self.SetUp()
    try:
      return self.RunTests()
    finally:
      # TearDown() runs even when RunTests() raises.
      self.TearDown()

  def SetUp(self):
    """Called before tests run."""
    # Start from a clean slate: remove any stale device-side forwarder.
    Forwarder.KillDevice(self.adb)

  def HasTests(self):
    """Whether the test suite has tests to run."""
    return True

  def RunTests(self):
    """Runs the tests. Need to be overridden."""
    raise NotImplementedError

  def TearDown(self):
    """Called when tests finish running."""
    self.ShutdownHelperToolsForTestSuite()

  def CopyTestData(self, test_data_paths, dest_dir):
    """Copies |test_data_paths| list of files/directories to |dest_dir|.

    Args:
      test_data_paths: A list of files or directories relative to |dest_dir|
          which should be copied to the device. The paths must exist in
          |CHROME_DIR|.
      dest_dir: Absolute path to copy to on the device.
    """
    for p in test_data_paths:
      self.adb.PushIfNeeded(
          os.path.join(constants.CHROME_DIR, p),
          os.path.join(dest_dir, p))

  def LaunchTestHttpServer(self, document_root, port=None,
                           extra_config_contents=None):
    """Launches an HTTP server to serve HTTP tests.

    Args:
      document_root: Document root of the HTTP server.
      port: port on which we want to the http server bind.
      extra_config_contents: Extra config contents for the HTTP server.

    Returns:
      Tuple of (device-side forwarded port, host-side http server port).
    """
    self._http_server = lighttpd_server.LighttpdServer(
        document_root, port=port, extra_config_contents=extra_config_contents)
    if self._http_server.StartupHttpServer():
      logging.info('http server started: http://localhost:%s',
                   self._http_server.port)
    else:
      logging.critical('Failed to start http server')
    self.StartForwarderForHttpServer()
    return (self._forwarder_device_port, self._http_server.port)

  def _CreateAndRunForwarder(
      self, adb, port_pairs, tool, host_name, build_type):
    """Creates and run a forwarder."""
    forwarder = Forwarder(adb, build_type)
    forwarder.Run(port_pairs, tool, host_name)
    return forwarder

  def StartForwarder(self, port_pairs):
    """Starts TCP traffic forwarding for the given |port_pairs|.

    Args:
      port_pairs: A list of (device_port, local_port) tuples to forward.
    """
    # Only one http forwarder is kept; close the previous one first.
    if self._forwarder:
      self._forwarder.Close()
    self._forwarder = self._CreateAndRunForwarder(
        self.adb, port_pairs, self.tool, '127.0.0.1', self.build_type)

  def StartForwarderForHttpServer(self):
    """Starts a forwarder for the HTTP server.

    The forwarder forwards HTTP requests and responses between host and device.
    """
    self.StartForwarder([(self._forwarder_device_port, self._http_server.port)])

  def RestartHttpServerForwarderIfNecessary(self):
    """Restarts the forwarder if it's not open."""
    # Checks to see if the http server port is being used.  If not forwards the
    # request.
    # TODO(dtrainor): This is not always reliable because sometimes the port
    # will be left open even after the forwarder has been killed.
    if not ports.IsDevicePortUsed(self.adb,
        self._forwarder_device_port):
      self.StartForwarderForHttpServer()

  def ShutdownHelperToolsForTestSuite(self):
    """Shuts down the server and the forwarder."""
    # Forwarders should be killed before the actual servers they're forwarding
    # to as they are clients potentially with open connections and to allow for
    # proper hand-shake/shutdown.
    Forwarder.KillDevice(self.adb)
    if self._http_server:
      self._http_server.ShutdownHttpServer()
    if self._spawning_server:
      self._spawning_server.Stop()
    self.flags.Restore()

  def LaunchChromeTestServerSpawner(self):
    """Launches test server spawner.

    Raises:
      Exception: If the spawner did not become connectable after 3 attempts.
    """
    server_ready = False
    error_msgs = []
    # Try 3 times to launch test spawner server.
    for i in xrange(0, 3):
      # Do not allocate port for test server here. We will allocate
      # different port for individual test in TestServerThread.
      self.test_server_spawner_port = ports.AllocateTestServerPort()
      self._spawning_server = SpawningServer(self.test_server_spawner_port,
                                             self.adb,
                                             self.tool,
                                             self.build_type)
      self._spawning_server.Start()
      server_ready, error_msg = ports.IsHttpServerConnectable(
          '127.0.0.1', self.test_server_spawner_port, path='/ping',
          expected_read='ready')
      if server_ready:
        break
      else:
        error_msgs.append(error_msg)
      self._spawning_server.Stop()
      # Wait for 2 seconds then restart.
      time.sleep(2)
    if not server_ready:
      logging.error(';'.join(error_msgs))
      raise Exception('Can not start the test spawner server.')
    # Let the device know which ports to talk to, then forward the spawner
    # port so the device can reach the host-side spawner.
    self._PushTestServerPortInfoToDevice()
    self._spawner_forwarder = self._CreateAndRunForwarder(
        self.adb,
        [(self.test_server_spawner_port, self.test_server_spawner_port)],
        self.tool, '127.0.0.1', self.build_type)
diff --git a/src/build/android/pylib/base_test_sharder.py b/src/build/android/pylib/base_test_sharder.py
new file mode 100644
index 0000000..1c4559a
--- /dev/null
+++ b/src/build/android/pylib/base_test_sharder.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import android_commands
+import logging
+import multiprocessing
+
+from android_commands import errors
+from forwarder import Forwarder
+from test_result import TestResults
+
+
+def _ShardedTestRunnable(test):
+  """Standalone function needed by multiprocessing.Pool."""
+  log_format = '[' + test.device + '] # %(asctime)-15s: %(message)s'
+  if logging.getLogger().handlers:
+    logging.getLogger().handlers[0].setFormatter(logging.Formatter(log_format))
+  else:
+    logging.basicConfig(format=log_format)
+  # Handle SystemExit here since python has a bug to exit current process
+  try:
+    return test.Run()
+  except SystemExit:
+    return TestResults()
+
+
def SetTestsContainer(tests_container):
  """Sets the shared tests container on BaseTestSharder.

  multiprocessing.Queue can't be pickled across processes, so we need to set
  this as a 'global', per process, via multiprocessing.Pool: this function is
  used as the pool's initializer, running once in every worker process.
  """
  BaseTestSharder.tests_container = tests_container
+
+
class BaseTestSharder(object):
  """Base class for sharding tests across multiple devices.

  Subclasses override CreateShardedTestRunner(); RunShardedTests() then runs
  one shard per attached device in a multiprocessing pool, retrying broken
  tests on the devices that remain healthy.

  Args:
    attached_devices: A list of attached devices.
  """
  # See more in SetTestsContainer.
  tests_container = None

  def __init__(self, attached_devices, build_type='Debug'):
    self.attached_devices = attached_devices
    # Worst case scenario: a device will drop offline per run, so we need
    # to retry until we're out of devices.
    self.retries = len(self.attached_devices)
    self.tests = []
    self.build_type = build_type

  def CreateShardedTestRunner(self, device, index):
    """Factory function to create a suite-specific test runner.

    Args:
      device: Device serial where this shard will run
      index: Index of this device in the pool.

    Returns:
      An object of BaseTestRunner type (that can provide a "Run()" method).
    """
    pass

  def SetupSharding(self, tests):
    """Called before starting the shards."""
    pass

  def OnTestsCompleted(self, test_runners, test_results):
    """Notifies that we completed the tests."""
    pass

  def _KillHostForwarder(self):
    # Clean up any host-side forwarder left over from a previous run.
    Forwarder.KillHost(self.build_type)

  def RunShardedTests(self):
    """Runs the tests in all connected devices.

    Returns:
      A TestResults object.

    Raises:
      Exception: If every retry was exhausted without a clean run.
    """
    logging.warning('*' * 80)
    logging.warning('Sharding in ' + str(len(self.attached_devices)) +
                    ' devices.')
    logging.warning('Note that the output is not synchronized.')
    logging.warning('Look for the "Final result" banner in the end.')
    logging.warning('*' * 80)
    final_results = TestResults()
    self._KillHostForwarder()
    for retry in xrange(self.retries):
      logging.warning('Try %d of %d', retry + 1, self.retries)
      self.SetupSharding(self.tests)
      test_runners = []

      # Try to create N shards, and retrying on failure.
      try:
        for index, device in enumerate(self.attached_devices):
          logging.warning('*' * 80)
          logging.warning('Creating shard %d for %s', index, device)
          logging.warning('*' * 80)
          test_runner = self.CreateShardedTestRunner(device, index)
          test_runners += [test_runner]
      except errors.DeviceUnresponsiveError as e:
        # Drop the offending device and consume one retry.
        logging.critical('****Failed to create a shard: [%s]', e)
        self.attached_devices.remove(device)
        continue

      logging.warning('Starting...')
      pool = multiprocessing.Pool(len(self.attached_devices),
                                  SetTestsContainer,
                                  [BaseTestSharder.tests_container])
      # map can't handle KeyboardInterrupt exception. It's a python bug.
      # So use map_async instead.
      async_results = pool.map_async(_ShardedTestRunnable, test_runners)
      try:
        results_lists = async_results.get(999999)
      except errors.DeviceUnresponsiveError as e:
        logging.critical('****Failed to run test: [%s]', e)
        self.attached_devices = android_commands.GetAttachedDevices()
        continue
      test_results = TestResults.FromTestResults(results_lists)
      # Re-check the attached devices for some devices may
      # become offline
      retry_devices = set(android_commands.GetAttachedDevices())
      # Remove devices that had exceptions.
      retry_devices -= TestResults.DeviceExceptions(results_lists)
      # Retry on devices that didn't have any exception.
      self.attached_devices = list(retry_devices)
      # Last retry or no devices left: fold the accumulated passes into
      # this run's results and stop. Otherwise accumulate the passes and
      # re-run only the broken tests.
      if (retry == self.retries - 1 or
          len(self.attached_devices) == 0):
        all_passed = final_results.ok + test_results.ok
        final_results = test_results
        final_results.ok = all_passed
        break
      else:
        final_results.ok += test_results.ok
        self.tests = []
        for t in test_results.GetAllBroken():
          self.tests += [t.name]
        if not self.tests:
          break
    else:
      # The for-loop ran out of retries without ever breaking, possibly out
      # of healthy devices. There's no recovery at this point.
      raise Exception('Unrecoverable error while retrying test runs.')
    self.OnTestsCompleted(test_runners, final_results)
    self._KillHostForwarder()
    return final_results
diff --git a/src/build/android/pylib/buildbot_report.py b/src/build/android/pylib/buildbot_report.py
new file mode 100644
index 0000000..8e7db8d
--- /dev/null
+++ b/src/build/android/pylib/buildbot_report.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to print buildbot messages."""
+
+def PrintLink(label, url):
+  """Adds a link with name |label| linking to |url| to current buildbot step.
+
+  Args:
+    label: A string with the name of the label.
+    url: A string of the URL.
+  """
+  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
+
+
+def PrintMsg(msg):
+  """Appends |msg| to the current buildbot step text.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_TEXT@%s@@@' % msg
+
+
+def PrintSummaryText(msg):
+  """Appends |msg| to main build summary. Visible from waterfall.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_SUMMARY_TEXT@%s@@@' % msg
+
+
+def PrintError():
+  """Marks the current step as failed."""
+  print '@@@STEP_FAILURE@@@'
+
+
+def PrintWarning():
+  """Marks the current step with a warning."""
+  print '@@@STEP_WARNINGS@@@'
+
+
+def PrintNamedStep(step):
+  print '@@@BUILD_STEP %s@@@' % step
+
+
+def PrintStepResultIfNeeded(options, result):
+  if result:
+    if options.buildbot_step_failure:
+      PrintError()
+    else:
+      PrintWarning()
diff --git a/src/build/android/pylib/chrome_test_server_spawner.py b/src/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 0000000..8206ca0
--- /dev/null
+++ b/src/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,402 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import threading
+import time
+import urlparse
+
+import constants
+from forwarder import Forwarder
+import ports
+
+
+# Paths that are needed to import the necessary modules when running
+# testserver.py.  NOTE(review): this appends to PYTHONPATH on every import of
+# this module, and prepends a ':' even when PYTHONPATH was unset.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + ':%s:%s:%s:%s' % (
+    os.path.join(constants.CHROME_DIR, 'third_party'),
+    os.path.join(constants.CHROME_DIR, 'third_party', 'tlslite'),
+    os.path.join(constants.CHROME_DIR, 'third_party', 'pyftpdlib', 'src'),
+    os.path.join(constants.CHROME_DIR, 'net', 'tools', 'testserver'))
+
+
+# Maps a server type name (from the spawn request) to the extra command-line
+# flag testserver.py needs for that server type; '' means no extra flag.
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'sync': '--sync',
+    'tcpecho': '--tcp-echo',
+    'udpecho': '--udp-echo',
+}
+
+
+# The timeout (in seconds) of starting up the Python test server.
+TEST_SERVER_STARTUP_TIMEOUT = 10
+
+
+def _CheckPortStatus(port, expected_status):
+  """Returns True if port has expected_status.
+
+  Args:
+    port: the port number.
+    expected_status: boolean of expected status.
+
+  Returns:
+    Returns True if the status is expected. Otherwise returns False.
+  """
+  for timeout in range(1, 5):
+    if ports.IsHostPortUsed(port) == expected_status:
+      return True
+    time.sleep(timeout)
+  return False
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line by the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  if server_type == 'udpecho':
+    raise Exception('Please do not run UDP echo tests because we do not have '
+                    'a UDP forwarder tool.')
+  return SERVER_TYPES[server_type]
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, adb, tool, build_type):
+    """Initialize TestServerThread with the following argument.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      adb: instance of AndroidCommands.
+      tool: instance of runtime error detection tool.
+      build_type: 'Release' or 'Debug'.
+    """
+    threading.Thread.__init__(self)
+    # Set by run() just before it exits; Stop() blocks on it.
+    self.wait_event = threading.Event()
+    # Set by Stop() to make run() leave its keep-alive loop.
+    self.stop_flag = False
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.adb = adb
+    self.tool = tool
+    # NOTE(review): this attribute appears unused -- run() stores the child
+    # process in |self.process| instead. Confirm before relying on it.
+    self.test_server_process = None
+    self.is_ready = False
+    # Requested host port; 0 means "let testserver.py pick a port", in which
+    # case the chosen port is read back over the startup pipe.
+    self.host_port = self.arguments['port']
+    assert isinstance(self.host_port, int)
+    self._test_server_forwarder = None
+    # The forwarder device port now is dynamically allocated.
+    self.forwarder_device_port = 0
+    # Anonymous pipe in order to get port info from test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.command_line = []
+    self.build_type = build_type
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server with a pipe given
+    by self.pipe_out. It is written as a result to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   TEST_SERVER_STARTUP_TIMEOUT)
+    if len(in_fds) == 0:
+      logging.error('Failed to wait to the Python test server to be started.')
+      return False
+    # First read the data length as an unsigned 4-byte value.  This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+      assert data_length
+    if not data_length:
+      logging.error('Failed to get length of server data.')
+      return False
+    # The payload is a JSON dict; only its 'port' entry is used here.
+    port_json = os.read(self.pipe_in, data_length)
+    if not port_json:
+      logging.error('Failed to get server data.')
+      return False
+    logging.info('Got port json data: %s', port_json)
+    port_json = json.loads(port_json)
+    if port_json.has_key('port') and isinstance(port_json['port'], int):
+      self.host_port = port_json['port']
+      return _CheckPortStatus(self.host_port, True)
+    logging.error('Failed to get port information from the server data.')
+    return False
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    # Idempotent: a previously generated command line is reused as-is.
+    if self.command_line:
+      return
+    # The following arguments must exist.
+    type_cmd = _GetServerTypeCommandLine(self.arguments['server-type'])
+    if type_cmd:
+      self.command_line.append(type_cmd)
+    self.command_line.append('--port=%d' % self.host_port)
+    # Use a pipe to get the port given by the instance of Python test server
+    # if the test does not specify the port.
+    if self.host_port == 0:
+      (self.pipe_in, self.pipe_out) = os.pipe()
+      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+    self.command_line.append('--host=%s' % self.arguments['host'])
+    data_dir = self.arguments['data-dir'] or 'chrome/test/data'
+    if not os.path.isabs(data_dir):
+      data_dir = os.path.join(constants.CHROME_DIR, data_dir)
+    self.command_line.append('--data-dir=%s' % data_dir)
+    # The following arguments are optional depending on the individual test.
+    if self.arguments.has_key('log-to-console'):
+      self.command_line.append('--log-to-console')
+    if self.arguments.has_key('auth-token'):
+      self.command_line.append('--auth-token=%s' % self.arguments['auth-token'])
+    if self.arguments.has_key('https'):
+      self.command_line.append('--https')
+      if self.arguments.has_key('cert-and-key-file'):
+        self.command_line.append('--cert-and-key-file=%s' % os.path.join(
+            constants.CHROME_DIR, self.arguments['cert-and-key-file']))
+      if self.arguments.has_key('ocsp'):
+        self.command_line.append('--ocsp=%s' % self.arguments['ocsp'])
+      if self.arguments.has_key('https-record-resume'):
+        self.command_line.append('--https-record-resume')
+      if self.arguments.has_key('ssl-client-auth'):
+        self.command_line.append('--ssl-client-auth')
+      if self.arguments.has_key('tls-intolerant'):
+        self.command_line.append('--tls-intolerant=%s' %
+                                 self.arguments['tls-intolerant'])
+      if self.arguments.has_key('ssl-client-ca'):
+        for ca in self.arguments['ssl-client-ca']:
+          self.command_line.append('--ssl-client-ca=%s' %
+                                   os.path.join(constants.CHROME_DIR, ca))
+      if self.arguments.has_key('ssl-bulk-cipher'):
+        for bulk_cipher in self.arguments['ssl-bulk-cipher']:
+          self.command_line.append('--ssl-bulk-cipher=%s' % bulk_cipher)
+
+  def run(self):
+    """Spawns testserver.py, sets up forwarding, then idles until Stop()."""
+    logging.info('Start running the thread!')
+    self.wait_event.clear()
+    self._GenerateCommandLineArguments()
+    command = [os.path.join(constants.CHROME_DIR, 'net', 'tools',
+                            'testserver', 'testserver.py')] + self.command_line
+    logging.info('Running: %s', command)
+    self.process = subprocess.Popen(command)
+    if self.process:
+      if self.pipe_out:
+        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+      else:
+        self.is_ready = _CheckPortStatus(self.host_port, True)
+    if self.is_ready:
+      self._test_server_forwarder = Forwarder(self.adb, self.build_type)
+      self._test_server_forwarder.Run(
+          [(0, self.host_port)], self.tool, '127.0.0.1')
+      # Check whether the forwarder is ready on the device.
+      self.is_ready = False
+      # Poll up to four times, backing off 1..4 seconds between attempts.
+      device_port = self._test_server_forwarder.DevicePortForHostPort(
+          self.host_port)
+      if device_port:
+        for timeout in range(1, 5):
+          if ports.IsDevicePortUsed(self.adb, device_port, 'LISTEN'):
+            self.is_ready = True
+            self.forwarder_device_port = device_port
+            break
+          time.sleep(timeout)
+    # Wake up the request handler thread.
+    self.ready_event.set()
+    # Keep thread running until Stop() gets called.
+    while not self.stop_flag:
+      time.sleep(1)
+    # Kill the child only if it is still running.
+    if self.process.poll() is None:
+      self.process.kill()
+    if self._test_server_forwarder:
+      self._test_server_forwarder.Close()
+    self.process = None
+    self.is_ready = False
+    # Close both ends of the startup pipe; the read end lives in this
+    # process, the write end was inherited by the (now dead) child.
+    if self.pipe_out:
+      os.close(self.pipe_in)
+      os.close(self.pipe_out)
+      self.pipe_in = None
+      self.pipe_out = None
+    logging.info('Test-server has died.')
+    self.wait_event.set()
+
+  def Stop(self):
+    """Blocks until the loop has finished.
+
+    Note that this must be called in another thread.
+    """
+    # NOTE(review): |self.process| is first assigned inside run(); calling
+    # Stop() before the thread has started would raise AttributeError --
+    # confirm callers always start() the thread first.
+    if not self.process:
+      return
+    self.stop_flag = True
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET/POST request."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: number of the response status.
+      response_reason: string of reason description of the response.
+      additional_headers: dict of additional headers. Each key is the name of
+                          the header, each value is the content of the header.
+      contents: string of the contents we want to send to client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length as without it the http(s) response will not
+    # be completed properly (and the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    logging.info('Handling request to spawn a test server.')
+    content_type = self.headers.getheader('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.getheader('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except:
+      raise Exception('Bad content-length for start request.')
+    logging.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    logging.info(test_server_argument_json)
+    assert not self.server.test_server_instance
+    ready_event = threading.Event()
+    self.server.test_server_instance = TestServerThread(
+        ready_event,
+        json.loads(test_server_argument_json),
+        self.server.adb,
+        self.server.tool,
+        self.server.build_type)
+    self.server.test_server_instance.setDaemon(True)
+    self.server.test_server_instance.start()
+    ready_event.wait()
+    if self.server.test_server_instance.is_ready:
+      self._SendResponse(200, 'OK', {}, json.dumps(
+          {'port': self.server.test_server_instance.forwarder_device_port,
+           'message': 'started'}))
+      logging.info('Test server is running on port: %d.',
+                   self.server.test_server_instance.host_port)
+    else:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encounter problem during starting a test server.')
+
+  def _KillTestServer(self):
+    """Stops the test server instance."""
+    # There should only ever be one test server at a time. This may do the
+    # wrong thing if we try and start multiple test servers.
+    if not self.server.test_server_instance:
+      return
+    port = self.server.test_server_instance.host_port
+    logging.info('Handling request to kill a test server on port: %d.', port)
+    self.server.test_server_instance.Stop()
+    # Make sure the status of test server is correct before sending response.
+    if _CheckPortStatus(port, False):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      logging.info('Test server on port %d is killed', port)
+    else:
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encounter problem during killing a test server.')
+    self.server.test_server_instance = None
+
+  def do_POST(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    logging.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encounter unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action for GET method is: %s.', action)
+    for param in params:
+      logging.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer()
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve the requests. We don't need to test the status of the test
+      # server when handling ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      logging.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request', {}, '')
+      logging.info('Encounter unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server."""
+
+  def __init__(self, test_server_spawner_port, adb, tool, build_type):
+    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.port = test_server_spawner_port
+    self.server.adb = adb
+    self.server.tool = tool
+    self.server.test_server_instance = None
+    self.server.build_type = build_type
+
+  def _Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+    time.sleep(1)
+
+  def Stop(self):
+    if self.server.test_server_instance:
+      self.server.test_server_instance.Stop()
+    self.server.shutdown()
diff --git a/src/build/android/pylib/cmd_helper.py b/src/build/android/pylib/cmd_helper.py
new file mode 100644
index 0000000..b6bbac7
--- /dev/null
+++ b/src/build/android/pylib/cmd_helper.py
@@ -0,0 +1,77 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A wrapper for subprocess to make calling shell commands easier."""
+
+import os
+import logging
+import subprocess
+
+import constants
+
+def RunCmd(args, cwd=None):
+  """Opens a subprocess to execute a program and returns its return value.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+
+  Returns:
+    Return code from the command execution.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  p = subprocess.Popen(args=args, cwd=cwd)
+  return p.wait()
+
+
+def GetCmdOutput(args, cwd=None, shell=False):
+  """Open a subprocess to execute a program and returns its output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    Captures and returns the command's stdout.
+    Prints the command's stderr to logger (which defaults to stdout).
+  """
+  (_, output) = GetCmdStatusAndOutput(args, cwd, shell)
+  return output
+
+def GetCmdStatusAndOutput(args, cwd=None, shell=False):
+  """Executes a subprocess and returns its exit code and output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    The tuple (exit code, output).
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  p = subprocess.Popen(args=args, cwd=cwd, stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE, shell=shell)
+  stdout, stderr = p.communicate()
+  exit_code = p.returncode
+  if stderr:
+    logging.critical(stderr)
+  logging.info(stdout[:4096])  # Truncate output longer than 4k.
+  return (exit_code, stdout)
+
+class OutDirectory(object):
+  _out_directory = os.path.join(constants.CHROME_DIR, 'out')
+  @staticmethod
+  def set(out_directory):
+    OutDirectory._out_directory = out_directory
+  @staticmethod
+  def get():
+    return OutDirectory._out_directory
diff --git a/src/build/android/pylib/constants.py b/src/build/android/pylib/constants.py
new file mode 100644
index 0000000..69c9f30
--- /dev/null
+++ b/src/build/android/pylib/constants.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+import os
+
+
+# Android package / activity names for the browsers and shells under test.
+CHROME_PACKAGE = 'com.google.android.apps.chrome'
+CHROME_ACTIVITY = 'com.google.android.apps.chrome.Main'
+CHROME_TESTS_PACKAGE = 'com.google.android.apps.chrome.tests'
+LEGACY_BROWSER_PACKAGE = 'com.google.android.browser'
+LEGACY_BROWSER_ACTIVITY = 'com.android.browser.BrowserActivity'
+CONTENT_SHELL_PACKAGE = "org.chromium.content_shell"
+CONTENT_SHELL_ACTIVITY = "org.chromium.content_shell.ContentShellActivity"
+CHROME_SHELL_PACKAGE = 'org.chromium.chrome.browser.test'
+CHROMIUM_TEST_SHELL_PACKAGE = 'org.chromium.chrome.testshell'
+
+# Absolute path of the Chromium source checkout (three levels above this
+# file, i.e. the directory containing build/, net/, chrome/, ...).
+CHROME_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                          '..', '..', '..'))
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# Lighttpd server will attempt to use 9000 as default port, if unavailable it
+# will find a free port from 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+
+# The net test server is started from port 10000. Reserving the 20000 ports
+# in this range should be enough to allocate a distinct port to each
+# test-server-based test.
+TEST_SERVER_PORT_FIRST = 10000
+TEST_SERVER_PORT_LAST = 30000
+# A file to record next valid port of test server.
+TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
+TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = '/data/data/' + CHROME_PACKAGE + '/files'
+
+# Directory on host where screenshots are saved.
+SCREENSHOTS_DIR = os.path.join(CHROME_DIR, 'out_screenshots')
diff --git a/src/build/android/pylib/debug_info.py b/src/build/android/pylib/debug_info.py
new file mode 100644
index 0000000..6f0f55a
--- /dev/null
+++ b/src/build/android/pylib/debug_info.py
@@ -0,0 +1,196 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collect debug info for a test."""
+
+import datetime
+import logging
+import os
+import re
+import shutil
+import string
+import subprocess
+import tempfile
+
+import cmd_helper
+
+
+# Device-side directory where Android writes native crash dumps (tombstones).
+TOMBSTONE_DIR = '/data/tombstones/'
+
+
+class GTestDebugInfo(object):
+  """A helper class to collect related debug information for a gtest.
+
+  Debug info is collected in two steps:
+  - first, object(s) of this class (one per device), accumulate logs
+  and screenshots in tempdir.
+  - once the test has finished, call ZipAndCleanResults to create
+  a zip containing the logs from all devices, and clean them up.
+
+  Args:
+    adb: ADB interface the tests are using.
+    device: Serial# of the Android device in which the specified gtest runs.
+    testsuite_name: Name of the specified gtest.
+    gtest_filter: Test filter used by the specified gtest.
+  """
+
+  def __init__(self, adb, device, testsuite_name, gtest_filter):
+    """Initializes the DebugInfo class for a specified gtest."""
+    self.adb = adb
+    self.device = device
+    self.testsuite_name = testsuite_name
+    self.gtest_filter = gtest_filter
+    self.logcat_process = None
+    self.has_storage = False
+    self.log_dir = os.path.join(tempfile.gettempdir(),
+                                'gtest_debug_info',
+                                self.testsuite_name,
+                                self.device)
+    if not os.path.exists(self.log_dir):
+      os.makedirs(self.log_dir)
+    self.log_file_name = os.path.join(self.log_dir,
+                                      self._GeneratePrefixName() + '_log.txt')
+    self.old_crash_files = self._ListCrashFiles()
+
+  def _GetSignatureFromGTestFilter(self):
+    """Gets a signature from gtest_filter.
+
+    Signature is used to identify the tests from which we collect debug
+    information.
+
+    Returns:
+      A signature string. Returns 'all' if there is no gtest filter.
+    """
+    if not self.gtest_filter:
+      return 'all'
+    filename_chars = "-_()%s%s" % (string.ascii_letters, string.digits)
+    signature = ''.join(c for c in self.gtest_filter if c in filename_chars)
+    if len(signature) > 64:
+      # The signature can't be too long, as it'll be part of a file name.
+      signature = signature[:64]
+    return signature
+
+  def _GeneratePrefixName(self):
+    """Generates a prefix name for debug information of the test.
+
+    The prefix name consists of the following:
+    (1) root name of test_suite_base.
+    (2) device serial number.
+    (3) prefix of filter signature generate from gtest_filter.
+    (4) date & time when calling this method.
+
+    Returns:
+      Name of the log file.
+    """
+    return (os.path.splitext(self.testsuite_name)[0] + '_' + self.device + '_' +
+            self._GetSignatureFromGTestFilter() + '_' +
+            datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S-%f'))
+
+  def StartRecordingLog(self, clear=True, filters=['*:v']):
+    """Starts recording logcat output to a file.
+
+    This call should come before running test, with calling StopRecordingLog
+    following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    self.StopRecordingLog()
+    if clear:
+      cmd_helper.RunCmd(['adb', '-s', self.device, 'logcat', '-c'])
+    logging.info('Start dumping log to %s ...', self.log_file_name)
+    command = 'adb -s %s logcat -v threadtime %s > %s' % (self.device,
+                                                          ' '.join(filters),
+                                                          self.log_file_name)
+    self.logcat_process = subprocess.Popen(command, shell=True)
+
+  def StopRecordingLog(self):
+    """Stops an existing logcat recording subprocess."""
+    if not self.logcat_process:
+      return
+    # Cannot evaluate directly as 0 is a possible value.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process = None
+    logging.info('Finish log dump.')
+
+  def TakeScreenshot(self, identifier_mark):
+    """Takes a screen shot from current specified device.
+
+    Args:
+      identifier_mark: A string to identify the screen shot DebugInfo will take.
+                       It will be part of filename of the screen shot. Empty
+                       string is acceptable.
+    Returns:
+      Returns the file name on the host of the screenshot if successful,
+      None otherwise.
+    """
+    assert isinstance(identifier_mark, str)
+    screenshot_path = os.path.join(os.getenv('ANDROID_HOST_OUT', ''),
+                                   'bin',
+                                   'screenshot2')
+    if not os.path.exists(screenshot_path):
+      logging.error('Failed to take screen shot from device %s', self.device)
+      return None
+    shot_path = os.path.join(self.log_dir, ''.join([self._GeneratePrefixName(),
+                                                    identifier_mark,
+                                                    '_screenshot.png']))
+    re_success = re.compile(re.escape('Success.'), re.MULTILINE)
+    if re_success.findall(cmd_helper.GetCmdOutput([screenshot_path, '-s',
+                                                   self.device, shot_path])):
+      logging.info('Successfully took a screen shot to %s', shot_path)
+      return shot_path
+    logging.error('Failed to take screen shot from device %s', self.device)
+    return None
+
+  def _ListCrashFiles(self):
+    """Collects crash files from current specified device.
+
+    Returns:
+      A dict of crash files in format {"name": (size, lastmod), ...}.
+    """
+    return self.adb.ListPathContents(TOMBSTONE_DIR)
+
+  def ArchiveNewCrashFiles(self):
+    """Archives the crash files newly generated until calling this method."""
+    current_crash_files = self._ListCrashFiles()
+    files = []
+    for f in current_crash_files:
+      if f not in self.old_crash_files:
+        files += [f]
+      elif current_crash_files[f] != self.old_crash_files[f]:
+        # Tombstones dir can only have maximum 10 files, so we need to compare
+        # size and timestamp information of file if the file exists.
+        files += [f]
+    if files:
+      logging.info('New crash file(s):%s' % ' '.join(files))
+      for f in files:
+        self.adb.Adb().Pull(TOMBSTONE_DIR + f,
+                            os.path.join(self.log_dir, f))
+
+  @staticmethod
+  def ZipAndCleanResults(dest_dir, dump_file_name):
+    """A helper method to zip all debug information results into a dump file.
+
+    Args:
+      dest_dir: Dir path in where we put the dump file.
+      dump_file_name: Desired name of the dump file. This method makes sure
+                      '.zip' will be added as ext name.
+    """
+    if not dest_dir or not dump_file_name:
+      return
+    cmd_helper.RunCmd(['mkdir', '-p', dest_dir])
+    log_basename = os.path.basename(dump_file_name)
+    log_zip_file = os.path.join(dest_dir,
+                                os.path.splitext(log_basename)[0] + '.zip')
+    logging.info('Zipping debug dumps into %s ...', log_zip_file)
+    # Add new dumps into the zip file. The zip may exist already if previous
+    # gtest also dumps the debug information. It's OK since we clean up the old
+    # dumps in each build step.
+    log_src_dir = os.path.join(tempfile.gettempdir(), 'gtest_debug_info')
+    cmd_helper.RunCmd(['zip', '-q', '-r', log_zip_file, log_src_dir])
+    assert os.path.exists(log_zip_file)
+    assert os.path.exists(log_src_dir)
+    shutil.rmtree(log_src_dir)
diff --git a/src/build/android/pylib/device_stats_monitor.html b/src/build/android/pylib/device_stats_monitor.html
new file mode 100644
index 0000000..b3abbb0
--- /dev/null
+++ b/src/build/android/pylib/device_stats_monitor.html
@@ -0,0 +1,143 @@
+<!DOCTYPE html>
+<!--
+ * Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+ * source code is governed by a BSD-style license that can be found in the
+ * LICENSE file.
+-->
+<html>
+<head>
+  <title>Device Stats Monitor</title>
+  <script type="text/javascript" src="http://www.google.com/jsapi"></script>
+  <style>
+  body {
+    font-family: sans-serif
+  }
+  </style>
+</head>
+<body>
+<h2>Device Stats Monitor</h2>
+<ul>
+<li>Pass path to trace data via the <code>results</code> querystring param.
+<li>Combine charts with the <code>combine</code> querystring param (e.g. <code>&combine=sectors_read,sectors_written</code>).
+<li>Use <code>stacked=true</code> to stack combined charts instead of overlaying (default).
+</ul>
+</body>
+<script>
+google.load("visualization", "1", {packages:["corechart"]});
+
/**
 * @returns The querystring param value for |name| or an empty string.
 */
function getQuerystringParam(name) {
  // Escape literal [ and ] so |name| can be embedded in a regex.
  name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]");
  var match = new RegExp("[\\?&]" + name + "=([^&#]*)").exec(
      window.location.search);
  return match == null ?
      "" : decodeURIComponent(match[1].replace(/\+/g, " "));
}
+
/**
 * @returns An array of the keys in |obj|, sorted.
 */
function sortedKeys(obj) {
  var result = [];
  for (var key in obj)
    result.push(key);
  result.sort();
  return result;
}
+
/**
 * Removes every occurrence of each argument from the array, in place.
 * @returns The array itself, for chaining.
 */
Array.prototype.remove = function() {
  var args = arguments;
  var remaining = args.length;
  var idx, target;
  while (remaining && this.length) {
    target = args[--remaining];
    while ((idx = this.indexOf(target)) != -1) {
      this.splice(idx, 1);
    }
  }
  return this;
}
+
/**
 * Displays a new chart.
 *
 * @param {Number} hz Number of samples per second of the data.
 * @param {String} name Comma-separated series names shown in the legend.
 * @param {Number[][]} values One array of sampled values per series.
 * @param {String} units Label for the vertical axis.
 * @param {Boolean} stacked Whether to stack the series instead of
 *     overlaying them.
 */
function displayChart(hz, name, values, units, stacked) {
  var data = new google.visualization.DataTable();
  // First column is the sample time in ms; one more column per series.
  data.addColumn('number', 'ms');
  var names = name.split(',');
  for (var i = 0; i < names.length; i++) {
    data.addColumn('number', names[i]);
  }

  // Build one row per sample: [time_ms, series0, series1, ...].
  var rows = [];
  var interval = 1000.0 / hz;
  for (var i = 0; i < values[0].length; i++) {
    var row = [i*interval];
    for (var j = 0; j < values.length; j++) {
      row.push(values[j][i]);
    }
    rows.push(row);
  }
  data.addRows(rows);

  var options = {
    hAxis: {title: 'ms (' + hz + 'hz)'},
    isStacked: stacked,
    legend: {position: 'top'},
    vAxis: {title: units},
  };

  // Append a full-width chart container to the document.
  var elem = document.createElement('DIV');
  elem.style = 'width:100%;height:500px';
  document.body.appendChild(elem);
  var chart = new google.visualization.AreaChart(elem);
  chart.draw(data, options);
}
+
/**
 * Displays all charts.
 *
 * Invoked by the results script. JSONP is used to avoid security
 * restrictions on XHRs for file:// URLs.
 *
 * @param {Number} hz Number of samples per second of the data.
 * @param {Object} results Map of series name to array of sampled values.
 * @param {Object} units Map of series name to its unit label.
 */
function display(hz, results, units) {
  var combine = getQuerystringParam('combine');
  var keys = sortedKeys(results);
  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];
    var name = key;
    var values = [results[key]];
    // Note: for a combined chart the unit label of the first matched key
    // is used for the whole chart.
    var unit = units[key];
    if (combine.indexOf(key) >= 0) {
      // This key belongs to the combined chart: plot all combined series
      // together, and drop their keys from |keys|. Removing entries shifts
      // later keys down, so back up |i| to re-examine this index on the
      // next pass.
      i--;
      name = combine;
      values = [];
      var combined_keys = combine.split(',');
      for (var j = 0; j < combined_keys.length; j++) {
        values.push(results[combined_keys[j]]);
        keys.remove(combined_keys[j]);
      }
    }
    displayChart(hz, name, values, unit, !!getQuerystringParam('stacked'));
  }
}
+
// JSONP-style bootstrap: inject the results script, whose contents invoke
// display(...) above when it executes.
var resultsPath = getQuerystringParam('results');
if (resultsPath)
  document.write("<script src='" + resultsPath + "'></"+"script>");
else
  document.write("Please specify results querystring param.");
+</script>
+</html>
diff --git a/src/build/android/pylib/device_stats_monitor.py b/src/build/android/pylib/device_stats_monitor.py
new file mode 100644
index 0000000..8be4efa
--- /dev/null
+++ b/src/build/android/pylib/device_stats_monitor.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for iotop/top style profiling for android."""
+
+import collections
+import json
+import os
+import subprocess
+import sys
+import urllib
+
+import constants
+import io_stats_parser
+
+
class DeviceStatsMonitor(object):
  """Class for collecting device stats such as IO/CPU usage.

  Args:
      adb: Instance of AndroidCommands.
      hz: Frequency at which to sample device stats.
      build_type: 'Release' or 'Debug'.
  """

  DEVICE_PATH = constants.TEST_EXECUTABLE_DIR + '/device_stats_monitor'
  PROFILE_PATH = (constants.DEVICE_PERF_OUTPUT_DIR +
      '/device_stats_monitor.profile')
  RESULT_VIEWER_PATH = os.path.abspath(os.path.join(
      os.path.dirname(os.path.realpath(__file__)), 'device_stats_monitor.html'))

  # Field definitions: http://www.linuxhowtos.org/System/procstat.htm
  # Hoisted to class level so the namedtuple type is created once instead of
  # on every _ParseCpuStatsLine call.
  _CPU_STATS = collections.namedtuple('CpuStats',
                                      ['device',
                                       'user',
                                       'nice',
                                       'system',
                                       'idle',
                                       'iowait',
                                       'irq',
                                       'softirq',
                                      ])

  def __init__(self, adb, hz, build_type):
    self._adb = adb
    host_path = os.path.abspath(os.path.join(
        constants.CHROME_DIR, 'out', build_type, 'device_stats_monitor'))
    # Deploy the monitor binary to the device (skipped when already current).
    self._adb.PushIfNeeded(host_path, DeviceStatsMonitor.DEVICE_PATH)
    self._hz = hz

  def Start(self):
    """Starts device stats monitor on the device."""
    # Truncate any stale profile before sampling begins.
    self._adb.SetFileContents(DeviceStatsMonitor.PROFILE_PATH, '')
    self._process = subprocess.Popen(
        ['adb', 'shell', '%s --hz=%d %s' % (
            DeviceStatsMonitor.DEVICE_PATH, self._hz,
            DeviceStatsMonitor.PROFILE_PATH)])

  def StopAndCollect(self, output_path):
    """Stops monitoring and saves results.

    Args:
      output_path: Path to save results.

    Returns:
      String of URL to load results in browser.
    """
    assert self._process
    self._adb.KillAll(DeviceStatsMonitor.DEVICE_PATH)
    self._process.wait()
    # NOTE(review): assumes GetFileContents returns the profile as an
    # iterable of lines — confirm against AndroidCommands.
    profile = self._adb.GetFileContents(DeviceStatsMonitor.PROFILE_PATH)

    # Convert each sample into a delta against the previous sample.
    results = collections.defaultdict(list)
    last_io_stats = None
    last_cpu_stats = None
    for line in profile:
      if ' mmcblk0 ' in line:
        stats = io_stats_parser.ParseIoStatsLine(line)
        if last_io_stats:
          results['sectors_read'].append(stats.num_sectors_read -
                                         last_io_stats.num_sectors_read)
          results['sectors_written'].append(stats.num_sectors_written -
                                            last_io_stats.num_sectors_written)
        last_io_stats = stats
      elif line.startswith('cpu '):
        stats = self._ParseCpuStatsLine(line)
        if last_cpu_stats:
          results['user'].append(stats.user - last_cpu_stats.user)
          results['nice'].append(stats.nice - last_cpu_stats.nice)
          results['system'].append(stats.system - last_cpu_stats.system)
          results['idle'].append(stats.idle - last_cpu_stats.idle)
          results['iowait'].append(stats.iowait - last_cpu_stats.iowait)
          results['irq'].append(stats.irq - last_cpu_stats.irq)
          results['softirq'].append(stats.softirq - last_cpu_stats.softirq)
        last_cpu_stats = stats
    units = {
      'sectors_read': 'sectors',
      'sectors_written': 'sectors',
      'user': 'jiffies',
      'nice': 'jiffies',
      'system': 'jiffies',
      'idle': 'jiffies',
      'iowait': 'jiffies',
      'irq': 'jiffies',
      'softirq': 'jiffies',
    }
    with open(output_path, 'w') as f:
      # BUG FIX: |units| was previously interpolated with a bare %s, which
      # wrote a Python dict repr rather than JSON; json.dumps keeps the
      # emitted display(...) call valid regardless of key/value content.
      f.write('display(%d, %s, %s);' % (
          self._hz, json.dumps(results), json.dumps(units)))
    return 'file://%s?results=file://%s' % (
        DeviceStatsMonitor.RESULT_VIEWER_PATH, urllib.quote(output_path))

  @staticmethod
  def _ParseCpuStatsLine(line):
    """Parses a 'cpu ...' line of /proc/stat into a CpuStats named tuple."""
    fields = line.split()
    return DeviceStatsMonitor._CPU_STATS._make(
        [fields[0]] + [int(f) for f in fields[1:8]])
diff --git a/src/build/android/pylib/fake_dns.py b/src/build/android/pylib/fake_dns.py
new file mode 100644
index 0000000..1c64490
--- /dev/null
+++ b/src/build/android/pylib/fake_dns.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import android_commands
+import constants
+import logging
+import os
+import subprocess
+import time
+
+
class FakeDns(object):
  """Wrapper class for the fake_dns tool."""
  _FAKE_DNS_PATH = constants.TEST_EXECUTABLE_DIR + '/fake_dns'

  def __init__(self, adb, build_type):
    """Initializes the wrapper.

    Args:
      adb: the AndroidCommands to use.
      build_type: 'Release' or 'Debug'.
    """
    self._adb = adb
    self._build_type = build_type
    self._original_dns = None
    self._fake_dns = None

  def _PushAndStartFakeDns(self):
    """Starts the fake_dns server that replies all name queries 127.0.0.1.

    Returns:
      subprocess instance connected to the fake_dns process on the device.
    """
    host_binary = os.path.join(
        constants.CHROME_DIR, 'out', self._build_type, 'fake_dns')
    self._adb.PushIfNeeded(host_binary, FakeDns._FAKE_DNS_PATH)
    serial = self._adb._adb.GetSerialNumber()
    return subprocess.Popen(
        ['adb', '-s', serial, 'shell', '%s -D' % FakeDns._FAKE_DNS_PATH])

  def SetUp(self):
    """Configures the system to point to a DNS server that replies 127.0.0.1.

    This can be used in combination with the forwarder to forward all web
    traffic to a replay server.

    The TearDown() method will perform all cleanup.
    """
    # Route the 8.8.8.x range through loopback — presumably so queries aimed
    # at Google DNS also reach the local fake server.
    self._adb.RunShellCommand('ip route add 8.8.8.0/24 via 127.0.0.1 dev lo')
    self._fake_dns = self._PushAndStartFakeDns()
    # Remember the device's real resolver so TearDown() can restore it.
    self._original_dns = self._adb.RunShellCommand('getprop net.dns1')[0]
    self._adb.RunShellCommand('setprop net.dns1 127.0.0.1')
    time.sleep(2)  # Time for server to start and the setprop to take effect.

  def TearDown(self):
    """Shuts down the fake_dns."""
    if not self._fake_dns:
      return
    # Restore a sane resolver; fall back to Google DNS when the saved value
    # is unusable (empty, or still pointing at the fake server).
    if not (self._original_dns and self._original_dns != '127.0.0.1'):
      logging.warning('Bad original DNS, falling back to Google DNS.')
      self._original_dns = '8.8.8.8'
    self._adb.RunShellCommand('setprop net.dns1 %s' % self._original_dns)
    self._fake_dns.kill()
    self._adb.RunShellCommand('ip route del 8.8.8.0/24 via 127.0.0.1 dev lo')
diff --git a/src/build/android/pylib/findbugs.py b/src/build/android/pylib/findbugs.py
new file mode 100755
index 0000000..996c0ee
--- /dev/null
+++ b/src/build/android/pylib/findbugs.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import re
+import shlex
+import subprocess
+import sys
+
+
def _PrintMessage(warnings, title, action, known_bugs_file):
  # Prints a banner listing |warnings| under a '<title> warnings.' heading,
  # followed by the suggested |action| for |known_bugs_file|.
  # Does nothing when |warnings| is empty.
  # (Python 2 print statements, consistent with the rest of this script.)
  if warnings:
    print
    print '*' * 80
    print '%s warnings.' % title
    print '%s %s' % (action, known_bugs_file)
    print '-' * 80
    for warning in warnings:
      print warning
    print '-' * 80
    print
+
+
+def _StripLineNumbers(current_warnings):
+  re_line = r':\[line.*?\]$'
+  return [re.sub(re_line, '', x) for x in current_warnings]
+
+
def _DiffKnownWarnings(current_warnings_set, known_bugs_file):
  """Compares current FindBugs warnings against the known-bugs baseline.

  Prints new warnings (produced now but absent from the baseline) and
  obsolete ones (in the baseline but no longer produced), then a summary.

  Args:
    current_warnings_set: set of warning strings from this run.
    known_bugs_file: path to the baseline file, one warning per line.

  Returns:
    Number of new plus obsolete warnings; 0 when the baseline is up to date.
  """
  with open(known_bugs_file, 'r') as known_bugs:
    known_bugs_set = set(known_bugs.read().splitlines())

  new_warnings = current_warnings_set - known_bugs_set
  _PrintMessage(sorted(new_warnings), 'New', 'Please fix, or perhaps add to',
                known_bugs_file)

  obsolete_warnings = known_bugs_set - current_warnings_set
  _PrintMessage(sorted(obsolete_warnings), 'Obsolete', 'Please remove from',
                known_bugs_file)

  count = len(new_warnings) + len(obsolete_warnings)
  if count:
    # 's' * (count > 1) pluralizes 'warning' (repeats 's' zero or one time).
    print '*** %d FindBugs warning%s! ***' % (count, 's' * (count > 1))
    if len(new_warnings):
      print '*** %d: new ***' % len(new_warnings)
    if len(obsolete_warnings):
      print '*** %d: obsolete ***' % len(obsolete_warnings)
    print
    print 'Alternatively,  rebaseline with --rebaseline command option'
    print
  else:
    print 'No new FindBugs warnings.'
  print
  return count
+
+
+def _Rebaseline(current_warnings_set, known_bugs_file):
+  with file(known_bugs_file, 'w') as known_bugs:
+    for warning in sorted(current_warnings_set):
+      print >>known_bugs, warning
+  return 0
+
+
+def _GetChromeClasses(release_version):
+  chrome_src = os.getenv('CHROME_SRC')
+  version = 'Debug'
+  if release_version:
+    version = 'Release'
+  path = os.path.join(chrome_src, 'out', version)
+  cmd = 'find %s -name "*.class"' % path
+  proc = subprocess.Popen(shlex.split(cmd),
+                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  out, err = proc.communicate()
+
+  if not out:
+    print 'No classes found in %s' % path
+  return out
+
+
def _Run(exclude, known_bugs, classes_to_analyze, auxiliary_classes,
         rebaseline, release_version, findbug_args):
  """Runs FindBugs over the Chrome classes and reports the warnings.

  Args:
    exclude: the exclude xml file, refer to FindBugs's -exclude command option.
    known_bugs: the text file of known bugs. The bugs in it will not be
                reported.
    classes_to_analyze: the list of classes need to analyze, refer to FindBug's
                        -onlyAnalyze command line option.
    auxiliary_classes: the classes help to analyze, refer to FindBug's
                       -auxclasspath command line option.
    rebaseline: True if the known_bugs file needs rebaseline.
    release_version: True if the release version needs check, otherwise check
                     debug version.
    findbug_args: additional command line options to pass to FindBugs.

  Returns:
    0 after a rebaseline; otherwise the count of new + obsolete warnings,
    or 1 when no classes were found.
  """
  chrome_src = os.getenv('CHROME_SRC')
  sdk_root = os.getenv('ANDROID_SDK_ROOT')
  sdk_version = os.getenv('ANDROID_SDK_VERSION')

  # The Android framework jar plus any caller-supplied jars form the
  # auxiliary classpath.
  system_classes = []
  system_classes.append(os.path.join(sdk_root, 'platforms',
                                     'android-%s' % sdk_version, 'android.jar'))
  if auxiliary_classes:
    for classes in auxiliary_classes:
      system_classes.append(os.path.abspath(classes))

  cmd = '%s -textui -sortByClass ' % os.path.join(chrome_src, 'third_party',
                                                  'findbugs', 'bin', 'findbugs')
  cmd = '%s -pluginList %s' % (cmd, os.path.join(chrome_src, 'tools', 'android',
                                                 'findbugs_plugin', 'lib',
                                                 'chromiumPlugin.jar'))
  if len(system_classes):
    cmd = '%s -auxclasspath %s ' % (cmd, ':'.join(system_classes))

  if classes_to_analyze:
    cmd = '%s -onlyAnalyze %s ' % (cmd, classes_to_analyze)

  if exclude:
    cmd = '%s -exclude %s ' % (cmd, os.path.abspath(exclude))

  if findbug_args:
    # BUG FIX: this previously referenced the misspelled name 'fingbug_args',
    # raising NameError whenever extra FindBugs arguments were supplied.
    cmd = '%s %s ' % (cmd, findbug_args)

  chrome_classes = _GetChromeClasses(release_version)
  if not chrome_classes:
    return 1
  cmd = '%s %s ' % (cmd, chrome_classes)

  proc = subprocess.Popen(shlex.split(cmd),
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  out, err = proc.communicate()
  current_warnings_set = set(_StripLineNumbers(filter(None, out.splitlines())))

  if rebaseline:
    return _Rebaseline(current_warnings_set, known_bugs)
  else:
    return _DiffKnownWarnings(current_warnings_set, known_bugs)
+
def Run(options):
  """Resolves configuration defaults from |options| and runs FindBugs.

  Explicit --exclude / --known-bugs settings win; otherwise the conventional
  files under --base-dir are used when one was given.

  Args:
    options: parsed options from GetCommonParser().

  Returns:
    Exit code from _Run().
  """
  exclude_file = options.exclude or (
      os.path.join(options.base_dir, 'findbugs_exclude.xml')
      if options.base_dir else None)

  known_bugs_file = options.known_bugs or (
      os.path.join(options.base_dir, 'findbugs_known_bugs.txt')
      if options.base_dir else None)

  auxclasspath = (options.auxclasspath.split(':')
                  if options.auxclasspath else None)

  return _Run(exclude_file, known_bugs_file, options.only_analyze, auxclasspath,
              options.rebaseline, options.release_build, options.findbug_args)
+
+
def GetCommonParser():
  """Builds the option parser shared by the FindBugs wrapper scripts."""
  # (short flag, long flag, action, dest, help)
  option_specs = [
      ('-r', '--rebaseline', 'store_true', 'rebaseline',
       'Rebaseline known findbugs issues.'),
      ('-a', '--auxclasspath', 'store', 'auxclasspath',
       'Set aux classpath for analysis.'),
      ('-o', '--only-analyze', 'store', 'only_analyze',
       'Only analyze the given classes and packages.'),
      ('-e', '--exclude', 'store', 'exclude',
       'Exclude bugs matching given filter.'),
      ('-k', '--known-bugs', 'store', 'known_bugs',
       'Not report the bugs in the given file.'),
      ('-l', '--release-build', 'store_true', 'release_build',
       'Analyze release build instead of debug.'),
      ('-f', '--findbug-args', 'store', 'findbug_args',
       'Additional findbug arguments.'),
      ('-b', '--base-dir', 'store', 'base_dir',
       'Base directory for configuration file.'),
  ]
  parser = optparse.OptionParser()
  for short_flag, long_flag, action, dest, help_text in option_specs:
    # All options default to None (optparse's default), so the table can
    # stay uniform for both 'store' and 'store_true' actions.
    parser.add_option(short_flag, long_flag, action=action, default=None,
                      dest=dest, help=help_text)
  return parser
+
def CheckEnvironment():
  """Verifies the environment variables the FindBugs wrapper needs are set.

  Returns:
    True when CHROME_SRC, ANDROID_SDK_ROOT and ANDROID_SDK_VERSION are all
    present; False (after printing guidance) otherwise.
  """
  if not (os.getenv('CHROME_SRC') and os.getenv('ANDROID_SDK_ROOT') and
          os.getenv('ANDROID_SDK_VERSION')):
    # Parenthesized single-argument prints behave identically on
    # Python 2 and 3.
    print('Your build environment is not set up correctly.')
    print('Please source build/android/envsetup.sh.')
    return False
  return True
+
def main(argv):
  """Command line entry point.

  Args:
    argv: full argument vector; argv[0] is the program name.

  Returns:
    Exit code from Run().
  """
  parser = GetCommonParser()
  # BUG FIX: parse the explicit |argv| instead of implicitly reading
  # sys.argv, so the function behaves the same when invoked
  # programmatically. Identical for the main(sys.argv) call below.
  options, _ = parser.parse_args(argv[1:])

  return Run(options)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/pylib/flag_changer.py b/src/build/android/pylib/flag_changer.py
new file mode 100644
index 0000000..8b8dbca
--- /dev/null
+++ b/src/build/android/pylib/flag_changer.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import constants
+import traceback
+import warnings
+
+
+# Location where chrome reads command line flags from
+CHROME_COMMAND_FILE = '/data/local/chrome-command-line'
+
+class FlagChanger(object):
+  """Changes the flags Chrome runs with.
+
+  There are two different use cases for this file:
+  * Flags are permanently set by calling Set().
+  * Flags can be temporarily set for a particular set of unit tests.  These
+    tests should call Restore() to revert the flags to their original state
+    once the tests have completed.
+  """
+
+  def __init__(self, android_cmd):
+    self._android_cmd = android_cmd
+
+    # Save the original flags.
+    self._orig_line = self._android_cmd.GetFileContents(CHROME_COMMAND_FILE)
+    if self._orig_line:
+      self._orig_line = self._orig_line[0].strip()
+
+    # Parse out the flags into a list to facilitate adding and removing flags.
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+
+  def Get(self):
+    """Returns list of current flags."""
+    return self._current_flags
+
+  def Set(self, flags):
+    """Replaces all flags on the current command line with the flags given.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    self._current_flags = flags
+    self._UpdateCommandLineFile()
+
+  def AddFlags(self, flags):
+    """Appends flags to the command line if they aren't already there.
+
+    Args:
+      flags: A list of flags to add on, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    # Avoid appending flags that are already present.
+    for flag in flags:
+      if flag not in self._current_flags:
+        self._current_flags.append(flag)
+    self._UpdateCommandLineFile()
+
+  def RemoveFlags(self, flags):
+    """Removes flags from the command line, if they exist.
+
+    Args:
+      flags: A list of flags to remove, eg. ['--single-process'].  Note that we
+             expect a complete match when removing flags; if you want to remove
+             a switch with a value, you must use the exact string used to add
+             it in the first place.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    for flag in flags:
+      if flag in self._current_flags:
+        self._current_flags.remove(flag)
+    self._UpdateCommandLineFile()
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+    self._UpdateCommandLineFile()
+
+  def _UpdateCommandLineFile(self):
+    """Writes out the command line to the file, or removes it if empty."""
+    print "Current flags: ", self._current_flags
+
+    if self._current_flags:
+      self._android_cmd.SetFileContents(CHROME_COMMAND_FILE,
+                                        'chrome ' +
+                                        ' '.join(self._current_flags))
+    else:
+      self._android_cmd.RunShellCommand('rm ' + CHROME_COMMAND_FILE)
+
+  def _TokenizeFlags(self, line):
+    """Changes the string containing the command line into a list of flags.
+
+    Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
+    * Flags are split using whitespace, unless the whitespace is within a
+      pair of quotation marks.
+    * Unlike the Java version, we keep the quotation marks around switch
+      values since we need them to re-create the file when new flags are
+      appended.
+
+    Args:
+      line: A string containing the entire command line.  The first token is
+            assumed to be the program name.
+    """
+    if not line:
+      return []
+
+    tokenized_flags = []
+    current_flag = ""
+    within_quotations = False
+
+    # Move through the string character by character and build up each flag
+    # along the way.
+    for c in line.strip():
+      if c is '"':
+        if len(current_flag) > 0 and current_flag[-1] == '\\':
+          # Last char was a backslash; pop it, and treat this " as a literal.
+          current_flag = current_flag[0:-1] + '"'
+        else:
+          within_quotations = not within_quotations
+          current_flag += c
+      elif not within_quotations and (c is ' ' or c is '\t'):
+        if current_flag is not "":
+          tokenized_flags.append(current_flag)
+          current_flag = ""
+      else:
+        current_flag += c
+
+    # Tack on the last flag.
+    if not current_flag:
+      if within_quotations:
+        warnings.warn("Unterminated quoted string: " + current_flag)
+    else:
+      tokenized_flags.append(current_flag)
+
+    # Return everything but the program name.
+    return tokenized_flags[1:]
diff --git a/src/build/android/pylib/flakiness_dashboard_results_uploader.py b/src/build/android/pylib/flakiness_dashboard_results_uploader.py
new file mode 100644
index 0000000..900af4c
--- /dev/null
+++ b/src/build/android/pylib/flakiness_dashboard_results_uploader.py
@@ -0,0 +1,158 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+sys.path.append(os.path.join(sys.path[0], '..', '..', 'third_party',
+                             'WebKit', 'Tools', 'Scripts'))
+from webkitpy.common.system import executive, filesystem
+from webkitpy.layout_tests.layout_package import json_results_generator
+
+
+# The JSONResultsGenerator gets the filesystem.join operation from the Port
+# object. Creating a Port object requires specifying information that only
+# makes sense for running WebKit layout tests, so we provide a dummy object
+# that contains the fields required by the generator.
class PortDummy(object):
  """Minimal stand-in for a WebKit Port object.

  JSONResultsGenerator only reads the executive/filesystem fields, so this
  dummy avoids constructing a full layout-test Port.
  """

  def __init__(self):
    self._executive = executive.Executive()
    self._filesystem = filesystem.FileSystem()
+
+
class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
  """Writes test results to a JSON file and handles uploading that file to
  the test results server.
  """
  def __init__(self, port, builder_name, build_name, build_number, tmp_folder,
               test_results_map, test_results_server, test_type, master_name):
    """Forwards configuration to the WebKit base generator.

    The JSON files are written under |tmp_folder|, and the source tree is
    described as two repositories (webkit and chrome) for revision stamping.
    """
    super(JSONResultsGenerator, self).__init__(
        port=port,
        builder_name=builder_name,
        build_name=build_name,
        build_number=build_number,
        results_file_base_path=tmp_folder,
        builder_base_url=None,
        test_results_map=test_results_map,
        svn_repositories=(('webkit', 'third_party/WebKit'),
                          ('chrome', '.')),
        test_results_server=test_results_server,
        test_type=test_type,
        master_name=master_name)

  #override
  def _get_modifier_char(self, test_name):
    # Unknown tests report NO_DATA; otherwise use the modifier stored on the
    # TestResult by ResultsUploader.AddResults.
    if test_name not in self._test_results_map:
      return self.__class__.NO_DATA_RESULT

    return self._test_results_map[test_name].modifier

  #override
  def _get_svn_revision(self, in_directory):
    """Returns the git revision for the given directory.

    Args:
      in_directory: The directory where git is to be run.

    Returns:
      The 10-character abbreviated HEAD hash, or '' when |in_directory| is
      not a git checkout.
    """
    git_dir =  self._filesystem.join(os.environ.get('CHROME_SRC'),
                                     in_directory,
                                     '.git')
    if self._filesystem.exists(git_dir):
      # Note: Not thread safe: http://bugs.python.org/issue2320
      output = subprocess.Popen(
          ['git', '--git-dir=%s' % git_dir, 'show-ref', '--head',
           '--hash=10', 'HEAD'],
          stdout=subprocess.PIPE).communicate()[0].strip()
      return output
    return ''
+
+
class ResultsUploader(object):
  """Handles uploading buildbot tests results to the flakiness dashboard."""

  def __init__(self, tests_type):
    """Reads the buildbot identity from the environment.

    Args:
      tests_type: the type of the tests (shown by the dashboard).

    Raises:
      Exception: when BUILDBOT_BUILDERNAME is unset (i.e. not on a bot).
    """
    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
    self._tests_type = tests_type
    self._build_name = 'chromium-android'

    if not self._builder_name:
      # BUG FIX: the two string literals previously concatenated without a
      # space ('...serverfrom your...').
      raise Exception('You should not be uploading tests results to the '
                      'server from your local machine.')

    buildbot_branch = os.environ.get('BUILDBOT_BRANCH') or 'master'
    self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
    self._test_results_map = {}

  def AddResults(self, test_results):
    """Converts |test_results| into dashboard TestResults and stores them.

    Args:
      test_results: object carrying ok/failed/crashed/unknown result lists.
    """
    conversion_map = [
        (test_results.ok, False,
            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
        (test_results.failed, True,
            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        # The base class exposes no constant for crashes; 'C' is the
        # dashboard's crash modifier character.
        (test_results.crashed, True,
            "C"),
        (test_results.unknown, True,
            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
        ]

    for results_list, failed, modifier in conversion_map:
      for single_test_result in results_list:
        test_result = json_results_generator.TestResult(
            test=single_test_result.name,
            failed=failed,
            # NOTE(review): under Python 2 this is integer division, so
            # sub-second durations truncate to 0 — confirm |dur| is in ms
            # and whether fractional seconds are wanted.
            elapsed_time=single_test_result.dur / 1000)
        # The WebKit TestResult object sets the modifier based on the test
        # name. Since we don't use the same test naming convention as WebKit
        # the modifier will be wrong, so we need to overwrite it.
        test_result.modifier = modifier

        self._test_results_map[single_test_result.name] = test_result

  def Upload(self, test_results_server):
    """Generates the JSON files and uploads them to |test_results_server|."""
    if not self._test_results_map:
      return

    tmp_folder = tempfile.mkdtemp()

    try:
      results_generator = JSONResultsGenerator(
          port=PortDummy(),
          builder_name=self._builder_name,
          build_name=self._build_name,
          build_number=self._build_number,
          tmp_folder=tmp_folder,
          test_results_map=self._test_results_map,
          test_results_server=test_results_server,
          test_type=self._tests_type,
          master_name=self._master_name)

      json_files = ["incremental_results.json", "times_ms.json"]
      results_generator.generate_json_output()
      results_generator.generate_times_ms_file()
      results_generator.upload_json_files(json_files)
    except Exception as e:
      # Best-effort: a dashboard outage must not fail the test run.
      logging.error("Uploading results to test server failed: %s.", e)
    finally:
      shutil.rmtree(tmp_folder)
+
+
def Upload(flakiness_dashboard_server, test_type, results):
  """Uploads |results| to the Chrome-for-Android flakiness dashboard.

  Args:
    flakiness_dashboard_server: server that receives the upload.
    test_type: test type label shown by the flakiness dashboard.
    results: test results to report.
  """
  dashboard_uploader = ResultsUploader(test_type)
  dashboard_uploader.AddResults(results)
  dashboard_uploader.Upload(flakiness_dashboard_server)
diff --git a/src/build/android/pylib/forwarder.py b/src/build/android/pylib/forwarder.py
new file mode 100644
index 0000000..460a3dc
--- /dev/null
+++ b/src/build/android/pylib/forwarder.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import sys
+import time
+
+import android_commands
+import cmd_helper
+import constants
+import ports
+
+from pylib import pexpect
+
+
+def _MakeBinaryPath(build_type, binary_name):
+  return os.path.join(cmd_helper.OutDirectory.get(), build_type, binary_name)
+
+
+class Forwarder(object):
+  """Class to manage port forwards from the device to the host."""
+
+  # Unix Abstract socket path:
+  _DEVICE_ADB_CONTROL_PORT = 'chrome_device_forwarder'
+  _TIMEOUT_SECS = 30
+
+  _DEVICE_FORWARDER_PATH = constants.TEST_EXECUTABLE_DIR + '/device_forwarder'
+
+  def __init__(self, adb, build_type):
+    """Forwards TCP ports on the device back to the host.
+
+    Works like adb forward, but in reverse.
+
+    Args:
+      adb: Instance of AndroidCommands for talking to the device.
+      build_type: 'Release' or 'Debug'.
+    """
+    assert build_type in ('Release', 'Debug')
+    self._adb = adb
+    self._host_to_device_port_map = dict()
+    self._device_process = None
+    self._host_forwarder_path = _MakeBinaryPath(build_type, 'host_forwarder')
+    self._device_forwarder_path = _MakeBinaryPath(
+        build_type, 'device_forwarder')
+
+  def Run(self, port_pairs, tool, host_name):
+    """Runs the forwarder.
+
+    Args:
+      port_pairs: A list of tuples (device_port, host_port) to forward. Note
+                 that you can specify 0 as a device_port, in which case a
+                 port will by dynamically assigned on the device. You can
+                 get the number of the assigned port using the
+                 DevicePortForHostPort method.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+      host_name: Address to forward to, must be addressable from the
+                 host machine. Usually use loopback '127.0.0.1'.
+
+    Raises:
+      Exception on failure to forward the port.
+    """
+    host_adb_control_port = ports.AllocateTestServerPort()
+    if not host_adb_control_port:
+      raise Exception('Failed to allocate a TCP port in the host machine.')
+    self._adb.PushIfNeeded(
+        self._device_forwarder_path, Forwarder._DEVICE_FORWARDER_PATH)
+    redirection_commands = [
+        '%d:%d:%d:%s' % (host_adb_control_port, device, host,
+                         host_name) for device, host in port_pairs]
+    logging.info('Command format: <ADB port>:<Device port>' +
+                 '[:<Forward to port>:<Forward to address>]')
+    logging.info('Forwarding using commands: %s', redirection_commands)
+    if cmd_helper.RunCmd(
+        ['adb', '-s', self._adb._adb.GetSerialNumber(), 'forward',
+         'tcp:%s' % host_adb_control_port,
+         'localabstract:%s' % Forwarder._DEVICE_ADB_CONTROL_PORT]) != 0:
+      raise Exception('Error while running adb forward.')
+
+    (exit_code, output) = self._adb.GetShellCommandStatusAndOutput(
+        '%s %s' % (Forwarder._DEVICE_FORWARDER_PATH,
+                   Forwarder._DEVICE_ADB_CONTROL_PORT))
+    if exit_code != 0:
+      raise Exception(
+          'Failed to start device forwarder:\n%s' % '\n'.join(output))
+
+    for redirection_command in redirection_commands:
+      (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+          [self._host_forwarder_path, redirection_command])
+      if exit_code != 0:
+        raise Exception('%s exited with %d:\n%s' % (
+            self._host_forwarder_path, exit_code, '\n'.join(output)))
+      tokens = output.split(':')
+      if len(tokens) != 2:
+        raise Exception('Unexpected host forwarder output "%s", ' +
+                        'expected "device_port:host_port"' % output)
+      device_port = int(tokens[0])
+      host_port = int(tokens[1])
+      self._host_to_device_port_map[host_port] = device_port
+      logging.info('Forwarding device port: %d to host port: %d.', device_port,
+                   host_port)
+
+  @staticmethod
+  def KillHost(build_type):
+    logging.info('Killing host_forwarder.')
+    host_forwarder_path = _MakeBinaryPath(build_type, 'host_forwarder')
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [host_forwarder_path, 'kill-server'])
+    if exit_code != 0:
+      (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+          ['pkill', 'host_forwarder'])
+      if exit_code != 0:
+        raise Exception('%s exited with %d:\n%s' % (
+              host_forwarder_path, exit_code, '\n'.join(output)))
+
+  @staticmethod
+  def KillDevice(adb):
+    logging.info('Killing device_forwarder.')
+    if not adb.FileExistsOnDevice(Forwarder._DEVICE_FORWARDER_PATH):
+      return
+    (exit_code, output) = adb.GetShellCommandStatusAndOutput(
+        '%s kill-server' % Forwarder._DEVICE_FORWARDER_PATH)
+    # TODO(pliard): Remove the following call to KillAllBlocking() when we are
+    # sure that the old version of device_forwarder (not supporting
+    # 'kill-server') is not running on the bots anymore.
+    timeout_sec = 5
+    processes_killed = adb.KillAllBlocking('device_forwarder', timeout_sec)
+    if not processes_killed:
+      pids = adb.ExtractPid('device_forwarder')
+      if pids:
+        raise Exception('Timed out while killing device_forwarder')
+
+  def DevicePortForHostPort(self, host_port):
+    """Get the device port that corresponds to a given host port."""
+    return self._host_to_device_port_map.get(host_port)
+
+  def Close(self):
+    """Terminate the forwarder process."""
+    if self._device_process:
+      self._device_process.close()
+      self._device_process = None
diff --git a/src/build/android/pylib/io_stats_parser.py b/src/build/android/pylib/io_stats_parser.py
new file mode 100644
index 0000000..89097ab
--- /dev/null
+++ b/src/build/android/pylib/io_stats_parser.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a parser for /proc/diskstats-style I/O statistics lines."""
+
+
+import collections
+
+
+def ParseIoStatsLine(line):
+  """Parses a line of io stats into a IoStats named tuple."""
+  # Field definitions: http://www.kernel.org/doc/Documentation/iostats.txt
+  IoStats = collections.namedtuple('IoStats',
+                                   ['device',
+                                    'num_reads_issued',
+                                    'num_reads_merged',
+                                    'num_sectors_read',
+                                    'ms_spent_reading',
+                                    'num_writes_completed',
+                                    'num_writes_merged',
+                                    'num_sectors_written',
+                                    'ms_spent_writing',
+                                    'num_ios_in_progress',
+                                    'ms_spent_doing_io',
+                                    'ms_spent_doing_io_weighted',
+                                    ])
+  fields = line.split()
+  return IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
diff --git a/src/build/android/pylib/java_unittest_utils.py b/src/build/android/pylib/java_unittest_utils.py
new file mode 100644
index 0000000..b5446dc
--- /dev/null
+++ b/src/build/android/pylib/java_unittest_utils.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This file is imported by python tests ran by run_python_tests.py."""
+
+import os
+
+import android_commands
+from run_java_tests import TestRunner
+
+
+def _GetPackageName(fname):
+  """Extracts the package name from the test file path."""
+  base_root = os.path.join('com', 'google', 'android')
+  dirname = os.path.dirname(fname)
+  package = dirname[dirname.rfind(base_root):]
+  return package.replace(os.sep, '.')
+
+
+def RunJavaTest(fname, suite, test, ports_to_forward):
+  device = android_commands.GetAttachedDevices()[0]
+  package_name = _GetPackageName(fname)
+  test = package_name + '.' + suite + '#' + test
+  java_test_runner = TestRunner(False, device, [test], False, False, False,
+                                False, 0, ports_to_forward)
+  return java_test_runner.Run()
diff --git a/src/build/android/pylib/json_perf_parser.py b/src/build/android/pylib/json_perf_parser.py
new file mode 100644
index 0000000..1a8e617
--- /dev/null
+++ b/src/build/android/pylib/json_perf_parser.py
@@ -0,0 +1,160 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictonaries each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags.
+      category: The passed in category filter.
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = filter(EntryFilter, json_data)
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if not val_type in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0: result['average'] = total_sum / result['count']
+
+  return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
diff --git a/src/build/android/pylib/perf_tests_helper.py b/src/build/android/pylib/perf_tests_helper.py
new file mode 100644
index 0000000..ca9023b
--- /dev/null
+++ b/src/build/android/pylib/perf_tests_helper.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+import android_commands
+import json
+import math
+
+# Valid values of result type, mapped to the prefix string emitted in front
+# of each result line by PrintPerfResult ('informational' gets no prefix).
+RESULT_TYPES = {'unimportant': 'RESULT ',
+                'default': '*RESULT ',
+                'informational': '',
+                'unimportant-histogram': 'HISTOGRAM ',
+                'histogram': '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  # Replaces colons (:), pipes (|), equal signs (=) and slashes (/) with
+  # underscores — note the '|' inside the character class is a literal pipe,
+  # not an alternation, so it is substituted too.
+  return re.sub('[\:|=/]', '_', s)
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([str(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ", ".join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
+
+
+def PrintPerfResult(measurement, trace, values, units, result_type='default',
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+    trace: A description of the particular data point, e.g. "reference".
+    values: A list of numeric measured values.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of RESULT_TYPES.
+    print_to_stdout: If True, prints the output in stdout instead of returning
+        the output to caller.
+
+  Returns:
+    String of the formatted perf result.
+  """
+  assert result_type in RESULT_TYPES, 'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if result_type in ['unimportant', 'default', 'informational']:
+    assert isinstance(values, list)
+    assert len(values)
+    assert '/' not in measurement
+    value, avg, sd = _MeanAndStdDevFromList(values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show equal sign if the trace is empty. Usually it happens when
+        # measurement is enough clear to describe the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert(result_type in ['histogram', 'unimportant-histogram'])
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    measurement += '.' + trace_name
+    output = '%s%s: %s= %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        _EscapePerfResult(measurement),
+        value)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  # NOTE(review): truthiness tests also suppress the Avg/Sd lines when the
+  # value is exactly 0 (e.g. an empty histogram) — confirm that is intended
+  # rather than 'is not None' checks.
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+  return output
+
+
+class PerfTestSetup(object):
+  """Provides methods for setting up a device for perf testing."""
+  _DROP_CACHES = '/proc/sys/vm/drop_caches'
+  _SCALING_GOVERNOR = '/sys/devices/system/cpu/cpu%d/cpufreq/scaling_governor'
+
+  def __init__(self, adb):
+    self._adb = adb
+    num_cpus = self._adb.GetFileContents('/sys/devices/system/cpu/online',
+                                         log_result=False)
+    assert num_cpus, 'Unable to find /sys/devices/system/cpu/online'
+    self._num_cpus = int(num_cpus[0].split('-')[-1])
+    self._original_scaling_governor = None
+
+  def DropRamCaches(self):
+    """Drops the filesystem ram caches for performance testing."""
+    if not self._adb.IsRootEnabled():
+      self._adb.EnableAdbRoot()
+    self._adb.RunShellCommand('sync')
+    self._adb.RunShellCommand('echo 3 > ' + PerfTestSetup._DROP_CACHES)
+
+  def SetUp(self):
+    """Sets up performance tests."""
+    if not self._original_scaling_governor:
+      self._original_scaling_governor = self._adb.GetFileContents(
+          PerfTestSetup._SCALING_GOVERNOR % 0,
+          log_result=False)[0]
+      self._SetScalingGovernorInternal('performance')
+    self.DropRamCaches()
+
+  def TearDown(self):
+    """Tears down performance tests."""
+    if self._original_scaling_governor:
+      self._SetScalingGovernorInternal(self._original_scaling_governor)
+    self._original_scaling_governor = None
+
+  def _SetScalingGovernorInternal(self, value):
+    for cpu in range(self._num_cpus):
+      self._adb.RunShellCommand(
+          ('echo %s > ' + PerfTestSetup._SCALING_GOVERNOR) % (value, cpu))
diff --git a/src/build/android/pylib/pexpect.py b/src/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..f566f1c
--- /dev/null
+++ b/src/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import *
+except:
+  pass
diff --git a/src/build/android/pylib/ports.py b/src/build/android/pylib/ports.py
new file mode 100644
index 0000000..74c84c1
--- /dev/null
+++ b/src/build/android/pylib/ports.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions that deal with local and device ports."""
+
+import contextlib
+import fcntl
+import httplib
+import logging
+import os
+import re
+import socket
+import traceback
+
+import cmd_helper
+import constants
+
+
+# The following two methods are used to allocate the port source for various
+# types of test servers. Because some net-related tests can be run on shards at
+# same time, it's important to have a mechanism to allocate the port
+# process-safe. In here, we implement the safe port allocation by leveraging
+# flock.
+def ResetTestServerPortAllocation():
+  """Resets the port allocation to start from TEST_SERVER_PORT_FIRST.
+
+  Returns:
+    Returns True if reset successes. Otherwise returns False.
+  """
+  try:
+    with open(constants.TEST_SERVER_PORT_FILE, 'w') as fp:
+      fp.write('%d' % constants.TEST_SERVER_PORT_FIRST)
+    if os.path.exists(constants.TEST_SERVER_PORT_LOCKFILE):
+      os.unlink(constants.TEST_SERVER_PORT_LOCKFILE)
+    return True
+  except Exception as e:
+    logging.error(e)
+  return False
+
+
+def AllocateTestServerPort():
+  """Allocates a port incrementally.
+
+  Returns:
+    Returns a valid port which should be in between TEST_SERVER_PORT_FIRST and
+    TEST_SERVER_PORT_LAST. Returning 0 means no more valid port can be used.
+  """
+  port = 0
+  ports_tried = []
+  try:
+    fp_lock = open(constants.TEST_SERVER_PORT_LOCKFILE, 'w')
+    fcntl.flock(fp_lock, fcntl.LOCK_EX)
+    # Get current valid port and calculate next valid port.
+    if not os.path.exists(constants.TEST_SERVER_PORT_FILE):
+      ResetTestServerPortAllocation()
+    with open(constants.TEST_SERVER_PORT_FILE, 'r+') as fp:
+      port = int(fp.read())
+      ports_tried.append(port)
+      while IsHostPortUsed(port):
+        port += 1
+        ports_tried.append(port)
+      if (port > constants.TEST_SERVER_PORT_LAST or
+          port < constants.TEST_SERVER_PORT_FIRST):
+        port = 0
+      else:
+        fp.seek(0, os.SEEK_SET)
+        fp.write('%d' % (port + 1))
+  except Exception as e:
+    logging.info(e)
+  finally:
+    if fp_lock:
+      fcntl.flock(fp_lock, fcntl.LOCK_UN)
+      fp_lock.close()
+  if port:
+    logging.info('Allocate port %d for test server.', port)
+  else:
+    logging.error('Could not allocate port for test server. '
+                  'List of ports tried: %s', str(ports_tried))
+  return port
+
+
+def IsHostPortUsed(host_port):
+  """Checks whether the specified host port is used or not.
+
+  Uses -n -P to inhibit the conversion of host/port numbers to host/port names.
+
+  Args:
+    host_port: Port on host we want to check.
+
+  Returns:
+    True if the port on host is already used, otherwise returns False.
+  """
+  port_info = '(\*)|(127\.0\.0\.1)|(localhost):%d' % host_port
+  # TODO(jnd): Find a better way to filter the port. Note that connecting to the
+  # socket and closing it would leave it in the TIME_WAIT state. Setting
+  # SO_LINGER on it and then closing it makes the Python HTTP server crash.
+  re_port = re.compile(port_info, re.MULTILINE)
+  if re_port.search(cmd_helper.GetCmdOutput(['lsof', '-nPi:%d' % host_port])):
+    return True
+  return False
+
+
+def IsDevicePortUsed(adb, device_port, state=''):
+  """Checks whether the specified device port is used or not.
+
+  Args:
+    adb: Instance of AndroidCommands for talking to the device.
+    device_port: Port on device we want to check.
+    state: String of the specified state. Default is empty string, which
+           means any state.
+
+  Returns:
+    True if the port on device is already used, otherwise returns False.
+  """
+  base_url = '127.0.0.1:%d' % device_port
+  netstat_results = adb.RunShellCommand('netstat', log_result=False)
+  for single_connect in netstat_results:
+    # Column 3 is the local address which we want to check with.
+    connect_results = single_connect.split()
+    if connect_results[0] != 'tcp':
+      continue
+    if len(connect_results) < 6:
+      raise Exception('Unexpected format while parsing netstat line: ' +
+                      single_connect)
+    is_state_match = connect_results[5] == state if state else True
+    if connect_results[3] == base_url and is_state_match:
+      return True
+  return False
+
+
+def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
+                            expected_read='', timeout=2):
+  """Checks whether the specified http server is ready to serve request or not.
+
+  Args:
+    host: Host name of the HTTP server.
+    port: Port number of the HTTP server.
+    tries: How many times we want to test the connection. The default value is
+           3.
+    command: The http command we use to connect to HTTP server. The default
+             command is 'GET'.
+    path: The path we use when connecting to HTTP server. The default path is
+          '/'.
+    expected_read: The content we expect to read from the response. The default
+                   value is ''.
+    timeout: Timeout (in seconds) for each http connection. The default is 2s.
+
+  Returns:
+    Tuple of (connect status, client error). connect status is a boolean value
+    to indicate whether the server is connectable. client_error is the error
+    message the server returns when connect status is false.
+  """
+  assert tries >= 1
+  for i in xrange(0, tries):
+    client_error = None
+    try:
+      # contextlib.closing guarantees the connection is closed even when a
+      # later call in the body raises.
+      with contextlib.closing(httplib.HTTPConnection(
+          host, port, timeout=timeout)) as http:
+        # Output some debug information when we have tried more than 2 times.
+        http.set_debuglevel(i >= 2)
+        http.request(command, path)
+        r = http.getresponse()
+        content = r.read()
+        if r.status == 200 and r.reason == 'OK' and content == expected_read:
+          return (True, '')
+        # Non-200 (or unexpected body): remember why, then retry.
+        client_error = ('Bad response: %s %s version %s\n  ' %
+                        (r.status, r.reason, r.version) +
+                        '\n  '.join([': '.join(h) for h in r.getheaders()]))
+    except (httplib.HTTPException, socket.error) as e:
+      # Probably too quick connecting: try again.
+      # NOTE(review): there is no delay between attempts — confirm an
+      # immediate retry is intended here.
+      exception_error_msgs = traceback.format_exception_only(type(e), e)
+      if exception_error_msgs:
+        client_error = ''.join(exception_error_msgs)
+  # Only returns last client_error.
+  return (False, client_error or 'Timeout')
diff --git a/src/build/android/pylib/python_test_base.py b/src/build/android/pylib/python_test_base.py
new file mode 100644
index 0000000..d2cdfb0
--- /dev/null
+++ b/src/build/android/pylib/python_test_base.py
@@ -0,0 +1,172 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for Android Python-driven tests.
+
+This test case is intended to serve as the base class for any Python-driven
+tests. It is similar to the Python unitttest module in that the user's tests
+inherit from this case and add their tests in that case.
+
+When a PythonTestBase object is instantiated, its purpose is to run only one of
+its tests. The test runner gives it the name of the test the instance will
+run. The test runner calls SetUp with the Android device ID which the test will
+run against. The runner runs the test method itself, collecting the result,
+and calls TearDown.
+
+Tests can basically do whatever they want in the test methods, such as call
+Java tests using _RunJavaTests. Those methods have the advantage of massaging
+the Java test results into Python test results.
+"""
+
+import logging
+import os
+import time
+
+import android_commands
+import apk_info
+from run_java_tests import TestRunner
+from test_result import SingleTestResult, TestResults
+
+
+# Path prefix up to (and including) the component that precedes the Java
+# package tree, i.e. the parent of com.google.android ('src' + os.sep).
+BASE_ROOT = 'src' + os.sep
+
+
+class PythonTestBase(object):
+  """Base class for Python-driven tests."""
+
+  def __init__(self, test_name):
+    # test_name must match one of the test methods defined on a subclass which
+    # inherits from this class.
+    # It's stored so we can do the attr lookup on demand, allowing this class
+    # to be pickled, a requirement for the multiprocessing module.
+    self.test_name = test_name
+    class_name = self.__class__.__name__
+    self.qualified_name = class_name + '.' + self.test_name
+
+  def SetUp(self, options):
+    self.options = options
+    self.shard_index = self.options.shard_index
+    self.device_id = self.options.device_id
+    self.adb = android_commands.AndroidCommands(self.device_id)
+    self.ports_to_forward = []
+
+  def TearDown(self):
+    pass
+
+  def GetOutDir(self):
+    return os.path.join(os.environ['CHROME_SRC'], 'out',
+        self.options.build_type)
+
+  def Run(self):
+    logging.warning('Running Python-driven test: %s', self.test_name)
+    return getattr(self, self.test_name)()
+
+  def _RunJavaTest(self, fname, suite, test):
+    """Runs a single Java test with a Java TestRunner.
+
+    Args:
+      fname: filename for the test (e.g. foo/bar/baz/tests/FooTest.py)
+      suite: name of the Java test suite (e.g. FooTest)
+      test: name of the test method to run (e.g. testFooBar)
+
+    Returns:
+      TestResults object with a single test result.
+    """
+    test = self._ComposeFullTestName(fname, suite, test)
+    apks = [apk_info.ApkInfo(self.options.test_apk_path,
+            self.options.test_apk_jar_path)]
+    java_test_runner = TestRunner(self.options, self.device_id, [test], False,
+                                  self.shard_index,
+                                  apks,
+                                  self.ports_to_forward)
+    return java_test_runner.Run()
+
+  def _RunJavaTests(self, fname, tests):
+    """Calls a list of tests and stops at the first test failure.
+
+    This method iterates until either it encounters a non-passing test or it
+    exhausts the list of tests. Then it returns the appropriate Python result.
+
+    Args:
+      fname: filename for the Python test
+      tests: a list of Java test names which will be run
+
+    Returns:
+      A TestResults object containing a result for this Python test.
+    """
+    start_ms = int(time.time()) * 1000
+
+    result = None
+    for test in tests:
+      # We're only running one test at a time, so this TestResults object will
+      # hold only one result.
+      suite, test_name = test.split('.')
+      result = self._RunJavaTest(fname, suite, test_name)
+      # A non-empty list means the test did not pass.
+      if result.GetAllBroken():
+        break
+
+    duration_ms = int(time.time()) * 1000 - start_ms
+
+    # Do something with result.
+    return self._ProcessResults(result, start_ms, duration_ms)
+
+  def _ProcessResults(self, result, start_ms, duration_ms):
+    """Translates a Java test result into a Python result for this test.
+
+    The TestRunner class that we use under the covers will return a test result
+    for that specific Java test. However, to make reporting clearer, we have
+    this method to abstract that detail and instead report that as a failure of
+    this particular test case while still including the Java stack trace.
+
+    Args:
+      result: TestResults with a single Java test result
+      start_ms: the time the test started
+      duration_ms: the length of the test
+
+    Returns:
+      A TestResults object containing a result for this Python test.
+    """
+    test_results = TestResults()
+
+    # If our test is in broken, then it crashed/failed.
+    broken = result.GetAllBroken()
+    if broken:
+      # Since we have run only one test, take the first and only item.
+      single_result = broken[0]
+
+      log = single_result.log
+      if not log:
+        log = 'No logging information.'
+
+      python_result = SingleTestResult(self.qualified_name, start_ms,
+                                       duration_ms,
+                                       log)
+
+      # Figure out where the test belonged. There's probably a cleaner way of
+      # doing this.
+      if single_result in result.crashed:
+        test_results.crashed = [python_result]
+      elif single_result in result.failed:
+        test_results.failed = [python_result]
+      elif single_result in result.unknown:
+        test_results.unknown = [python_result]
+
+    else:
+      python_result = SingleTestResult(self.qualified_name, start_ms,
+                                       duration_ms)
+      test_results.ok = [python_result]
+
+    return test_results
+
+  def _ComposeFullTestName(self, fname, suite, test):
+    package_name = self._GetPackageName(fname)
+    return package_name + '.' + suite + '#' + test
+
+  def _GetPackageName(self, fname):
+    """Extracts the package name from the test file path."""
+    dirname = os.path.dirname(fname)
+    package = dirname[dirname.rfind(BASE_ROOT) + len(BASE_ROOT):]
+    return package.replace(os.sep, '.')
diff --git a/src/build/android/pylib/python_test_caller.py b/src/build/android/pylib/python_test_caller.py
new file mode 100644
index 0000000..882b892
--- /dev/null
+++ b/src/build/android/pylib/python_test_caller.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper module for calling python-based tests."""
+
+
+import logging
+import sys
+import time
+
+from test_result import TestResults
+
+
def CallPythonTest(test, options):
  """Invokes a test function and translates Python exceptions into test results.

  This method invokes SetUp()/TearDown() on the test. It is intended to be
  resilient to exceptions in SetUp(), the test itself, and TearDown(). Any
  Python exception means the test is marked as failed, and the test result will
  contain information about the exception.

  If SetUp() raises an exception, the test is not run.

  If TearDown() raises an exception, the test is treated as a failure. However,
  if the test itself raised an exception beforehand, that stack trace will take
  precedence whether or not TearDown() also raised an exception.

  shard_index is not applicable in single-device scenarios, when test execution
  is serial rather than parallel. Tests can use this to bring up servers with
  unique port numbers, for example. See also python_test_sharder.

  Args:
    test: an object which is ostensibly a subclass of PythonTestBase.
    options: Options to use for setting up tests.

  Returns:
    A TestResults object which contains any results produced by the test or, in
    the case of a Python exception, the Python exception info.
  """

  # Bug fix: take the millisecond timestamp before truncating. The previous
  # form, int(time.time()) * 1000, dropped to whole-second resolution.
  start_date_ms = int(time.time() * 1000)
  failed = False

  try:
    test.SetUp(options)
  except Exception:
    failed = True
    logging.exception(
        'Caught exception while trying to run SetUp() for test: ' +
        test.qualified_name)
    # Tests whose SetUp() method has failed are likely to fail, or at least
    # yield invalid results.
    exc_info = sys.exc_info()
    return TestResults.FromPythonException(test.qualified_name, start_date_ms,
                                           exc_info)

  try:
    result = test.Run()
  except Exception:
    # Setting this lets TearDown() avoid stomping on our stack trace from Run()
    # should TearDown() also raise an exception.
    failed = True
    logging.exception('Caught exception while trying to run test: ' +
                      test.qualified_name)
    exc_info = sys.exc_info()
    result = TestResults.FromPythonException(test.qualified_name, start_date_ms,
                                             exc_info)

  try:
    test.TearDown()
  except Exception:
    # (Also fixes the 'trying run' typo in the original log message.)
    logging.exception(
        'Caught exception while trying to run TearDown() for test: ' +
        test.qualified_name)
    if not failed:
      # Don't stomp the error during the test if TearDown blows up. This is a
      # trade-off: if the test fails, this will mask any problem with TearDown
      # until the test is fixed.
      exc_info = sys.exc_info()
      result = TestResults.FromPythonException(test.qualified_name,
                                               start_date_ms, exc_info)

  return result
diff --git a/src/build/android/pylib/python_test_sharder.py b/src/build/android/pylib/python_test_sharder.py
new file mode 100644
index 0000000..e27096d
--- /dev/null
+++ b/src/build/android/pylib/python_test_sharder.py
@@ -0,0 +1,203 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes care of sharding the Python-driven tests across multiple devices."""
+
+import copy
+import logging
+import multiprocessing
+
+from python_test_caller import CallPythonTest
+from run_java_tests import FatalTestException
+import sharded_tests_queue
+from test_result import TestResults
+
+
def SetTestsContainer(tests_container):
  """Stores |tests_container| as a class attribute on PythonTestSharder.

  multiprocessing.Pool workers are initialized with this function so that
  every worker process can reach the shared pool of tests; the
  multiprocessing module requires the initializer to be a top-level callable.

  Args:
    tests_container: the container for all the tests.
  """
  PythonTestSharder.tests_container = tests_container
+
+
+def _DefaultRunnable(test_runner):
+  """A default runnable for a PythonTestRunner.
+
+  Args:
+    test_runner: A PythonTestRunner which will run tests.
+
+  Returns:
+    The test results.
+  """
+  return test_runner.RunTests()
+
+
class PythonTestRunner(object):
  """Thin wrapper around a list of PythonTestBase instances.

  A single long-lived runner may execute many Python tests during its
  lifetime. Each test receives the device_id and shard_index; the shard
  index lets a test derive unique port numbers (e.g.
  DEFAULT_PORT + shard_index) if it needs to.
  """

  def __init__(self, options):
    """Remembers the options shared by every test this runner executes.

    Args:
      options: Options to use for setting up tests.
    """
    self.options = options

  def RunTests(self):
    """Drains the shared pool of tests, aggregating their results.

    Returns:
      A TestResults object combining the results of every test this runner
      executed.
    """
    test_pool = PythonTestSharder.tests_container
    outcomes = [CallPythonTest(test, self.options) for test in test_pool]
    return TestResults.FromTestResults(outcomes)
+
+
class PythonTestSharder(object):
  """Runs Python tests in parallel on multiple devices.

  This is lifted more or less wholesale from BaseTestRunner.

  Under the covers, it creates a pool of long-lived PythonTestRunners, which
  execute tests from the pool of tests.

  Args:
    attached_devices: a list of device IDs attached to the host.
    available_tests: a list of tests to run which subclass PythonTestBase.
    options: Options to use for setting up tests.

  Returns:
    An aggregated list of test results.
  """
  # Shared container of tests, installed into each worker by
  # SetTestsContainer(); see that function for details.
  tests_container = None

  def __init__(self, attached_devices, available_tests, options):
    self.options = options
    self.attached_devices = attached_devices
    self.retries = options.shard_retries
    self.tests = available_tests

  def _SetupSharding(self, tests):
    """Creates the shared pool of tests and makes it available to test runners.

    Args:
      tests: the list of tests which will be consumed by workers.
    """
    SetTestsContainer(sharded_tests_queue.ShardedTestsQueue(
        len(self.attached_devices), tests))

  def RunShardedTests(self):
    """Runs tests in parallel using a pool of workers.

    Returns:
      A list of test results aggregated from all test runs.
    """
    logging.warning('*' * 80)
    logging.warning('Sharding in ' + str(len(self.attached_devices)) +
                    ' devices.')
    logging.warning('Note that the output is not synchronized.')
    logging.warning('Look for the "Final result" banner in the end.')
    logging.warning('*' * 80)
    all_passed = []
    test_results = TestResults()
    tests_to_run = self.tests
    for retry in xrange(self.retries):
      logging.warning('Try %d of %d', retry + 1, self.retries)
      # Bug fix: shard the tests that still need to run (all tests on the
      # first try, only the failures on retries). The original resharded
      # self.tests every time, so retries reran already-passed tests and
      # duplicated their results.
      self._SetupSharding(tests_to_run)
      test_runners = self._MakeTestRunners(self.attached_devices)
      logging.warning('Starting...')
      pool = multiprocessing.Pool(len(self.attached_devices),
                                  SetTestsContainer,
                                  [PythonTestSharder.tests_container])

      # List of TestResults objects from each test execution.
      try:
        results_lists = pool.map(_DefaultRunnable, test_runners)
      except Exception:
        # Bug fix: kill the workers before bailing out; the pool was
        # previously leaked on this path.
        pool.terminate()
        logging.exception('Unable to run tests. Something with the '
                          'PythonTestRunners has gone wrong.')
        raise FatalTestException('PythonTestRunners were unable to run tests.')
      # Bug fix: shut the worker processes down cleanly; the pool was
      # previously never closed or joined.
      pool.close()
      pool.join()

      test_results = TestResults.FromTestResults(results_lists)
      # Accumulate passing results.
      all_passed += test_results.ok
      # If we have failed tests, map them to tests to retry.
      failed_tests = test_results.GetAllBroken()
      tests_to_run = self._GetTestsToRetry(self.tests,
                                           failed_tests)

      # Bail out early if we have no more tests. This can happen if all tests
      # pass before we're out of retries, for example.
      if not tests_to_run:
        break

    # all_passed has accumulated all passing test results.
    # test_results will have the results from the most recent run, which could
    # include a variety of failure modes (unknown, crashed, failed, etc).
    final_results = test_results
    final_results.ok = all_passed

    return final_results

  def _MakeTestRunners(self, attached_devices):
    """Initialize and return a list of PythonTestRunners.

    Args:
      attached_devices: list of device IDs attached to host.

    Returns:
      A list of PythonTestRunners, one for each device.
    """
    test_runners = []
    for index, device in enumerate(attached_devices):
      logging.warning('*' * 80)
      logging.warning('Creating shard %d for %s', index, device)
      logging.warning('*' * 80)
      # Bind the PythonTestRunner to a device & shard index. Give it the
      # runnable which it will use to actually execute the tests.
      test_options = copy.deepcopy(self.options)
      test_options.ensure_value('device_id', device)
      test_options.ensure_value('shard_index', index)
      test_runner = PythonTestRunner(test_options)
      test_runners.append(test_runner)

    return test_runners

  def _GetTestsToRetry(self, available_tests, failed_tests):
    """Infers a list of tests to retry from failed tests and available tests.

    Args:
      available_tests: a list of tests which subclass PythonTestBase.
      failed_tests: a list of SingleTestResults representing failed tests.

    Returns:
      A list of test objects which correspond to test names found in
      failed_tests, or an empty list if there is no correspondence.
    """
    failed_test_names = [t.test_name for t in failed_tests]
    tests_to_retry = [t for t in available_tests
                      if t.qualified_name in failed_test_names]
    return tests_to_retry
diff --git a/src/build/android/pylib/run_java_tests.py b/src/build/android/pylib/run_java_tests.py
new file mode 100644
index 0000000..07b45e0
--- /dev/null
+++ b/src/build/android/pylib/run_java_tests.py
@@ -0,0 +1,593 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the Java tests. See more information on run_instrumentation_tests.py."""
+
+import fnmatch
+import logging
+import os
+import re
+import shutil
+import sys
+import time
+
+import android_commands
+import apk_info
+from base_test_runner import BaseTestRunner
+from base_test_sharder import BaseTestSharder, SetTestsContainer
+import cmd_helper
+import constants
+import errors
+from forwarder import Forwarder
+from json_perf_parser import GetAverageRunInfoFromJSONString
+from perf_tests_helper import PrintPerfResult
+import sharded_tests_queue
+from test_result import SingleTestResult, TestResults
+import valgrind_tools
+
+# Java annotation that marks an instrumentation test as a performance test.
+_PERF_TEST_ANNOTATION = 'PerfTest'
+
+
class FatalTestException(Exception):
  """Raised for unrecoverable errors while setting up or running tests."""
+
+
+def _TestNameToExpectation(test_name):
+  # A test name is a Package.Path.Class#testName; convert to what we use in
+  # the expectation file.
+  return '.'.join(test_name.replace('#', '.').split('.')[-2:])
+
+
def FilterTests(test_names, pattern_list, inclusive):
  """Filters |test_names| against a list of fnmatch patterns.

  Args:
    test_names: A list of test names.
    pattern_list: A list of patterns.
    inclusive: If True, returns the tests that match any pattern. If False,
               returns the tests that do not match any pattern.

  Returns:
    A list of test names.
  """
  filtered = []
  for name in test_names:
    # A name matches when any pattern accepts its expectation-file form.
    matched = any(fnmatch.fnmatch(_TestNameToExpectation(name), pattern)
                  for pattern in pattern_list)
    if matched == inclusive:
      filtered.append(name)
  return filtered
+
+
+class TestRunner(BaseTestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  # Device-side directory (relative to external storage) test data is pushed
+  # to.
+  _DEVICE_DATA_DIR = 'chrome/test/data'
+  _EMMA_JAR = os.path.join(os.environ.get('ANDROID_BUILD_TOP', ''),
+                           'external/emma/lib/emma.jar')
+  _COVERAGE_MERGED_FILENAME = 'unittest_coverage.es'
+  _COVERAGE_WEB_ROOT_DIR = os.environ.get('EMMA_WEB_ROOTDIR')
+  _COVERAGE_FILENAME = 'coverage.ec'
+  _COVERAGE_RESULT_PATH = ('/data/data/com.google.android.apps.chrome/files/' +
+                           _COVERAGE_FILENAME)
+  _COVERAGE_META_INFO_PATH = os.path.join(os.environ.get('ANDROID_BUILD_TOP',
+                                                         ''),
+                                          'out/target/common/obj/APPS',
+                                          'Chrome_intermediates/coverage.em')
+  _HOSTMACHINE_PERF_OUTPUT_FILE = '/tmp/chrome-profile'
+  _DEVICE_PERF_OUTPUT_SEARCH_PREFIX = (constants.DEVICE_PERF_OUTPUT_DIR +
+                                       '/chrome-profile*')
+  # Class-level cache: device serial -> True once test data/apks have been
+  # pushed, so the (potentially slow) push happens at most once per device.
+  _DEVICE_HAS_TEST_FILES = {}
+
+  def __init__(self, options, device, tests_iter, coverage, shard_index, apks,
+               ports_to_forward):
+    """Create a new TestRunner.
+
+    Args:
+      options: An options object with the following required attributes:
+      -  build_type: 'Release' or 'Debug'.
+      -  install_apk: Re-installs the apk if opted.
+      -  save_perf_json: Whether or not to save the JSON file from UI perf
+            tests.
+      -  screenshot_failures: Take a screenshot for a test failure
+      -  tool: Name of the Valgrind tool.
+      -  wait_for_debugger: blocks until the debugger is connected.
+      -  disable_assertions: Whether to disable java assertions on the device.
+      device: Attached android device.
+      tests_iter: A list of tests to be run.
+      coverage: Collects coverage information if opted.
+      shard_index: shard # for this TestRunner, used to create unique port
+          numbers.
+      apks: A list of ApkInfo objects need to be installed. The first element
+            should be the tests apk, the rests could be the apks used in test.
+            The default is ChromeTest.apk.
+      ports_to_forward: A list of port numbers for which to set up forwarders.
+                        Can be optionally requested by a test case.
+    Raises:
+      FatalTestException: if coverage metadata is not available.
+    """
+    BaseTestRunner.__init__(
+        self, device, options.tool, shard_index, options.build_type)
+
+    if not apks:
+      apks = [apk_info.ApkInfo(options.test_apk_path,
+                               options.test_apk_jar_path)]
+
+    self.build_type = options.build_type
+    self.install_apk = options.install_apk
+    self.test_data = options.test_data
+    self.save_perf_json = options.save_perf_json
+    self.screenshot_failures = options.screenshot_failures
+    self.wait_for_debugger = options.wait_for_debugger
+    self.disable_assertions = options.disable_assertions
+
+    self.tests_iter = tests_iter
+    self.coverage = coverage
+    self.apks = apks
+    self.test_apk = apks[0]
+    self.instrumentation_class_path = self.test_apk.GetPackageName()
+    self.ports_to_forward = ports_to_forward
+
+    self.test_results = TestResults()
+    self.forwarder = None
+
+    # Coverage needs both the merged-output destination and the Emma metadata
+    # produced by the build; fail fast if either is missing.
+    if self.coverage:
+      if os.path.exists(TestRunner._COVERAGE_MERGED_FILENAME):
+        os.remove(TestRunner._COVERAGE_MERGED_FILENAME)
+      if not os.path.exists(TestRunner._COVERAGE_META_INFO_PATH):
+        raise FatalTestException('FATAL ERROR in ' + sys.argv[0] +
+                                 ' : Coverage meta info [' +
+                                 TestRunner._COVERAGE_META_INFO_PATH +
+                                 '] does not exist.')
+      if (not TestRunner._COVERAGE_WEB_ROOT_DIR or
+          not os.path.exists(TestRunner._COVERAGE_WEB_ROOT_DIR)):
+        raise FatalTestException('FATAL ERROR in ' + sys.argv[0] +
+                                 ' : Path specified in $EMMA_WEB_ROOTDIR [' +
+                                 TestRunner._COVERAGE_WEB_ROOT_DIR +
+                                 '] does not exist.')
+
+  def _GetTestsIter(self):
+    """Returns the iterable of tests, lazily falling back to the shared queue."""
+    if not self.tests_iter:
+      # multiprocessing.Queue can't be pickled across processes if we have it as
+      # a member set during constructor.  Grab one here instead.
+      self.tests_iter = (BaseTestSharder.tests_container)
+    assert self.tests_iter
+    return self.tests_iter
+
+  def CopyTestFilesOnce(self):
+    """Pushes the test data files to the device. Installs the apk if opted."""
+    if TestRunner._DEVICE_HAS_TEST_FILES.get(self.device, False):
+      logging.warning('Already copied test files to device %s, skipping.',
+                      self.device)
+      return
+    for dest_host_pair in self.test_data:
+      # Each entry has the form '<device subdir>:<host path>'; split only on
+      # the first ':' so host paths may contain colons.
+      dst_src = dest_host_pair.split(':',1)
+      dst_layer = dst_src[0]
+      host_src = dst_src[1]
+      host_test_files_path = constants.CHROME_DIR + '/' + host_src
+      if os.path.exists(host_test_files_path):
+        self.adb.PushIfNeeded(host_test_files_path,
+                              self.adb.GetExternalStorage() + '/' +
+                              TestRunner._DEVICE_DATA_DIR + '/' + dst_layer)
+    if self.install_apk:
+      for apk in self.apks:
+        self.adb.ManagedInstall(apk.GetApkPath(),
+                                package_name=apk.GetPackageName())
+    self.tool.CopyFiles()
+    TestRunner._DEVICE_HAS_TEST_FILES[self.device] = True
+
+  def SaveCoverageData(self, test):
+    """Saves the Emma coverage data before it's overwritten by the next test.
+
+    Args:
+      test: the test whose coverage data is collected.
+    """
+    if not self.coverage:
+      return
+    if not self.adb.Adb().Pull(TestRunner._COVERAGE_RESULT_PATH,
+                               constants.CHROME_DIR):
+      logging.error('ERROR: Unable to find file ' +
+                    TestRunner._COVERAGE_RESULT_PATH +
+                    ' on the device for test ' + test)
+    pulled_coverage_file = os.path.join(constants.CHROME_DIR,
+                                        TestRunner._COVERAGE_FILENAME)
+    # Merge into the accumulated coverage file when one already exists;
+    # otherwise the pulled file seeds it.
+    if os.path.exists(TestRunner._COVERAGE_MERGED_FILENAME):
+      cmd = ['java', '-classpath', TestRunner._EMMA_JAR, 'emma', 'merge',
+             '-in', pulled_coverage_file,
+             '-in', TestRunner._COVERAGE_MERGED_FILENAME,
+             '-out', TestRunner._COVERAGE_MERGED_FILENAME]
+      cmd_helper.RunCmd(cmd)
+    else:
+      shutil.copy(pulled_coverage_file,
+                  TestRunner._COVERAGE_MERGED_FILENAME)
+    os.remove(pulled_coverage_file)
+
+  def GenerateCoverageReportIfNeeded(self):
+    """Uses the Emma to generate a coverage report and a html page."""
+    if not self.coverage:
+      return
+    cmd = ['java', '-classpath', TestRunner._EMMA_JAR,
+           'emma', 'report', '-r', 'html',
+           '-in', TestRunner._COVERAGE_MERGED_FILENAME,
+           '-in', TestRunner._COVERAGE_META_INFO_PATH]
+    cmd_helper.RunCmd(cmd)
+    new_dir = os.path.join(TestRunner._COVERAGE_WEB_ROOT_DIR,
+                           time.strftime('Coverage_for_%Y_%m_%d_%a_%H:%M'))
+    shutil.copytree('coverage', new_dir)
+
+    latest_dir = os.path.join(TestRunner._COVERAGE_WEB_ROOT_DIR,
+                              'Latest_Coverage_Run')
+    if os.path.exists(latest_dir):
+      shutil.rmtree(latest_dir)
+    os.mkdir(latest_dir)
+    webserver_new_index = os.path.join(new_dir, 'index.html')
+    webserver_new_files = os.path.join(new_dir, '_files')
+    webserver_latest_index = os.path.join(latest_dir, 'index.html')
+    webserver_latest_files = os.path.join(latest_dir, '_files')
+    # Setup new softlinks to last result.
+    os.symlink(webserver_new_index, webserver_latest_index)
+    os.symlink(webserver_new_files, webserver_latest_files)
+    cmd_helper.RunCmd(['chmod', '755', '-R', latest_dir, new_dir])
+
+  def _GetInstrumentationArgs(self):
+    """Builds the extra instrumentation arguments (coverage/debug flags)."""
+    ret = {}
+    if self.coverage:
+      ret['coverage'] = 'true'
+    if self.wait_for_debugger:
+      ret['debug'] = 'true'
+    return ret
+
+  def _TakeScreenshot(self, test):
+    """Takes a screenshot from the device."""
+    screenshot_name = os.path.join(constants.SCREENSHOTS_DIR, test + '.png')
+    logging.info('Taking screenshot named %s', screenshot_name)
+    self.adb.TakeScreenshot(screenshot_name)
+
+  def SetUp(self):
+    """Sets up the test harness and device before all tests are run."""
+    super(TestRunner, self).SetUp()
+    if not self.adb.IsRootEnabled():
+      logging.warning('Unable to enable java asserts for %s, non rooted device',
+                      self.device)
+    else:
+      if self.adb.SetJavaAssertsEnabled(enable=not self.disable_assertions):
+        self.adb.Reboot(full_reboot=False)
+
+    # We give different default value to launch HTTP server based on shard index
+    # because it may have race condition when multiple processes are trying to
+    # launch lighttpd with same port at same time.
+    http_server_ports = self.LaunchTestHttpServer(
+        os.path.join(constants.CHROME_DIR),
+        (constants.LIGHTTPD_RANDOM_PORT_FIRST + self.shard_index))
+    if self.ports_to_forward:
+      port_pairs = [(port, port) for port in self.ports_to_forward]
+      # We need to remember which ports the HTTP server is using, since the
+      # forwarder will stomp on them otherwise.
+      port_pairs.append(http_server_ports)
+      self.forwarder = Forwarder(self.adb, self.build_type)
+      self.forwarder.Run(port_pairs, self.tool, '127.0.0.1')
+    self.CopyTestFilesOnce()
+    self.flags.AddFlags(['--enable-test-intents'])
+
+  def TearDown(self):
+    """Cleans up the test harness and saves outstanding data from test run."""
+    if self.forwarder:
+      self.forwarder.Close()
+    self.GenerateCoverageReportIfNeeded()
+    super(TestRunner, self).TearDown()
+
+  def TestSetup(self, test):
+    """Sets up the test harness for running a particular test.
+
+    Args:
+      test: The name of the test that will be run.
+    """
+    self.SetupPerfMonitoringIfNeeded(test)
+    self._SetupIndividualTestTimeoutScale(test)
+    self.tool.SetupEnvironment()
+
+    # Make sure the forwarder is still running.
+    self.RestartHttpServerForwarderIfNecessary()
+
+  def _IsPerfTest(self, test):
+    """Determines whether a test is a performance test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the test is annotated as a performance test.
+    """
+    return _PERF_TEST_ANNOTATION in self.test_apk.GetTestAnnotations(test)
+
+  def SetupPerfMonitoringIfNeeded(self, test):
+    """Sets up performance monitoring if the specified test requires it.
+
+    Args:
+      test: The name of the test to be run.
+    """
+    if not self._IsPerfTest(test):
+      return
+    self.adb.Adb().SendCommand('shell rm ' +
+                               TestRunner._DEVICE_PERF_OUTPUT_SEARCH_PREFIX)
+    self.adb.StartMonitoringLogcat()
+
+  def TestTeardown(self, test, test_result):
+    """Cleans up the test harness after running a particular test.
+
+    Depending on the options of this TestRunner this might handle coverage
+    tracking or performance tracking.  This method will only be called if the
+    test passed.
+
+    Args:
+      test: The name of the test that was just run.
+      test_result: result for this test.
+    """
+
+    self.tool.CleanUpEnvironment()
+
+    # The logic below relies on the test passing.
+    if not test_result or test_result.GetStatusCode():
+      return
+
+    self.TearDownPerfMonitoring(test)
+    self.SaveCoverageData(test)
+
+  def TearDownPerfMonitoring(self, test):
+    """Cleans up performance monitoring if the specified test required it.
+
+    Args:
+      test: The name of the test that was just run.
+    Raises:
+      FatalTestException: if there's anything wrong with the perf data.
+    """
+    if not self._IsPerfTest(test):
+      return
+    raw_test_name = test.split('#')[1]
+
+    # Wait and grab annotation data so we can figure out which traces to parse
+    # NOTE(review): the pattern below is not a raw string literal; the escapes
+    # happen to be interpreted as intended, but r'' literals would be clearer.
+    regex = self.adb.WaitForLogMatch(re.compile('\*\*PERFANNOTATION\(' +
+                                                raw_test_name +
+                                                '\)\:(.*)'), None)
+
+    # If the test is set to run on a specific device type only (IE: only
+    # tablet or phone) and it is being run on the wrong device, the test
+    # just quits and does not do anything.  The java test harness will still
+    # print the appropriate annotation for us, but will add --NORUN-- for
+    # us so we know to ignore the results.
+    # The --NORUN-- tag is managed by MainActivityTestBase.java
+    if regex.group(1) != '--NORUN--':
+
+      # Obtain the relevant perf data.  The data is dumped to a
+      # JSON formatted file.
+      json_string = self.adb.GetFileContents(
+          '/data/data/com.google.android.apps.chrome/files/PerfTestData.txt')
+
+      if json_string:
+        json_string = '\n'.join(json_string)
+      else:
+        raise FatalTestException('Perf file does not exist or is empty')
+
+      if self.save_perf_json:
+        json_local_file = '/tmp/chromium-android-perf-json-' + raw_test_name
+        with open(json_local_file, 'w') as f:
+          f.write(json_string)
+        logging.info('Saving Perf UI JSON from test ' +
+                     test + ' to ' + json_local_file)
+
+      raw_perf_data = regex.group(1).split(';')
+
+      for raw_perf_set in raw_perf_data:
+        if raw_perf_set:
+          # Each set is '<json key>,<name>,<graph name>'.
+          perf_set = raw_perf_set.split(',')
+          if len(perf_set) != 3:
+            raise FatalTestException('Unexpected number of tokens in '
+                                     'perf annotation string: ' + raw_perf_set)
+
+          # Process the performance data
+          result = GetAverageRunInfoFromJSONString(json_string, perf_set[0])
+
+          PrintPerfResult(perf_set[1], perf_set[2],
+                          [result['average']], result['units'])
+
+  def _SetupIndividualTestTimeoutScale(self, test):
+    """Applies the per-test timeout scale on the device before the test runs."""
+    timeout_scale = self._GetIndividualTestTimeoutScale(test)
+    valgrind_tools.SetChromeTimeoutScale(self.adb, timeout_scale)
+
+  def _GetIndividualTestTimeoutScale(self, test):
+    """Returns the timeout scale for the given |test|."""
+    annotations = self.apks[0].GetTestAnnotations(test)
+    timeout_scale = 1
+    if 'TimeoutScale' in annotations:
+      for annotation in annotations:
+        scale_match = re.match('TimeoutScale:([0-9]+)', annotation)
+        if scale_match:
+          timeout_scale = int(scale_match.group(1))
+    # Debugger sessions are interactive; make timeouts effectively inert.
+    if self.wait_for_debugger:
+      timeout_scale *= 100
+    return timeout_scale
+
+  def _GetIndividualTestTimeoutSecs(self, test):
+    """Returns the timeout in seconds for the given |test|."""
+    annotations = self.apks[0].GetTestAnnotations(test)
+    if 'Manual' in annotations:
+      return 600 * 60
+    if 'External' in annotations:
+      return 10 * 60
+    if 'LargeTest' in annotations or _PERF_TEST_ANNOTATION in annotations:
+      return 5 * 60
+    if 'MediumTest' in annotations:
+      return 3 * 60
+    return 1 * 60
+
+  def RunTests(self):
+    """Runs the tests, generating the coverage if needed.
+
+    Returns:
+      A TestResults object.
+    """
+    instrumentation_path = (self.instrumentation_class_path +
+                            '/android.test.InstrumentationTestRunner')
+    instrumentation_args = self._GetInstrumentationArgs()
+    for test in self._GetTestsIter():
+      test_result = None
+      start_date_ms = None
+      try:
+        self.TestSetup(test)
+        # NOTE(review): int(time.time()) * 1000 truncates to whole seconds
+        # before scaling, so start/duration have second resolution only.
+        start_date_ms = int(time.time()) * 1000
+        args_with_filter = dict(instrumentation_args)
+        args_with_filter['class'] = test
+        # |test_results| is a list that should contain
+        # a single TestResult object.
+        logging.warn(args_with_filter)
+        (test_results, _) = self.adb.Adb().StartInstrumentation(
+            instrumentation_path=instrumentation_path,
+            instrumentation_args=args_with_filter,
+            timeout_time=(self._GetIndividualTestTimeoutSecs(test) *
+                          self._GetIndividualTestTimeoutScale(test) *
+                          self.tool.GetTimeoutScale()))
+        duration_ms = int(time.time()) * 1000 - start_date_ms
+        assert len(test_results) == 1
+        test_result = test_results[0]
+        status_code = test_result.GetStatusCode()
+        if status_code:
+          log = test_result.GetFailureReason()
+          if not log:
+            log = 'No information.'
+          if self.screenshot_failures or log.find('INJECT_EVENTS perm') >= 0:
+            self._TakeScreenshot(test)
+          self.test_results.failed += [SingleTestResult(test, start_date_ms,
+                                                        duration_ms, log)]
+        else:
+          result = [SingleTestResult(test, start_date_ms, duration_ms)]
+          self.test_results.ok += result
+      # Catch exceptions thrown by StartInstrumentation().
+      # See ../../third_party/android/testrunner/adb_interface.py
+      except (errors.WaitForResponseTimedOutError,
+              errors.DeviceUnresponsiveError,
+              errors.InstrumentationError), e:
+        if start_date_ms:
+          duration_ms = int(time.time()) * 1000 - start_date_ms
+        else:
+          start_date_ms = int(time.time()) * 1000
+          duration_ms = 0
+        message = str(e)
+        if not message:
+          message = 'No information.'
+        self.test_results.crashed += [SingleTestResult(test, start_date_ms,
+                                                       duration_ms,
+                                                       message)]
+        test_result = None
+      self.TestTeardown(test, test_result)
+    return self.test_results
+
+
class TestSharder(BaseTestSharder):
  """Shards the Java tests across all attached devices."""

  def __init__(self, attached_devices, options, tests, apks):
    BaseTestSharder.__init__(self, attached_devices, options.build_type)
    self.apks = apks
    self.options = options
    self.tests = tests

  def SetupSharding(self, tests):
    """Fills the shared test queue that the shards will consume."""
    queue = sharded_tests_queue.ShardedTestsQueue(
        len(self.attached_devices), tests)
    SetTestsContainer(queue)

  def CreateShardedTestRunner(self, device, index):
    """Creates a sharded test runner.

    Args:
      device: Device serial where this shard will run.
      index: Index of this device in the pool.

    Returns:
      A TestRunner object bound to |device| at shard |index|.
    """
    return TestRunner(self.options, device, None, False, index, self.apks, [])
+
+
+def DispatchJavaTests(options, apks):
+  """Dispatches Java tests onto connected device(s).
+
+  If possible, this method will attempt to shard the tests to
+  all connected devices. Otherwise, dispatch and run tests on one device.
+
+  Args:
+    options: Command line options.
+    apks: list of APKs to use.
+
+  Returns:
+    A TestResults object holding the results of the Java tests.
+
+  Raises:
+    FatalTestException: when there are no attached/visible devices.
+  """
+  test_apk = apks[0]
+  # The default annotation for tests which do not have any sizes annotation.
+  default_size_annotation = 'SmallTest'
+
+  def _GetTestsMissingAnnotation(test_apk):
+    """Returns the sorted Java test methods lacking any size annotation."""
+    test_size_annotations = frozenset(['Smoke', 'SmallTest', 'MediumTest',
+                                       'LargeTest', 'EnormousTest', 'FlakyTest',
+                                       'DisabledTest', 'Manual', 'PerfTest'])
+    tests_missing_annotations = []
+    for test_method in test_apk.GetTestMethods():
+      annotations = frozenset(test_apk.GetTestAnnotations(test_method))
+      if (annotations.isdisjoint(test_size_annotations) and
+          not apk_info.ApkInfo.IsPythonDrivenTest(test_method)):
+        tests_missing_annotations.append(test_method)
+    return sorted(tests_missing_annotations)
+
+  if options.annotation:
+    available_tests = test_apk.GetAnnotatedTests(options.annotation)
+    # Unannotated tests are treated as SmallTest when SmallTest is requested.
+    if options.annotation.count(default_size_annotation) > 0:
+      tests_missing_annotations = _GetTestsMissingAnnotation(test_apk)
+      if tests_missing_annotations:
+        logging.warning('The following tests do not contain any annotation. '
+                        'Assuming "%s":\n%s',
+                        default_size_annotation,
+                        '\n'.join(tests_missing_annotations))
+        available_tests += tests_missing_annotations
+  else:
+    available_tests = [m for m in test_apk.GetTestMethods()
+                       if not apk_info.ApkInfo.IsPythonDrivenTest(m)]
+  # Coverage collection is toggled through the EMMA_INSTRUMENT env variable.
+  coverage = os.environ.get('EMMA_INSTRUMENT') == 'true'
+
+  tests = []
+  if options.test_filter:
+    # |available_tests| are in adb instrument format: package.path.class#test.
+    filter_without_hash = options.test_filter.replace('#', '.')
+    tests = [t for t in available_tests
+             if filter_without_hash in t.replace('#', '.')]
+  else:
+    tests = available_tests
+
+  if not tests:
+    logging.warning('No Java tests to run with current args.')
+    return TestResults()
+
+  # Repeat each test |number_of_runs| times.
+  tests *= options.number_of_runs
+
+  attached_devices = android_commands.GetAttachedDevices()
+  test_results = TestResults()
+
+  if not attached_devices:
+    raise FatalTestException('You have no devices attached or visible!')
+  if options.device:
+    attached_devices = [options.device]
+
+  logging.info('Will run: %s', str(tests))
+
+  if len(attached_devices) > 1 and (coverage or options.wait_for_debugger):
+    logging.warning('Coverage / debugger can not be sharded, '
+                    'using first available device')
+    attached_devices = attached_devices[:1]
+  sharder = TestSharder(attached_devices, options, tests, apks)
+  test_results = sharder.RunShardedTests()
+  return test_results
diff --git a/src/build/android/pylib/run_python_tests.py b/src/build/android/pylib/run_python_tests.py
new file mode 100644
index 0000000..7d39f48
--- /dev/null
+++ b/src/build/android/pylib/run_python_tests.py
@@ -0,0 +1,207 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the Python tests (relies on using the Java test runner)."""
+
+import logging
+import os
+import sys
+import types
+
+import android_commands
+import apk_info
+import constants
+import python_test_base
+from python_test_caller import CallPythonTest
+from python_test_sharder import PythonTestSharder
+import run_java_tests
+from run_java_tests import FatalTestException
+from test_info_collection import TestInfoCollection
+from test_result import TestResults
+
+
+def _GetPythonFiles(root, files):
+  """Returns all files from |files| that end in 'Test.py'.
+
+  Args:
+    root: A directory name with python files.
+    files: A list of file names.
+
+  Returns:
+    A list with all Python driven test file paths.
+  """
+  # Python-driven tests are identified purely by the 'Test.py' filename
+  # suffix; every other entry in |files| is ignored.
+  return [os.path.join(root, f) for f in files if f.endswith('Test.py')]
+
+
+def _InferImportNameFromFile(python_file):
+  """Given a file, infer the import name for that file.
+
+  Example: /usr/foo/bar/baz.py -> baz.
+
+  Args:
+    python_file: path to the Python file, ostensibly to import later.
+
+  Returns:
+    The module name for the given file.
+  """
+  # Strip both the directory and the '.py' extension, leaving the bare
+  # module name that __import__ expects.
+  return os.path.splitext(os.path.basename(python_file))[0]
+
+
+def DispatchPythonTests(options):
+  """Dispatches the Python tests. If there are multiple devices, use sharding.
+
+  Args:
+    options: command line options.
+
+  Returns:
+    A list of test results.
+
+  Raises:
+    FatalTestException: if no devices are attached or visible.
+  """
+
+  attached_devices = android_commands.GetAttachedDevices()
+  if not attached_devices:
+    raise FatalTestException('You have no devices attached or visible!')
+  if options.device:
+    # An explicitly requested device overrides the autodetected list.
+    attached_devices = [options.device]
+
+  test_collection = TestInfoCollection()
+  all_tests = _GetAllTests(options.python_test_root, options.official_build)
+  test_collection.AddTests(all_tests)
+  test_names = [t.qualified_name for t in all_tests]
+  logging.debug('All available tests: ' + str(test_names))
+
+  # Narrow the full set down by annotation and test-name filter.
+  available_tests = test_collection.GetAvailableTests(
+      options.annotation, options.test_filter)
+
+  if not available_tests:
+    logging.warning('No Python tests to run with current args.')
+    return TestResults()
+
+  # Repeat the whole list to honor the requested number of runs.
+  available_tests *= options.number_of_runs
+  test_names = [t.qualified_name for t in available_tests]
+  logging.debug('Final list of tests to run: ' + str(test_names))
+
+  # Copy files to each device before running any tests.
+  for device_id in attached_devices:
+    logging.debug('Pushing files to device %s', device_id)
+    apks = [apk_info.ApkInfo(options.test_apk_path, options.test_apk_jar_path)]
+    # The TestRunner is constructed purely for its file-pushing side effect;
+    # the None/False/0 constructor arguments are placeholders here.
+    test_files_copier = run_java_tests.TestRunner(options, device_id,
+                                                  None, False, 0, apks, [])
+    test_files_copier.CopyTestFilesOnce()
+
+  # Actually run the tests.
+  if len(attached_devices) > 1 and options.wait_for_debugger:
+    # A debugger session can only target one device at a time.
+    logging.warning('Debugger can not be sharded, '
+                    'using first available device')
+    attached_devices = attached_devices[:1]
+  logging.debug('Running Python tests')
+  sharder = PythonTestSharder(attached_devices, available_tests, options)
+  test_results = sharder.RunShardedTests()
+
+  return test_results
+
+
+def _GetTestModules(python_test_root, is_official_build):
+  """Retrieve a sorted list of pythonDrivenTests.
+
+  Walks the location of pythonDrivenTests, imports them, and provides the list
+  of imported modules to the caller.
+
+  Args:
+    python_test_root: the path to walk, looking for pythonDrivenTests
+    is_official_build: whether to run only those tests marked 'official'
+
+  Returns:
+    A list of Python modules which may have zero or more tests.
+  """
+  # By default run all python tests under pythonDrivenTests.
+  python_test_file_list = []
+  for root, _, files in os.walk(python_test_root):
+    # NOTE(review): the 'official' check matches a '/'-separated suffix,
+    # which assumes POSIX-style paths from os.walk -- confirm if Windows
+    # hosts ever matter here.
+    if (root.endswith('pythonDrivenTests')
+        or (is_official_build
+            and root.endswith('pythonDrivenTests/official'))):
+      python_test_file_list += _GetPythonFiles(root, files)
+  # Sort so import (and therefore test) order is deterministic.
+  python_test_file_list.sort()
+
+  test_module_list = [_GetModuleFromFile(test_file)
+                      for test_file in python_test_file_list]
+  return test_module_list
+
+
+def _GetModuleFromFile(python_file):
+  """Gets the module associated with a file by importing it.
+
+  Args:
+    python_file: file to import
+
+  Returns:
+    The module object.
+  """
+  # NOTE(review): sys.path grows by one entry per imported file and is never
+  # pruned; test files with identical basenames in different directories
+  # could shadow one another.
+  sys.path.append(os.path.dirname(python_file))
+  import_name = _InferImportNameFromFile(python_file)
+  return __import__(import_name)
+
+
+def _GetTestsFromClass(test_class):
+  """Create a list of test objects for each test method on this class.
+
+  Test methods are methods on the class which begin with 'test'.
+
+  Args:
+    test_class: class object which contains zero or more test methods.
+
+  Returns:
+    A list of test objects, each of which is bound to one test.
+  """
+  test_names = [m for m in dir(test_class)
+                if _IsTestMethod(m, test_class)]
+  # Instantiate the class once per test method, passing the method name to
+  # the constructor (unittest.TestCase-style), yielding one object per test.
+  return map(test_class, test_names)
+
+
+def _GetTestClassesFromModule(test_module):
+  """Collects test objects from every test class defined in |test_module|.
+
+  Args:
+    test_module: a module which may define test classes.
+
+  Returns:
+    A list of test objects, one per test method across all test classes.
+  """
+  tests = []
+  for name in dir(test_module):
+    attr = getattr(test_module, name)
+    if _IsTestClass(attr):
+      tests.extend(_GetTestsFromClass(attr))
+  return tests
+
+
+def _IsTestClass(test_class):
+  """Returns True if |test_class| is a concrete PythonTestBase subclass.
+
+  types.TypeType restricts the check to (new-style) classes; the base class
+  itself is excluded so that only real test implementations are collected.
+  """
+  return (type(test_class) is types.TypeType and
+          issubclass(test_class, python_test_base.PythonTestBase) and
+          test_class is not python_test_base.PythonTestBase)
+
+
+def _IsTestMethod(attrname, test_case_class):
+  """Checks whether this is a valid test method.
+
+  Args:
+    attrname: the method name.
+    test_case_class: the test case class.
+
+  Returns:
+    True if test_case_class.'attrname' is callable and it starts with 'test';
+    False otherwise.
+  """
+  # Any callable attribute counts, not just plain functions.
+  attr = getattr(test_case_class, attrname)
+  return callable(attr) and attrname.startswith('test')
+
+
+def _GetAllTests(test_root, is_official_build):
+  """Retrieve a list of Python test modules and their respective methods.
+
+  Args:
+    test_root: path which contains Python-driven test files
+    is_official_build: whether this is an official build
+
+  Returns:
+    List of test case objects for all available test methods.
+  """
+  # An unset test root means there is nothing to discover.
+  if not test_root:
+    return []
+  all_tests = []
+  test_module_list = _GetTestModules(test_root, is_official_build)
+  # Aggregate the tests from every discovered module.
+  for module in test_module_list:
+    all_tests.extend(_GetTestClassesFromModule(module))
+  return all_tests
diff --git a/src/build/android/pylib/run_tests_helper.py b/src/build/android/pylib/run_tests_helper.py
new file mode 100644
index 0000000..15e5d53
--- /dev/null
+++ b/src/build/android/pylib/run_tests_helper.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native, java and python test runners."""
+
+import logging
+import os
+
+
+def GetExpectations(file_name):
+  """Returns a list of test names in the |file_name| test expectations file.
+
+  Blank lines and lines starting with '#' (comments) are skipped; leading
+  and trailing whitespace is stripped from each entry. A missing or unset
+  file name yields an empty list.
+  """
+  if not file_name or not os.path.exists(file_name):
+    return []
+  # NOTE(review): uses the Python 2 'file' builtin and never explicitly
+  # closes the handle -- relies on refcounting to release it.
+  return [x for x in [x.strip() for x in file(file_name).readlines()]
+          if x and x[0] != '#']
+
+
+def SetLogLevel(verbose_count):
+  """Sets log level as |verbose_count|.
+
+  0 -> WARNING (default), 1 -> INFO, 2 or more -> DEBUG, applied to the
+  root logger.
+  """
+  log_level = logging.WARNING  # Default.
+  if verbose_count == 1:
+    log_level = logging.INFO
+  elif verbose_count >= 2:
+    log_level = logging.DEBUG
+  logging.getLogger().setLevel(log_level)
diff --git a/src/build/android/pylib/sharded_tests_queue.py b/src/build/android/pylib/sharded_tests_queue.py
new file mode 100644
index 0000000..9e28e2c
--- /dev/null
+++ b/src/build/android/pylib/sharded_tests_queue.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A module that contains a queue for running sharded tests."""
+
+import multiprocessing
+
+
+class ShardedTestsQueue(object):
+  """A queue for managing pending tests across different runners.
+
+  This class should only be used when sharding.
+
+  Attributes:
+    num_devices: an integer; the number of attached Android devices.
+    tests: a list of tests to be run.
+    tests_queue: if sharding, a JoinableQueue object that holds tests from
+        |tests|. Otherwise, a list holding tests.
+    results_queue: a Queue object to hold TestResults objects.
+  """
+  _STOP_SENTINEL = 'STOP'  # sentinel value for iter()
+
+  def __init__(self, num_devices, tests):
+    self.num_devices = num_devices
+    self.tests_queue = multiprocessing.Queue()
+    for test in tests:
+      self.tests_queue.put(test)
+    # Append one sentinel per device so that each consumer's iterator
+    # (see __iter__) terminates exactly once.
+    for _ in xrange(self.num_devices):
+      self.tests_queue.put(ShardedTestsQueue._STOP_SENTINEL)
+
+  def __iter__(self):
+    """Returns an iterator with the test cases."""
+    # iter(callable, sentinel) keeps calling get() until it returns the
+    # sentinel value.
+    return iter(self.tests_queue.get, ShardedTestsQueue._STOP_SENTINEL)
diff --git a/src/build/android/pylib/single_test_runner.py b/src/build/android/pylib/single_test_runner.py
new file mode 100644
index 0000000..dee6cd6
--- /dev/null
+++ b/src/build/android/pylib/single_test_runner.py
@@ -0,0 +1,293 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import logging
+import os
+import sys
+
+import android_commands
+from android_commands import errors
+from base_test_runner import BaseTestRunner
+import constants
+import debug_info
+import perf_tests_helper
+import run_tests_helper
+from test_package_apk import TestPackageApk
+from test_package_executable import TestPackageExecutable
+from test_result import BaseTestResult, TestResults
+
+
+class SingleTestRunner(BaseTestRunner):
+  """Single test suite attached to a single device.
+
+  Args:
+    device: Device to run the tests.
+    test_suite: A specific test suite to run, empty to run all.
+    gtest_filter: A gtest_filter flag.
+    test_arguments: Additional arguments to pass to the test binary.
+    timeout: Timeout for each test.
+    cleanup_test_files: Whether or not to cleanup test files on device.
+    tool: Name of the Valgrind tool.
+    shard_index: index number of the shard on which the test suite will run.
+    dump_debug_info: Whether or not to dump debug information.
+    build_type: 'Release' or 'Debug'.
+    in_webkit_checkout: Whether the suite is being run from a WebKit checkout.
+  """
+
+  def __init__(self, device, test_suite, gtest_filter, test_arguments, timeout,
+               cleanup_test_files, tool_name, shard_index, dump_debug_info,
+               fast_and_loose, build_type, in_webkit_checkout):
+    BaseTestRunner.__init__(self, device, tool_name, shard_index, build_type)
+    # Emulator serial numbers start with 'emulator'; this flag selects the
+    # extra emulator-only disabled-test filter in GetDisabledTests().
+    self._running_on_emulator = self.device.startswith('emulator')
+    self._gtest_filter = gtest_filter
+    self._test_arguments = test_arguments
+    self.test_results = TestResults()
+    if dump_debug_info:
+      self.dump_debug_info = debug_info.GTestDebugInfo(
+          self.adb, device,
+          os.path.basename(test_suite), gtest_filter)
+    else:
+      self.dump_debug_info = None
+    self.fast_and_loose = fast_and_loose
+    self.in_webkit_checkout = in_webkit_checkout
+
+    logging.warning('Test suite: ' + test_suite)
+    # An '.apk' suite is run via the Android package machinery; anything
+    # else is treated as a native executable pushed to the device.
+    if os.path.splitext(test_suite)[1] == '.apk':
+      self.test_package = TestPackageApk(
+          self.adb,
+          device,
+          test_suite,
+          timeout,
+          cleanup_test_files,
+          self.tool,
+          self.dump_debug_info)
+    else:
+      # Put a copy into the android out/target directory, to allow stack trace
+      # generation.
+      symbols_dir = os.path.join(constants.CHROME_DIR, 'out', build_type,
+                                 'lib.target')
+      self.test_package = TestPackageExecutable(
+          self.adb,
+          device,
+          test_suite, timeout,
+          cleanup_test_files,
+          self.tool,
+          self.dump_debug_info,
+          symbols_dir)
+
+  def _TestSuiteRequiresMockTestServer(self):
+    """Returns True if the test suite requires mock test server."""
+    tests_require_net_test_server = ['unit_tests', 'net_unittests',
+                                     'content_unittests']
+    return (self.test_package.test_suite_basename in
+            tests_require_net_test_server)
+
+  def _GetFilterFileName(self):
+    """Returns the filename of gtest filter."""
+    # Disabled-test expectations live next to the scripts, under
+    # gtest_filter/<suite>_disabled.
+    return os.path.join(
+        sys.path[0], 'gtest_filter',
+        self.test_package.test_suite_basename + '_disabled')
+
+  def _GetAdditionalEmulatorFilterName(self):
+    """Returns the filename of additional gtest filter for emulator."""
+    return os.path.join(
+        sys.path[0], 'gtest_filter',
+        self.test_package.test_suite_basename +
+        '_emulator_additional_disabled')
+
+  def GetDisabledTests(self):
+    """Returns a list of disabled tests.
+
+    Returns:
+      A list of disabled tests obtained from gtest_filter/test_suite_disabled.
+    """
+    disabled_tests = run_tests_helper.GetExpectations(self._GetFilterFileName())
+    if self._running_on_emulator:
+      # Append emulator's filter file.
+      disabled_tests.extend(run_tests_helper.GetExpectations(
+          self._GetAdditionalEmulatorFilterName()))
+    return disabled_tests
+
+  def GetDataFilesForTestSuite(self):
+    """Returns a list of data files/dirs needed by the test suite."""
+    # Ideally, we'd just push all test data. However, it has >100MB, and a lot
+    # of the files are not relevant (some are used for browser_tests, others for
+    # features not supported, etc..).
+    if self.test_package.test_suite_basename in ['base_unittests',
+                                                 'sql_unittests',
+                                                 'unit_tests']:
+      test_files = [
+          'base/data/file_util_unittest',
+          'base/data/json/bom_feff.json',
+          'base/prefs/test/data/pref_service',
+          'chrome/test/data/download-test1.lib',
+          'chrome/test/data/extensions/bad_magic.crx',
+          'chrome/test/data/extensions/good.crx',
+          'chrome/test/data/extensions/icon1.png',
+          'chrome/test/data/extensions/icon2.png',
+          'chrome/test/data/extensions/icon3.png',
+          'chrome/test/data/extensions/allow_silent_upgrade/',
+          'chrome/test/data/extensions/app/',
+          'chrome/test/data/extensions/bad/',
+          'chrome/test/data/extensions/effective_host_permissions/',
+          'chrome/test/data/extensions/empty_manifest/',
+          'chrome/test/data/extensions/good/Extensions/',
+          'chrome/test/data/extensions/manifest_tests/',
+          'chrome/test/data/extensions/page_action/',
+          'chrome/test/data/extensions/permissions/',
+          'chrome/test/data/extensions/script_and_capture/',
+          'chrome/test/data/extensions/unpacker/',
+          'chrome/test/data/bookmarks/',
+          'chrome/test/data/components/',
+          'chrome/test/data/extensions/json_schema_test.js',
+          'chrome/test/data/History/',
+          'chrome/test/data/json_schema_validator/',
+          'chrome/test/data/pref_service/',
+          'chrome/test/data/serializer_nested_test.js',
+          'chrome/test/data/serializer_test.js',
+          'chrome/test/data/serializer_test_nowhitespace.js',
+          'chrome/test/data/top_sites/',
+          'chrome/test/data/web_app_info/',
+          'chrome/test/data/web_database',
+          'chrome/test/data/webui/',
+          'chrome/test/data/zip',
+          'chrome/third_party/mock4js/',
+          'content/browser/gpu/software_rendering_list.json',
+          'net/data/cache_tests/insert_load1',
+          'net/data/cache_tests/dirty_entry5',
+          'net/data/ssl/certificates/',
+          'ui/base/test/data/data_pack_unittest',
+      ]
+      if self.test_package.test_suite_basename == 'unit_tests':
+        test_files += ['chrome/test/data/simple_open_search.xml']
+        # The following are spell check data. Now only list the data under
+        # third_party/hunspell_dictionaries which are used by unit tests.
+        # glob patterns are relative, so temporarily chdir to CHROME_DIR and
+        # restore the working directory afterwards.
+        old_cwd = os.getcwd()
+        os.chdir(constants.CHROME_DIR)
+        test_files += glob.glob('third_party/hunspell_dictionaries/*.bdic')
+        os.chdir(old_cwd)
+      return test_files
+    elif self.test_package.test_suite_basename == 'media_unittests':
+      return [
+          'media/test/data',
+      ]
+    elif self.test_package.test_suite_basename == 'net_unittests':
+      return [
+          'chrome/test/data/animate1.gif',
+          'chrome/test/data/simple.html',
+          'net/data/cache_tests',
+          'net/data/filter_unittests',
+          'net/data/ftp',
+          'net/data/proxy_resolver_v8_unittest',
+          'net/data/ssl/certificates',
+          'net/data/url_request_unittest/',
+          'net/data/proxy_script_fetcher_unittest'
+          ]
+    elif self.test_package.test_suite_basename == 'ui_tests':
+      return [
+          'chrome/test/data/dromaeo',
+          'chrome/test/data/json2.js',
+          'chrome/test/data/sunspider',
+          'chrome/test/data/v8_benchmark',
+          'chrome/test/perf/v8_benchmark_uitest.js',
+          ]
+    elif self.test_package.test_suite_basename == 'content_unittests':
+      return [
+          'content/test/data/gpu/webgl_conformance_test_expectations.txt',
+          'net/data/ssl/certificates/',
+          'webkit/data/dom_storage/webcore_test_database.localstorage',
+          'third_party/hyphen/hyph_en_US.dic',
+          ]
+    # NOTE(review): this branch is unreachable -- 'media_unittests' is
+    # already matched by the identical elif above.
+    elif self.test_package.test_suite_basename == 'media_unittests':
+      return [
+          'media/test/data',
+          ]
+    return []
+
+  def LaunchHelperToolsForTestSuite(self):
+    """Launches helper tools for the test suite.
+
+    Sometimes one test may need to run some helper tools first in order to
+    successfully complete the test.
+    """
+    if self._TestSuiteRequiresMockTestServer():
+      self.LaunchChromeTestServerSpawner()
+
+  def StripAndCopyFiles(self):
+    """Strips and copies the required data files for the test suite."""
+    self.test_package.StripAndCopyExecutable()
+    self.test_package.PushDataAndPakFiles()
+    self.tool.CopyFiles()
+    test_data = self.GetDataFilesForTestSuite()
+    # fast_and_loose mode skips pushing test data to save time.
+    if test_data and not self.fast_and_loose:
+      # Make sure SD card is ready.
+      self.adb.WaitForSdCardReady(20)
+      for data in test_data:
+        self.CopyTestData([data], self.adb.GetExternalStorage())
+    if self.test_package.test_suite_basename == 'webkit_unit_tests':
+      self.PushWebKitUnitTestsData()
+
+  def PushWebKitUnitTestsData(self):
+    """Pushes the webkit_unit_tests data files to the device.
+
+    The path of this directory is different when the suite is being run as
+    part of a WebKit check-out.
+    """
+    webkit_src = os.path.join(constants.CHROME_DIR, 'third_party', 'WebKit')
+    if self.in_webkit_checkout:
+      webkit_src = os.path.join(constants.CHROME_DIR, '..', '..', '..')
+
+    self.adb.PushIfNeeded(
+        os.path.join(webkit_src, 'Source/WebKit/chromium/tests/data'),
+        os.path.join(
+            self.adb.GetExternalStorage(),
+            'third_party/WebKit/Source/WebKit/chromium/tests/data'))
+
+  def RunTests(self):
+    """Runs tests on a single device.
+
+    Returns:
+      A TestResults object.
+    """
+    try:
+      self.test_package.CreateTestRunnerScript(self._gtest_filter,
+                                               self._test_arguments)
+      self.test_results = self.test_package.RunTestsAndListResults()
+    except errors.DeviceUnresponsiveError as e:
+      # Make sure this device is not attached
+      if android_commands.IsDeviceAttached(self.device):
+        raise e
+
+      # TODO(frankf): We should report these as "skipped" not "failures".
+      # Wrap the results
+      # The device disappeared mid-run: mark every filtered test as failed
+      # and record which device raised the exception.
+      logging.warning(e)
+      failed_tests = []
+      for t in self._gtest_filter.split(':'):
+        failed_tests += [BaseTestResult(t, '')]
+      self.test_results = TestResults.FromRun(
+          failed=failed_tests, device_exception=self.device)
+
+    return self.test_results
+
+  def SetUp(self):
+    """Sets up necessary test environment for the test suite."""
+    super(SingleTestRunner, self).SetUp()
+    self.adb.ClearApplicationState(constants.CHROME_PACKAGE)
+    if self.dump_debug_info:
+      self.dump_debug_info.StartRecordingLog(True)
+    self.StripAndCopyFiles()
+    self.LaunchHelperToolsForTestSuite()
+    self.tool.SetupEnvironment()
+
+  def TearDown(self):
+    """Cleans up the test environment for the test suite."""
+    self.tool.CleanUpEnvironment()
+    if self.test_package.cleanup_test_files:
+      self.adb.RemovePushedFiles()
+    if self.dump_debug_info:
+      self.dump_debug_info.StopRecordingLog()
+    # NOTE(review): duplicate guard; both calls could share one 'if'.
+    if self.dump_debug_info:
+      self.dump_debug_info.ArchiveNewCrashFiles()
+    super(SingleTestRunner, self).TearDown()
diff --git a/src/build/android/pylib/surface_stats_collector.py b/src/build/android/pylib/surface_stats_collector.py
new file mode 100644
index 0000000..9c0cb7e
--- /dev/null
+++ b/src/build/android/pylib/surface_stats_collector.py
@@ -0,0 +1,229 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import Queue
+import datetime
+import logging
+import re
+import threading
+
+from pylib import perf_tests_helper
+
+
+# Log marker containing SurfaceTexture timestamps.
+_SURFACE_TEXTURE_TIMESTAMPS_MESSAGE = 'SurfaceTexture update timestamps'
+# NOTE(review): not a raw string; '\d' survives only because Python leaves
+# unknown escapes intact. Prefer r'\d+' if this is ever touched. Neither
+# constant is referenced in this file's visible code -- presumably used by
+# callers elsewhere; verify before removing.
+_SURFACE_TEXTURE_TIMESTAMP_RE = '\d+'
+
+
+class SurfaceStatsCollector(object):
+  """Collects surface stats for a window from the output of SurfaceFlinger.
+
+  Intended for use as a context manager: collection starts on __enter__ and
+  results are printed on __exit__. Note __enter__ returns None, so the
+  plain 'with SurfaceStatsCollector(...):' form is expected.
+
+  Args:
+    adb: the adb connection to use.
+    window_package: Package name of the window.
+    window_activity: Activity name of the window.
+  """
+  def __init__(self, adb, window_package, window_activity, trace_tag):
+    self._adb = adb
+    self._window_package = window_package
+    self._window_activity = window_activity
+    self._trace_tag = trace_tag
+    self._collector_thread = None
+    # Set in __enter__ when the device lacks the SurfaceFlinger --latency
+    # API; the page-flip-count fallback is used instead.
+    self._use_legacy_method = False
+    self._surface_before = None
+    self._get_data_event = None
+    self._data_queue = None
+    self._stop_event = None
+
+  def __enter__(self):
+    assert not self._collector_thread
+
+    if self._ClearSurfaceFlingerLatencyData():
+      # Modern path: a background thread polls latency data periodically.
+      self._get_data_event = threading.Event()
+      self._stop_event = threading.Event()
+      self._data_queue = Queue.Queue()
+      self._collector_thread = threading.Thread(target=self._CollectorThread)
+      self._collector_thread.start()
+    else:
+      # Legacy path: snapshot the page flip count now, diff it on exit.
+      self._use_legacy_method = True
+      self._surface_before = self._GetSurfaceStatsLegacy()
+
+  def __exit__(self, *args):
+    # Results are printed unconditionally, even if the 'with' body raised.
+    self._PrintPerfResults()
+    if self._collector_thread:
+      self._stop_event.set()
+      self._collector_thread.join()
+      self._collector_thread = None
+
+  def _PrintPerfResults(self):
+    """Computes frame statistics and emits them via PrintPerfResult."""
+    if self._use_legacy_method:
+      # FPS = delta of page flip count over elapsed wall-clock time.
+      surface_after = self._GetSurfaceStatsLegacy()
+      td = surface_after['timestamp'] - self._surface_before['timestamp']
+      seconds = td.seconds + td.microseconds / 1e6
+      frame_count = (surface_after['page_flip_count'] -
+                     self._surface_before['page_flip_count'])
+    else:
+      assert self._collector_thread
+      (seconds, latencies) = self._GetDataFromThread()
+      if not seconds or not len(latencies):
+        logging.warning('Surface stat data is empty')
+        return
+
+      frame_count = len(latencies)
+      # A "jitter" event is counted each time a frame's latency (measured in
+      # refresh periods) exceeds that of the previous frame.
+      jitter_count = 0
+      last_latency = latencies[0]
+      for latency in latencies[1:]:
+        if latency > last_latency:
+          jitter_count = jitter_count + 1
+        last_latency = latency
+
+      perf_tests_helper.PrintPerfResult(
+          'surface_latencies', 'surface_latencies' + self._trace_tag,
+          latencies, '')
+      perf_tests_helper.PrintPerfResult(
+          'peak_jitter', 'peak_jitter' + self._trace_tag, [max(latencies)], '')
+      perf_tests_helper.PrintPerfResult(
+          'jitter_percent', 'jitter_percent' + self._trace_tag,
+          [jitter_count * 100.0 / frame_count], 'percent')
+
+    print 'SurfaceMonitorTime: %fsecs' % seconds
+    perf_tests_helper.PrintPerfResult(
+        'avg_surface_fps', 'avg_surface_fps' + self._trace_tag,
+        [int(round(frame_count / seconds))], 'fps')
+
+  def _CollectorThread(self):
+    """Background loop: accumulates latency data until _stop_event is set.
+
+    Roughly once a second (or sooner, when _get_data_event fires) it pulls
+    fresh SurfaceFlinger data; when the main thread requests data it ships
+    the accumulated (seconds, latencies) tuple through _data_queue.
+    """
+    last_timestamp = 0
+    first_timestamp = 0
+    latencies = []
+
+    while not self._stop_event.is_set():
+      self._get_data_event.wait(1)
+      try:
+        (t, last_timestamp) = self._GetSurfaceFlingerLatencyData(last_timestamp,
+                                                                 latencies)
+        if not first_timestamp:
+          first_timestamp = t
+
+        if self._get_data_event.is_set():
+          self._get_data_event.clear()
+          # Duration is converted from nanoseconds to seconds here.
+          self._data_queue.put(((last_timestamp - first_timestamp) / 1e9,
+                                latencies))
+          latencies = []
+          first_timestamp = 0
+      except Exception as e:
+        # On any error, before aborting, put the exception into _data_queue to
+        # prevent the main thread from waiting at _data_queue.get() infinitely.
+        self._data_queue.put(e)
+        raise
+
+  def _GetDataFromThread(self):
+    """Requests and returns (seconds, latencies) from the collector thread."""
+    self._get_data_event.set()
+    ret = self._data_queue.get()
+    # The collector thread forwards its own exceptions through the queue;
+    # re-raise them on the calling thread.
+    if isinstance(ret, Exception):
+      raise ret
+    return ret
+
+  def _ClearSurfaceFlingerLatencyData(self):
+    """Clears the SurfaceFlinger latency data.
+
+    Returns:
+      True if SurfaceFlinger latency is supported by the device, otherwise
+      False.
+    """
+    # The command returns nothing if it is supported, otherwise returns many
+    # lines of result just like 'dumpsys SurfaceFlinger'.
+    results = self._adb.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency-clear %s/%s' %
+        (self._window_package, self._window_activity))
+    return not len(results)
+
+  def _GetSurfaceFlingerLatencyData(self, previous_timestamp, latencies):
+    """Returns collected SurfaceFlinger latency data.
+
+    Args:
+      previous_timestamp: The timestamp returned from the previous call or 0.
+          Only data after this timestamp will be returned.
+      latencies: A list to receive latency data. The latencies are integers
+          each of which is the number of refresh periods of each frame.
+
+    Returns:
+      A tuple containing:
+      - The timestamp of the beginning of the first frame (ns),
+      - The timestamp of the end of the last frame (ns).
+
+    Raises:
+      Exception if failed to run the SurfaceFlinger command or SurfaceFlinger
+          returned invalid result.
+    """
+    # adb shell dumpsys SurfaceFlinger --latency <window name>
+    # prints some information about the last 128 frames displayed in
+    # that window.
+    # The data returned looks like this:
+    # 16954612
+    # 7657467895508   7657482691352   7657493499756
+    # 7657484466553   7657499645964   7657511077881
+    # 7657500793457   7657516600576   7657527404785
+    # (...)
+    #
+    # The first line is the refresh period (here 16.95 ms), it is followed
+    # by 128 lines w/ 3 timestamps in nanosecond each:
+    # A) when the app started to draw
+    # B) the vsync immediately preceding SF submitting the frame to the h/w
+    # C) timestamp immediately after SF submitted that frame to the h/w
+    #
+    # The difference between the 1st and 3rd timestamp is the frame-latency.
+    # An interesting data is when the frame latency crosses a refresh period
+    # boundary, this can be calculated this way:
+    #
+    # ceil((C - A) / refresh-period)
+    #
+    # (each time the number above changes, we have a "jank").
+    # If this happens a lot during an animation, the animation appears
+    # janky, even if it runs at 60 fps in average.
+    results = self._adb.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency %s/%s' %
+        (self._window_package, self._window_activity), log_result=True)
+    assert len(results)
+
+    refresh_period = int(results[0])
+    last_timestamp = previous_timestamp
+    first_timestamp = 0
+    for line in results[1:]:
+      fields = line.split()
+      if len(fields) == 3:
+        timestamp = long(fields[0])
+        last_timestamp = long(fields[2])
+        # Only frames newer than the previous call's cutoff are recorded.
+        if (timestamp > previous_timestamp):
+          if not first_timestamp:
+            first_timestamp = timestamp
+          # This is integral equivalent of ceil((C-A) / refresh-period)
+          # (Python 2 '/' on ints is integer division).
+          latency_ns = int(last_timestamp - timestamp)
+          latencies.append((latency_ns + refresh_period - 1) / refresh_period)
+    return (first_timestamp, last_timestamp)
+
+  def _GetSurfaceStatsLegacy(self):
+    """Legacy method (before JellyBean), returns the current Surface index
+       and timestamp.
+
+    Calculate FPS by measuring the difference of Surface index returned by
+    SurfaceFlinger in a period of time.
+
+    Returns:
+      Dict of {page_flip_count (or 0 if there was an error), timestamp}.
+    """
+    results = self._adb.RunShellCommand('service call SurfaceFlinger 1013')
+    assert len(results) == 1
+    # NOTE(review): pattern is not a raw string; '\(' and '\w' work only
+    # because Python passes unrecognized escapes through. Prefer r'...'.
+    match = re.search('^Result: Parcel\((\w+)', results[0])
+    cur_surface = 0
+    if match:
+      try:
+        # The parcel payload is a hexadecimal surface index.
+        cur_surface = int(match.group(1), 16)
+      except Exception:
+        logging.error('Failed to parse current surface from ' + match.group(1))
+    else:
+      logging.warning('Failed to call SurfaceFlinger surface ' + results[0])
+    return {
+        'page_flip_count': cur_surface,
+        'timestamp': datetime.datetime.now(),
+    }
diff --git a/src/build/android/pylib/test_info_collection.py b/src/build/android/pylib/test_info_collection.py
new file mode 100644
index 0000000..fc4e806
--- /dev/null
+++ b/src/build/android/pylib/test_info_collection.py
@@ -0,0 +1,137 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing information about the python-driven tests."""
+
+import logging
+import os
+
+import tests_annotations
+
+
class TestInfo(object):
  """Wraps a single python-driven test callable together with its metadata."""

  def __init__(self, runnable, set_up=None, tear_down=None):
    """Initializes the TestInfo.

    Args:
      runnable: the test function/method itself.
      set_up: optional setUp function run before the test.
      tear_down: optional tearDown function run after the test.
    """
    # The callable that actually executes the test.
    self.runnable = runnable
    # Fully qualified name, e.g. FooModule.testBar.
    self.qualified_name = self._GetQualifiedName(runnable)
    # Optional fixture hooks (may be None).
    self.set_up = set_up
    self.tear_down = tear_down

  def _GetQualifiedName(self, runnable):
    """Builds the module_name.testMethodName identifier for a runnable.

    Filters and test lists expect names of the form module_name.testMethod;
    derive that automatically via reflection on the function object.

    Args:
      runnable: the test method to name.

    Returns:
      The qualified name, including module name and method name.
    """
    # The defining module's file name (sans extension) is the module part.
    # See also tests_annotations.
    source_file = runnable.__globals__['__file__']
    module_part = os.path.splitext(os.path.basename(source_file))[0]
    return '%s.%s' % (module_part, runnable.__name__)

  def __str__(self):
    return self.qualified_name
+
+
class TestInfoCollection(object):
  """Holds TestInfo objects and supports filtering them."""

  def __init__(self):
    """Initializes an empty collection."""
    # Master list of all valid tests.
    self.all_tests = []

  def AddTests(self, test_infos):
    """Registers the tests held by this collection.

    The user may then retrieve them, optionally according to criteria, via
    GetAvailableTests().

    Args:
      test_infos: a list of TestInfos representing test functions/methods.
    """
    self.all_tests = test_infos

  def GetAvailableTests(self, annotation, name_filter):
    """Gets the TestInfos which match the supplied criteria.

    Args:
      annotation: annotation which tests must match, if any.
      name_filter: name filter which tests must match, if any.

    Returns:
      List of available tests.
    """
    # Keep only tests matching the annotation filter (everything passes
    # when no filter was supplied).
    matching = [t for t in self.all_tests
                if self._AnnotationIncludesTest(t, annotation)]
    if annotation and len(annotation) == 1 and annotation[0] == 'SmallTest':
      # Unannotated tests are implicitly treated as "SmallTest".
      unannotated = []
      for t in self.all_tests:
        if not tests_annotations.AnnotatedFunctions.GetTestAnnotations(
            t.qualified_name):
          unannotated.append(t)
      logging.warning('The following tests do not contain any annotation. '
                      'Assuming "SmallTest":\n%s',
                      '\n'.join(t.qualified_name for t in unannotated))
      matching += unannotated
    # Finally apply the (substring) name filter, if any.
    return [t for t in matching
            if self._NameFilterIncludesTest(t, name_filter)]

  def _AnnotationIncludesTest(self, test_info, annotation_filter_list):
    """Checks whether the given test matches the annotation filters.

    Args:
      test_info: TestInfo object representing the test.
      annotation_filter_list: list of annotation filters to match (e.g. Smoke).

    Returns:
      True if no annotation was supplied or the test matches; False otherwise.
    """
    if not annotation_filter_list:
      return True
    for annotation_filter in annotation_filter_list:
      key_value = annotation_filter.split('=')
      if len(key_value) == 2:
        # key=value1,value2 form: the test matches if annotated with the key
        # and any one of the listed values.
        for value in key_value[1].split(','):
          if tests_annotations.AnnotatedFunctions.IsAnnotated(
              key_value[0] + ':' + value, test_info.qualified_name):
            return True
      elif tests_annotations.AnnotatedFunctions.IsAnnotated(
          annotation_filter, test_info.qualified_name):
        return True
    return False

  def _NameFilterIncludesTest(self, test_info, name_filter):
    """Checks whether a name filter matches the test's qualified name.

    This is a case-sensitive, substring comparison: 'Foo' will match methods
    Foo.testBar and Bar.testFoo. 'foo' would not match either.

    Args:
      test_info: TestInfo object representing the test.
      name_filter: substring to check for in the qualified name of the test.

    Returns:
      True if no name filter supplied or it matches; False otherwise.
    """
    if not name_filter:
      return True
    return name_filter in test_info.qualified_name
diff --git a/src/build/android/pylib/test_options_parser.py b/src/build/android/pylib/test_options_parser.py
new file mode 100644
index 0000000..d88e997
--- /dev/null
+++ b/src/build/android/pylib/test_options_parser.py
@@ -0,0 +1,184 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Parses options for the instrumentation tests."""
+
+import constants
+import optparse
+import os
+import sys
+
+_SDK_OUT_DIR = os.path.join(constants.CHROME_DIR, 'out')
+
+
def AddBuildTypeOption(option_parser):
  """Decorates OptionParser with --debug/--release build type options."""
  # Honor the BUILDTYPE environment variable when present, else use Debug.
  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
  option_parser.add_option('--debug', action='store_const', const='Debug',
                           dest='build_type', default=default_build_type,
                           help='If set, run test suites under out/Debug. '
                                'Default is env var BUILDTYPE or Debug')
  option_parser.add_option('--release', action='store_const', const='Release',
                           dest='build_type',
                           help='If set, run test suites under out/Release. '
                                'Default is env var BUILDTYPE or Debug.')
+
def AddInstallAPKOption(option_parser):
  """Decorates OptionParser with apk option used to install the APK."""
  # Installing also needs the build type to locate the APK in the out/ tree.
  AddBuildTypeOption(option_parser)
  # (flag, help text) pairs for the install-specific options.
  apk_options = [
      ('--apk',
       'The name of the apk containing the '
       ' application (with the .apk extension).'),
      ('--apk_package',
       'The package name used by the apk containing '
       'the application.'),
  ]
  for flag, help_text in apk_options:
    option_parser.add_option(flag, help=help_text)
+
+
def ValidateInstallAPKOption(option_parser, options):
  """Checks the APK install options, resolving a bare apk name to a path.

  Args:
    option_parser: parser used to report option errors.
    options: parsed options; options.apk may be rewritten in place.
  """
  if not options.apk:
    option_parser.error('--apk is mandatory.')
  if os.path.exists(options.apk):
    return
  # A bare apk name is resolved against the build output directory.
  options.apk = os.path.join(os.environ['CHROME_SRC'], 'out',
                             options.build_type, 'apks', options.apk)
+
+
def AddTestRunnerOptions(option_parser, default_timeout=60):
  """Decorates OptionParser with options applicable to all tests.

  Args:
    option_parser: optparse.OptionParser to add the options to.
    default_timeout: default per-test timeout in seconds for -t.
  """

  option_parser.add_option('-t', dest='timeout',
                           help='Timeout to wait for each test',
                           type='int',
                           default=default_timeout)
  option_parser.add_option('-c', dest='cleanup_test_files',
                           help='Cleanup test files on the device after run',
                           action='store_true')
  option_parser.add_option('-v',
                           '--verbose',
                           dest='verbose_count',
                           default=0,
                           action='count',
                           help='Verbose level (multiple times for more)')
  # Closed set of profiler names accepted by --profiler (may be repeated).
  profilers = ['devicestatsmonitor', 'chrometrace', 'dumpheap', 'smaps',
               'traceview']
  option_parser.add_option('--profiler', dest='profilers', action='append',
                           choices=profilers,
                           help='Profiling tool to run during test. '
                           'Pass multiple times to run multiple profilers. '
                           'Available profilers: %s' % profilers)
  option_parser.add_option('--tool',
                           dest='tool',
                           help='Run the test under a tool '
                           '(use --tool help to list them)')
  # All test runners also need the --debug/--release build type choice.
  AddBuildTypeOption(option_parser)
+
+
def AddInstrumentationOptions(option_parser):
  """Decorates OptionParser with instrumentation tests options.

  Builds on AddTestRunnerOptions, adding options specific to Java/Python
  instrumentation test runs (filtering, sharding, reporting, test data).

  Args:
    option_parser: optparse.OptionParser to add the options to.
  """

  AddTestRunnerOptions(option_parser)
  option_parser.add_option('-w', '--wait_debugger', dest='wait_for_debugger',
                           action='store_true', help='Wait for debugger.')
  option_parser.add_option('-I', dest='install_apk', help='Install APK.',
                           action='store_true')
  option_parser.add_option('-f', '--test_filter',
                           help='Test filter (if not fully qualified, '
                           'will run all matches).')
  option_parser.add_option('-A', '--annotation', dest='annotation_str',
                           help=('Run only tests with any of the given '
                                 'annotations. '
                                 'An annotation can be either a key or a '
                                 'key-values pair. '
                                 'A test that has no annotation is '
                                 'considered "SmallTest".'))
  option_parser.add_option('-j', '--java_only', action='store_true',
                           help='Run only the Java tests.')
  option_parser.add_option('-p', '--python_only', action='store_true',
                           help='Run only the Python tests.')
  option_parser.add_option('-n', '--run_count', type='int',
                           dest='number_of_runs', default=1,
                           help=('How many times to run each test, regardless '
                                 'of the result. (Default is 1)'))
  option_parser.add_option('--test-apk', dest='test_apk',
                           help=('The name of the apk containing the tests '
                                 '(without the .apk extension). For SDK '
                                 'builds, the apk name without the debug '
                                 'suffix(for example, ContentShellTest).'))
  option_parser.add_option('--screenshot', dest='screenshot_failures',
                           action='store_true',
                           help='Capture screenshots of test failures')
  option_parser.add_option('--save-perf-json', action='store_true',
                           help='Saves the JSON file for each UI Perf test.')
  option_parser.add_option('--shard_retries', type=int, default=1,
                           help=('Number of times to retry each failure when '
                                 'sharding.'))
  # NOTE(review): no action='store_true' here, so --official-build consumes a
  # value rather than acting as a flag — confirm whether that is intended.
  option_parser.add_option('--official-build', help='Run official build tests.')
  option_parser.add_option('--device',
                           help='Serial number of device we should use.')
  option_parser.add_option('--python_test_root',
                           help='Root of the python-driven tests.')
  option_parser.add_option('--keep_test_server_ports',
                           action='store_true',
                           help='Indicates the test server ports must be '
                                'kept. When this is run via a sharder '
                                'the test server ports should be kept and '
                                'should not be reset.')
  option_parser.add_option('--flakiness-dashboard-server',
                           dest='flakiness_dashboard_server',
                           help=('Address of the server that is hosting the '
                                 'Chrome for Android flakiness dashboard.'))
  option_parser.add_option('--buildbot-step-failure',
                           action='store_true',
                           help=('If present, will set the buildbot status '
                                 'as STEP_FAILURE, otherwise as STEP_WARNINGS '
                                 'when test(s) fail.'))
  option_parser.add_option('--disable_assertions', action='store_true',
                           help='Run with java assertions disabled.')
  option_parser.add_option('--test_data', action='append', default=[],
                           help=('Each instance defines a directory of test '
                                 'data that should be copied to the target(s) '
                                 'before running the tests. The argument '
                                 'should be of the form <target>:<source>, '
                                 '<target> is relative to the device data'
                                 'directory, and <source> is relative to the '
                                 'chromium build directory.'))
+
def ValidateInstrumentationOptions(option_parser, options, args):
  """Validate options/arguments and populate options with defaults.

  Args:
    option_parser: parser used to report option errors.
    options: parsed options, updated in place with derived values
        (run_java_tests, run_python_tests, test_apk_path,
        test_apk_jar_path and annotation).
    args: positional arguments; anything beyond the first is an error.
  """
  if len(args) > 1:
    option_parser.print_help(sys.stderr)
    option_parser.error('Unknown arguments: %s' % args[1:])
  if options.java_only and options.python_only:
    option_parser.error('Options java_only (-j) and python_only (-p) '
                        'are mutually exclusive.')
  if not options.test_apk:
    option_parser.error('--test-apk must be specified.')

  options.run_java_tests = True
  options.run_python_tests = True
  if options.java_only:
    options.run_python_tests = False
  elif options.python_only:
    options.run_java_tests = False

  if os.path.exists(options.test_apk):
    # The APK is fully qualified, assume the JAR lives along side.
    options.test_apk_path = options.test_apk
    # Bug fix: os.path.splitext returns a (root, ext) tuple; concatenating
    # that tuple with '.jar' raised TypeError. The jar path is the root with
    # a .jar extension.
    options.test_apk_jar_path = (
        os.path.splitext(options.test_apk_path)[0] + '.jar')
  else:
    options.test_apk_path = os.path.join(_SDK_OUT_DIR,
                                         options.build_type,
                                         constants.SDK_BUILD_APKS_DIR,
                                         '%s.apk' % options.test_apk)
    options.test_apk_jar_path = os.path.join(
        _SDK_OUT_DIR, options.build_type, constants.SDK_BUILD_TEST_JAVALIB_DIR,
        '%s.jar' % options.test_apk)
  if options.annotation_str:
    options.annotation = options.annotation_str.split()
  elif options.test_filter:
    # An explicit name filter disables the default annotation filtering.
    options.annotation = []
  else:
    # By default run everything up to and including LargeTest.
    options.annotation = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest']
diff --git a/src/build/android/pylib/test_package.py b/src/build/android/pylib/test_package.py
new file mode 100644
index 0000000..356268d
--- /dev/null
+++ b/src/build/android/pylib/test_package.py
@@ -0,0 +1,184 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import re
+import os
+
+import constants
+from perf_tests_helper import PrintPerfResult
+from pylib import pexpect
+from test_result import BaseTestResult, TestResults
+
+from android_commands import errors
+
+
class TestPackage(object):
  """A helper base class for both APK and stand-alone executables.

  Args:
    adb: ADB interface the tests are using.
    device: Device to run the tests.
    test_suite: A specific test suite to run, empty to run all.
    timeout: Timeout for each test.
    cleanup_test_files: Whether or not to cleanup test files on device.
    tool: Name of the Valgrind tool.
    dump_debug_info: A debug_info object.
  """

  def __init__(self, adb, device, test_suite, timeout,
               cleanup_test_files, tool, dump_debug_info):
    self.adb = adb
    self.device = device
    # Full path including extension; test_suite drops the extension.
    self.test_suite_full = test_suite
    self.test_suite = os.path.splitext(test_suite)[0]
    self.test_suite_basename = self._GetTestSuiteBaseName()
    self.test_suite_dirname = os.path.dirname(
        self.test_suite.split(self.test_suite_basename)[0])
    self.cleanup_test_files = cleanup_test_files
    self.tool = tool
    if timeout == 0:
      timeout = 60
    # On a VM (e.g. chromium buildbots), this timeout is way too small.
    if os.environ.get('BUILDBOT_SLAVENAME'):
      timeout = timeout * 2
    # Tools such as Valgrind slow execution down; scale accordingly.
    self.timeout = timeout * self.tool.GetTimeoutScale()
    self.dump_debug_info = dump_debug_info

  def GetDisabledPrefixes(self):
    """Returns test-name prefixes that mark a test as not-to-run."""
    return ['DISABLED_', 'FLAKY_', 'FAILS_']

  def _ParseGTestListTests(self, all_tests):
    """Parses and filters the raw test lists.

    Args:
      all_tests: The raw test listing with the following format:

      IPCChannelTest.
        SendMessageInChannelConnected
      IPCSyncChannelTest.
        Simple
        DISABLED_SendWithTimeoutMixedOKAndTimeout

    Returns:
      A list of non-disabled tests. For the above raw listing:

      [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple]
    """
    ret = []
    current = ''
    disabled_prefixes = self.GetDisabledPrefixes()
    for test in all_tests:
      if not test:
        continue
      if test[0] != ' ' and not test.endswith('.'):
        # Ignore any lines with unexpected format.
        continue
      if test[0] != ' ' and test.endswith('.'):
        # Lines like 'IPCChannelTest.' start a new test case group.
        current = test
        continue
      if 'YOU HAVE' in test:
        # gtest prints a trailing 'YOU HAVE n DISABLED TESTS' summary.
        break
      test_name = test[2:]
      # Generator expression instead of a throwaway list inside any().
      if not any(test_name.startswith(x) for x in disabled_prefixes):
        ret += [current + test_name]
    return ret

  def PushDataAndPakFiles(self):
    """Pushes the .pak files and test data some suites need to the device."""
    external_storage = self.adb.GetExternalStorage()
    if (self.test_suite_basename == 'ui_unittests' or
        self.test_suite_basename == 'unit_tests'):
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/chrome.pak',
          external_storage + '/paks/chrome.pak')
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/locales/en-US.pak',
          external_storage + '/paks/en-US.pak')
    if self.test_suite_basename == 'unit_tests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/resources.pak',
          external_storage + '/paks/resources.pak')
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/chrome_100_percent.pak',
          external_storage + '/paks/chrome_100_percent.pak')
      self.adb.PushIfNeeded(self.test_suite_dirname + '/test_data',
                            external_storage + '/test_data')
    if self.test_suite_basename == 'content_unittests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/content_resources.pak',
          external_storage + '/paks/content_resources.pak')
    if self.test_suite_basename == 'breakpad_unittests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/linux_dumper_unittest_helper',
          constants.TEST_EXECUTABLE_DIR + '/linux_dumper_unittest_helper')

  def _WatchTestOutput(self, p):
    """Watches the test output.

    Args:
      p: the process generating output as created by pexpect.spawn.

    Returns:
      A TestResults object summarizing the run.

    Raises:
      errors.DeviceUnresponsiveError: if the output stream reaches EOF,
          which usually means the device went offline.
    """
    ok_tests = []
    failed_tests = []
    crashed_tests = []
    timed_out = False
    overall_fail = False

    # Test case statuses. Raw strings so the regex escapes are explicit.
    re_run = re.compile(r'\[ RUN      \] ?(.*)\r\n')
    re_fail = re.compile(r'\[  FAILED  \] ?(.*)\r\n')
    re_ok = re.compile(r'\[       OK \] ?(.*?) .*\r\n')

    # Test run statuses.
    re_passed = re.compile(r'\[  PASSED  \] ?(.*)\r\n')
    re_runner_fail = re.compile(r'\[ RUNNER_FAILED \] ?(.*)\r\n')
    # Signal handlers are installed before starting tests
    # to output the CRASHED marker when a crash happens.
    re_crash = re.compile(r'\[ CRASHED      \](.*)\r\n')

    try:
      while True:
        found = p.expect([re_run, re_passed, re_runner_fail],
                         timeout=self.timeout)
        if found == 1:  # re_passed
          break
        elif found == 2:  # re_runner_fail
          overall_fail = True
          break
        else:  # re_run
          if self.dump_debug_info:
            self.dump_debug_info.TakeScreenshot('_Test_Start_Run_')

          full_test_name = p.match.group(1).replace('\r', '')
          found = p.expect([re_ok, re_fail, re_crash], timeout=self.timeout)
          if found == 0:  # re_ok
            # Only count the test as passing when the [OK] name matches the
            # preceding [RUN] name; otherwise the output is out of sync.
            if full_test_name == p.match.group(1).replace('\r', ''):
              ok_tests += [BaseTestResult(full_test_name, p.before)]
          elif found == 2:  # re_crash
            crashed_tests += [BaseTestResult(full_test_name, p.before)]
            overall_fail = True
            break
          else:  # re_fail
            failed_tests += [BaseTestResult(full_test_name, p.before)]
    except pexpect.EOF:
      logging.error('Test terminated - EOF')
      raise errors.DeviceUnresponsiveError('Device may be offline')
    except pexpect.TIMEOUT:
      logging.error('Test terminated after %d second timeout.',
                    self.timeout)
      timed_out = True
    finally:
      p.close()

    ret_code = self._GetGTestReturnCode()
    if ret_code:
      logging.critical(
          'gtest exit code: %d\npexpect.before: %s\npexpect.after: %s',
          ret_code, p.before, p.after)
      overall_fail = True

    # Create TestResults and return
    return TestResults.FromRun(ok=ok_tests, failed=failed_tests,
                               crashed=crashed_tests, timed_out=timed_out,
                               overall_fail=overall_fail)
diff --git a/src/build/android/pylib/test_package_apk.py b/src/build/android/pylib/test_package_apk.py
new file mode 100644
index 0000000..e43ef75
--- /dev/null
+++ b/src/build/android/pylib/test_package_apk.py
@@ -0,0 +1,120 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import shlex
+import sys
+import tempfile
+import time
+
+import android_commands
+import constants
+from android_commands import errors
+from test_package import TestPackage
+from pylib import pexpect
+
class TestPackageApk(TestPackage):
  """A helper class for running APK-based native tests.

  Args:
    adb: ADB interface the tests are using.
    device: Device to run the tests.
    test_suite: A specific test suite to run, empty to run all.
    timeout: Timeout for each test.
    cleanup_test_files: Whether or not to cleanup test files on device.
    tool: Name of the Valgrind tool.
    dump_debug_info: A debug_info object.
  """

  def __init__(self, adb, device, test_suite, timeout,
               cleanup_test_files, tool, dump_debug_info):
    TestPackage.__init__(self, adb, device, test_suite, timeout,
                         cleanup_test_files, tool, dump_debug_info)

  def _CreateTestRunnerScript(self, options):
    """Pushes a command-line file for the native test activity to read."""
    command_line_file = tempfile.NamedTemporaryFile()
    # GTest expects argv[0] to be the executable path.
    command_line_file.write(self.test_suite_basename + ' ' + options)
    command_line_file.flush()
    self.adb.PushIfNeeded(command_line_file.name,
                          constants.TEST_EXECUTABLE_DIR +
                          '/chrome-native-tests-command-line')

  def _GetGTestReturnCode(self):
    # APK-based runs produce no return-value file; only parsed output is used.
    return None

  def _GetFifo(self):
    # The test.fifo path is determined by:
    # testing/android/java/src/org/chromium/native_test/
    #     ChromeNativeTestActivity.java and
    # testing/android/native_test_launcher.cc
    return '/data/data/org.chromium.native_test/files/test.fifo'

  def _ClearFifo(self):
    # Remove any stale fifo from a previous run before monitoring a new one.
    self.adb.RunShellCommand('rm -f ' + self._GetFifo())

  def _WatchFifo(self, timeout, logfile=None):
    """Attaches 'adb shell cat' to the on-device test fifo.

    Polls (with increasing sleeps) for the fifo to appear, then spawns a
    pexpect child streaming its contents.

    Raises:
      errors.DeviceUnresponsiveError: if the fifo never appears.
    """
    for i in range(10):
      if self.adb.FileExistsOnDevice(self._GetFifo()):
        print 'Fifo created...'
        break
      time.sleep(i)
    else:
      raise errors.DeviceUnresponsiveError(
          'Unable to find fifo on device %s ' % self._GetFifo())
    args = shlex.split(self.adb.Adb()._target_arg)
    args += ['shell', 'cat', self._GetFifo()]
    return pexpect.spawn('adb', args, timeout=timeout, logfile=logfile)

  def GetAllTests(self):
    """Returns a list of all tests available in the test suite."""
    self._CreateTestRunnerScript('--gtest_list_tests')
    try:
      self.tool.SetupEnvironment()
      # Clear and start monitoring logcat.
      self._ClearFifo()
      self.adb.RunShellCommand(
          'am start -n '
          'org.chromium.native_test/'
          'org.chromium.native_test.ChromeNativeTestActivity')
      # Wait for native test to complete.
      p = self._WatchFifo(timeout=30 * self.tool.GetTimeoutScale())
      p.expect("<<ScopedMainEntryLogger")
      p.close()
    finally:
      self.tool.CleanUpEnvironment()
    # We need to strip the trailing newline.
    content = [line.rstrip() for line in p.before.splitlines()]
    ret = self._ParseGTestListTests(content)
    return ret

  def CreateTestRunnerScript(self, gtest_filter, test_arguments):
    """Pushes the gtest filter/arguments command line to the device."""
    self._CreateTestRunnerScript('--gtest_filter=%s %s' % (gtest_filter,
                                                           test_arguments))

  def RunTestsAndListResults(self):
    """Launches the native test activity and parses its fifo output."""
    try:
      self.tool.SetupEnvironment()
      self._ClearFifo()
      self.adb.RunShellCommand(
       'am start -n '
        'org.chromium.native_test/'
        'org.chromium.native_test.ChromeNativeTestActivity')
    finally:
      self.tool.CleanUpEnvironment()
    logfile = android_commands.NewLineNormalizer(sys.stdout)
    return self._WatchTestOutput(self._WatchFifo(timeout=10, logfile=logfile))

  def StripAndCopyExecutable(self):
    """Installs the test APK, replacing whatever was installed before."""
    self.tool.CopyFiles()
    # Always uninstall the previous one (by activity name); we don't
    # know what was embedded in it.
    self.adb.ManagedInstall(self.test_suite_full, False,
                            package_name='org.chromium.native_test')

  def _GetTestSuiteBaseName(self):
    """Returns the base name of the test suite."""
    # APK test suite names end with '-debug.apk'
    return os.path.basename(self.test_suite).rsplit('-debug', 1)[0]
diff --git a/src/build/android/pylib/test_package_executable.py b/src/build/android/pylib/test_package_executable.py
new file mode 100644
index 0000000..cafe99c
--- /dev/null
+++ b/src/build/android/pylib/test_package_executable.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import os
+import shutil
+import sys
+import tempfile
+
+import cmd_helper
+import constants
+from test_package import TestPackage
+from pylib import pexpect
+
+
class TestPackageExecutable(TestPackage):
  """A helper class for running stand-alone executables."""

  # Name of the on-device file that chrome_test_runner.sh writes the gtest
  # exit status into.
  _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'

  def __init__(self, adb, device, test_suite, timeout,
               cleanup_test_files, tool, dump_debug_info,
               symbols_dir=None):
    """
    Args:
      adb: ADB interface the tests are using.
      device: Device to run the tests.
      test_suite: A specific test suite to run, empty to run all.
      timeout: Timeout for each test.
      cleanup_test_files: Whether or not to cleanup test files on device.
      tool: Name of the Valgrind tool.
      dump_debug_info: A debug_info object.
      symbols_dir: Directory to put the stripped binaries.
    """
    TestPackage.__init__(self, adb, device, test_suite, timeout,
                         cleanup_test_files, tool, dump_debug_info)
    self.symbols_dir = symbols_dir

  def _GetGTestReturnCode(self):
    """Pulls and parses the runner's exit status file from the device.

    Returns:
      The integer exit status written by chrome_test_runner.sh, or 1 if the
      file could not be pulled or parsed.
    """
    ret = None
    ret_code = 1  # Assume failure if we can't find it
    ret_code_file = tempfile.NamedTemporaryFile()
    try:
      if not self.adb.Adb().Pull(
          constants.TEST_EXECUTABLE_DIR + '/' +
          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE,
          ret_code_file.name):
        logging.critical('Unable to pull gtest ret val file %s',
                         ret_code_file.name)
        raise ValueError
      # Fix: use open() (file() is Python-2-only) and close the handle
      # instead of leaking it.
      with open(ret_code_file.name) as f:
        ret_code = f.read()
      ret = int(ret_code)
    except ValueError:
      logging.critical('Error reading gtest ret val file %s [%s]',
                       ret_code_file.name, ret_code)
      ret = 1
    return ret

  def _AddNativeCoverageExports(self):
    """Returns shell exports enabling native coverage, or '' if disabled."""
    # export GCOV_PREFIX set the path for native coverage results
    # export GCOV_PREFIX_STRIP indicates how many initial directory
    #                          names to strip off the hardwired absolute paths.
    #                          This value is calculated in buildbot.sh and
    #                          depends on where the tree is built.
    # Ex: /usr/local/google/code/chrome will become
    #     /code/chrome if GCOV_PREFIX_STRIP=3
    try:
      depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
    except KeyError:
      logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
                   'No native coverage.')
      return ''
    export_string = ('export GCOV_PREFIX="%s/gcov"\n' %
                     self.adb.GetExternalStorage())
    export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
    return export_string

  def GetAllTests(self):
    """Returns a list of all tests available in the test suite."""
    all_tests = self.adb.RunShellCommand(
        '%s %s/%s --gtest_list_tests' %
        (self.tool.GetTestWrapper(),
         constants.TEST_EXECUTABLE_DIR,
         self.test_suite_basename))
    return self._ParseGTestListTests(all_tests)

  def CreateTestRunnerScript(self, gtest_filter, test_arguments):
    """Creates a test runner script and pushes to the device.

    Args:
      gtest_filter: A gtest_filter flag.
      test_arguments: Additional arguments to pass to the test binary.
    """
    tool_wrapper = self.tool.GetTestWrapper()
    sh_script_file = tempfile.NamedTemporaryFile()
    # We need to capture the exit status from the script since adb shell won't
    # propagate to us.
    sh_script_file.write('cd %s\n'
                         '%s'
                         '%s %s/%s --gtest_filter=%s %s\n'
                         'echo $? > %s' %
                         (constants.TEST_EXECUTABLE_DIR,
                          self._AddNativeCoverageExports(),
                          tool_wrapper, constants.TEST_EXECUTABLE_DIR,
                          self.test_suite_basename,
                          gtest_filter, test_arguments,
                          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
    sh_script_file.flush()
    cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
    self.adb.PushIfNeeded(
            sh_script_file.name,
            constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh')
    # Fix: 'Conents' typo; close the readback handle; lazy logging args.
    logging.info('Contents of the test runner script: ')
    with open(sh_script_file.name) as script:
      for line in script:
        logging.info('  %s', line.rstrip())

  def RunTestsAndListResults(self):
    """Runs all the tests and checks for failures.

    Returns:
      A TestResults object.
    """
    args = ['adb', '-s', self.device, 'shell', 'sh',
            constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh']
    logging.info(args)
    p = pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
    return self._WatchTestOutput(p)

  def StripAndCopyExecutable(self):
    """Strips and copies the executable to the device."""
    if self.tool.NeedsDebugInfo():
      # Leave the binary unstripped so the tool can symbolize.
      target_name = self.test_suite
    else:
      target_name = self.test_suite + '_' + self.device + '_stripped'
      should_strip = True
      if os.path.isfile(target_name):
        logging.info('Found target file %s', target_name)
        target_mtime = os.stat(target_name).st_mtime
        source_mtime = os.stat(self.test_suite).st_mtime
        if target_mtime > source_mtime:
          logging.info('Target mtime (%d) is newer than source (%d), assuming '
                       'no change.', target_mtime, source_mtime)
          should_strip = False

      if should_strip:
        logging.info('Did not find up-to-date stripped binary. Generating a '
                     'new one (%s).', target_name)
        # Whenever we generate a stripped binary, copy to the symbols dir. If we
        # aren't stripping a new binary, assume it's there.
        if self.symbols_dir:
          if not os.path.exists(self.symbols_dir):
            os.makedirs(self.symbols_dir)
          shutil.copy(self.test_suite, self.symbols_dir)
        strip = os.environ['STRIP']
        cmd_helper.RunCmd([strip, self.test_suite, '-o', target_name])
    test_binary = constants.TEST_EXECUTABLE_DIR + '/' + self.test_suite_basename
    self.adb.PushIfNeeded(target_name, test_binary)

  def _GetTestSuiteBaseName(self):
    """Returns the base name of the test suite."""
    return os.path.basename(self.test_suite)
diff --git a/src/build/android/pylib/test_result.py b/src/build/android/pylib/test_result.py
new file mode 100644
index 0000000..2eb1c66
--- /dev/null
+++ b/src/build/android/pylib/test_result.py
@@ -0,0 +1,209 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import json
+import logging
+import os
+import time
+import traceback
+
+import buildbot_report
+import constants
+
+
class BaseTestResult(object):
  """A single result from a unit test.

  Attributes:
    name: identifier of the test this result belongs to.
    log: captured log output, with carriage returns stripped.
  """

  def __init__(self, name, log):
    self.name = name
    # Normalize adb/Windows-style line endings so consumers only see '\n'.
    self.log = log.replace('\r', '')
+
+
class SingleTestResult(BaseTestResult):
  """Result information for a single test.

  Args:
    full_name: Full name of the test, optionally 'ClassName#testName'.
    start_date: Date in milliseconds when the test began running.
    dur: Duration of the test run in milliseconds.
    log: An optional string listing any errors.
  """

  def __init__(self, full_name, start_date, dur, log=''):
    BaseTestResult.__init__(self, full_name, log)
    # 'ClassName#testName' names are split into their components; any other
    # form uses the full name for both fields.
    parts = full_name.rsplit('#')
    if len(parts) > 1:
      self.class_name = parts[0]
      self.test_name = parts[1]
    else:
      self.class_name = self.test_name = full_name
    self.start_date = start_date
    self.dur = dur
+
+
+class TestResults(object):
+  """Results of a test run."""
+
+  def __init__(self):
+    self.ok = []
+    self.failed = []
+    self.crashed = []
+    self.unknown = []
+    self.timed_out = False
+    self.overall_fail = False
+    self.device_exception = None
+
+  @staticmethod
+  def FromRun(ok=None, failed=None, crashed=None, timed_out=False,
+              overall_fail=False, device_exception=None):
+    ret = TestResults()
+    ret.ok = ok or []
+    ret.failed = failed or []
+    ret.crashed = crashed or []
+    ret.timed_out = timed_out
+    ret.overall_fail = overall_fail
+    ret.device_exception = device_exception
+    return ret
+
+  @staticmethod
+  def FromTestResults(results):
+    """Combines a list of results in a single TestResults object."""
+    ret = TestResults()
+    for t in results:
+      ret.ok += t.ok
+      ret.failed += t.failed
+      ret.crashed += t.crashed
+      ret.unknown += t.unknown
+      if t.timed_out:
+        ret.timed_out = True
+      if t.overall_fail:
+        ret.overall_fail = True
+    return ret
+
+  @staticmethod
+  def FromPythonException(test_name, start_date_ms, exc_info):
+    """Constructs a TestResults with exception information for the given test.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      start_date_ms: the starting time for the test.
+      exc_info: exception info, ostensibly from sys.exc_info().
+
+    Returns:
+      A TestResults object with a SingleTestResult in the failed list.
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+    duration_ms = (int(time.time()) * 1000) - start_date_ms
+
+    exc_result = SingleTestResult(
+                     full_name='PythonWrapper#' + test_name,
+                     start_date=start_date_ms,
+                     dur=duration_ms,
+                     log=(str(exc_type) + ' ' + log_msg))
+
+    results = TestResults()
+    results.failed.append(exc_result)
+    return results
+
+  @staticmethod
+  def DeviceExceptions(results):
+    return set(filter(lambda t: t.device_exception, results))
+
+  def _Log(self, sorted_list):
+    for t in sorted_list:
+      logging.critical(t.name)
+      if t.log:
+        logging.critical(t.log)
+
+  def GetAllBroken(self):
+    """Returns the all broken tests including failed, crashed, unknown."""
+    return self.failed + self.crashed + self.unknown
+
+  def LogFull(self, test_group, test_suite, build_type, tests_to_run):
+    """Output broken test logs, summarize in a log file and the test output."""
+    # Output all broken tests or 'passed' if none broken.
+    logging.critical('*' * 80)
+    logging.critical('Final result')
+    if self.failed:
+      logging.critical('Failed:')
+      self._Log(sorted(self.failed))
+    if self.crashed:
+      logging.critical('Crashed:')
+      self._Log(sorted(self.crashed))
+    if self.unknown:
+      logging.critical('Unknown:')
+      self._Log(sorted(self.unknown))
+    if not self.GetAllBroken():
+      logging.critical('Passed')
+    logging.critical('*' * 80)
+
+    # Summarize in a log file, if tests are running on bots.
+    if test_group and test_suite and os.environ.get('BUILDBOT_BUILDERNAME'):
+      log_file_path = os.path.join(constants.CHROME_DIR, 'out',
+                                   build_type, 'test_logs')
+      if not os.path.exists(log_file_path):
+        os.mkdir(log_file_path)
+      full_file_name = os.path.join(log_file_path, test_group)
+      if not os.path.exists(full_file_name):
+        with open(full_file_name, 'w') as log_file:
+          print >> log_file, '\n%s results for %s build %s:' % (
+              test_group, os.environ.get('BUILDBOT_BUILDERNAME'),
+              os.environ.get('BUILDBOT_BUILDNUMBER'))
+      log_contents = ['  %s result : %d tests ran' % (test_suite,
+                                                      len(self.ok) +
+                                                      len(self.failed) +
+                                                      len(self.crashed) +
+                                                      len(self.unknown))]
+      content_pairs = [('passed', len(self.ok)), ('failed', len(self.failed)),
+                       ('crashed', len(self.crashed))]
+      for (result, count) in content_pairs:
+        if count:
+          log_contents.append(', %d tests %s' % (count, result))
+      with open(full_file_name, 'a') as log_file:
+        print >> log_file, ''.join(log_contents)
+      content = {'test_group': test_group,
+                 'ok': [t.name for t in self.ok],
+                 'failed': [t.name for t in self.failed],
+                 'crashed': [t.name for t in self.failed],
+                 'unknown': [t.name for t in self.unknown],}
+      with open(os.path.join(log_file_path, 'results.json'), 'a') as json_file:
+        print >> json_file, json.dumps(content)
+
+    # Summarize in the test output.
+    summary = ['Summary:\n']
+    if tests_to_run:
+      summary += ['TESTS_TO_RUN=%d\n' % (len(tests_to_run))]
+    num_tests_ran = (len(self.ok) + len(self.failed) +
+                     len(self.crashed) + len(self.unknown))
+    tests_passed = [t.name for t in self.ok]
+    tests_failed = [t.name for t in self.failed]
+    tests_crashed = [t.name for t in self.crashed]
+    tests_unknown = [t.name for t in self.unknown]
+    summary += ['RAN=%d\n' % (num_tests_ran),
+                'PASSED=%d\n' % len(tests_passed),
+                'FAILED=%d %s\n' % (len(tests_failed), tests_failed),
+                'CRASHED=%d %s\n' % (len(tests_crashed), tests_crashed),
+                'UNKNOWN=%d %s\n' % (len(tests_unknown), tests_unknown)]
+    if tests_to_run and num_tests_ran != len(tests_to_run):
+      # Add the list of tests we failed to run.
+      tests_failed_to_run = list(set(tests_to_run) - set(tests_passed) -
+                            set(tests_failed) - set(tests_crashed) -
+                            set(tests_unknown))
+      summary += ['FAILED_TO_RUN=%d %s\n' % (len(tests_failed_to_run),
+                                             tests_failed_to_run)]
+    summary_string = ''.join(summary)
+    logging.critical(summary_string)
+    return summary_string
+
+  def PrintAnnotation(self):
+    """Print buildbot annotations for test results."""
+    if self.failed or self.crashed or self.overall_fail or self.timed_out:
+      buildbot_report.PrintError()
+    else:
+      print 'Step success!'  # No annotation needed
diff --git a/src/build/android/pylib/tests_annotations.py b/src/build/android/pylib/tests_annotations.py
new file mode 100644
index 0000000..f2a1834
--- /dev/null
+++ b/src/build/android/pylib/tests_annotations.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Annotations for python-driven tests."""
+
+import os
+
+
class AnnotatedFunctions(object):
  """A container for annotated methods."""
  # Maps annotation string -> list of 'module.function' qualified names.
  _ANNOTATED = {}

  @staticmethod
  def _AddFunction(annotation, function):
    """Adds an annotated function to our container.

    Args:
      annotation: the annotation string.
      function: the function.
    Returns:
      The function passed in.
    """
    module_name = os.path.splitext(os.path.basename(
        function.__globals__['__file__']))[0]
    # __name__ rather than the Python-2-only func_name alias: it is identical
    # on Python 2 and matches the modern __globals__ spelling used above.
    qualified_function_name = '.'.join([module_name, function.__name__])
    function_list = AnnotatedFunctions._ANNOTATED.get(annotation, [])
    function_list.append(qualified_function_name)
    AnnotatedFunctions._ANNOTATED[annotation] = function_list
    return function

  @staticmethod
  def IsAnnotated(annotation, qualified_function_name):
    """True if function name (module.function) contains the annotation.

    Args:
      annotation: the annotation string.
      qualified_function_name: the qualified function name.
    Returns:
      True if module.function contains the annotation.
    """
    return qualified_function_name in AnnotatedFunctions._ANNOTATED.get(
        annotation, [])

  @staticmethod
  def GetTestAnnotations(qualified_function_name):
    """Returns a list containing all annotations for the given function.

    Args:
      qualified_function_name: the qualified function name.
    Returns:
      List of all annotations for this function.
    """
    # items() (not the Python-2-only iteritems()) behaves identically here
    # and keeps the module portable.
    return [annotation
            for annotation, tests in AnnotatedFunctions._ANNOTATED.items()
            if qualified_function_name in tests]
+
+
+# The following functions are annotations used for the python driven tests.
def Smoke(function):
  """Annotation: marks a test as a smoke test."""
  return AnnotatedFunctions._AddFunction('Smoke', function)
+
+
def SmallTest(function):
  """Annotation: marks a test as a small (fast) test."""
  return AnnotatedFunctions._AddFunction('SmallTest', function)
+
+
def MediumTest(function):
  """Annotation: marks a test as a medium-sized test."""
  return AnnotatedFunctions._AddFunction('MediumTest', function)
+
+
def LargeTest(function):
  """Annotation: marks a test as a large (slow) test."""
  return AnnotatedFunctions._AddFunction('LargeTest', function)
+
+
def FlakyTest(function):
  """Annotation: marks a test as flaky."""
  return AnnotatedFunctions._AddFunction('FlakyTest', function)
+
+
def DisabledTest(function):
  """Annotation: marks a test as disabled."""
  return AnnotatedFunctions._AddFunction('DisabledTest', function)
+
+
def Feature(feature_list):
  """Annotation factory: tags a test with one or more named features.

  Registers the function under 'Feature<name>' for each listed feature,
  then under the generic 'Feature' annotation.
  """
  def _RegisterFeatures(function):
    for name in feature_list:
      AnnotatedFunctions._AddFunction('Feature' + name, function)
    return AnnotatedFunctions._AddFunction('Feature', function)
  return _RegisterFeatures
diff --git a/src/build/android/pylib/thermal_throttle.py b/src/build/android/pylib/thermal_throttle.py
new file mode 100644
index 0000000..ebd61d6
--- /dev/null
+++ b/src/build/android/pylib/thermal_throttle.py
@@ -0,0 +1,70 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
class ThermalThrottle(object):
  """Class to detect and track thermal throttling.

  Usage:
    Wait for IsThrottled() to be False before running test.
    After running test call HasBeenThrottled() to find out if the
    test run was affected by thermal throttling.

    Currently assumes an OMAP device.
  """

  def __init__(self, adb):
    self._adb = adb
    self._throttled = False

  def HasBeenThrottled(self):
    """True if there has been any throttling since the last call to
       HasBeenThrottled or IsThrottled.
    """
    return self._ReadLog()

  def IsThrottled(self):
    """True if currently throttled."""
    self._ReadLog()
    return self._throttled

  def _ReadLog(self):
    """Drains dmesg, updates the throttle state, and logs temperatures.

    Returns:
      True if any throttle/unthrottle event was seen since the last call.
    """
    has_been_throttled = False
    serial_number = self._adb.Adb().GetSerialNumber()
    # 'dmesg -c' reads and clears the kernel log, so each call only sees
    # events logged since the previous call.
    log = self._adb.RunShellCommand('dmesg -c')
    degree_symbol = unichr(0x00B0)
    for line in log:
      if 'omap_thermal_throttle' in line:
        if not self._throttled:
          logging.warning('>>> Device %s Thermally Throttled', serial_number)
        self._throttled = True
        has_been_throttled = True
      if 'omap_thermal_unthrottle' in line:
        if self._throttled:
          logging.warning('>>> Device %s Thermally Unthrottled', serial_number)
        self._throttled = False
        has_been_throttled = True
      if 'throttle_delayed_work_fn' in line:
        # BUG FIX: guard against lines without a stand-alone integer token,
        # which previously raised IndexError on [..][0].
        digits = [s for s in line.split() if s.isdigit()]
        if digits:
          temp = float(digits[0]) / 1000.0
          logging.info(u' Device %s Thermally Throttled at %3.1f%sC',
                       serial_number, temp, degree_symbol)

    # Print temperature of CPU SoC.
    omap_temp_file = '/sys/devices/platform/omap/omap_temp_sensor.0/temperature'
    if self._adb.FileExistsOnDevice(omap_temp_file):
      tempdata = self._adb.GetFileContents(omap_temp_file)
      # BUG FIX: the sensor file may read back empty; skip rather than crash.
      if tempdata:
        temp = float(tempdata[0]) / 1000.0
        logging.info(u'Current OMAP Temperature of %s = %3.1f%sC',
                     serial_number, temp, degree_symbol)

    # Print temperature of battery, to give a system temperature
    dumpsys_log = self._adb.RunShellCommand('dumpsys battery')
    for line in dumpsys_log:
      if 'temperature' in line:
        digits = [s for s in line.split() if s.isdigit()]
        if digits:
          btemp = float(digits[0]) / 10.0
          logging.info(u'Current battery temperature of %s = %3.1f%sC',
                       serial_number, btemp, degree_symbol)

    return has_been_throttled
diff --git a/src/build/android/pylib/valgrind_tools.py b/src/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..fdc6ebf
--- /dev/null
+++ b/src/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,256 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles().
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles().
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+
+import os.path
+import sys
+
+from constants import CHROME_DIR
+
+
def SetChromeTimeoutScale(adb, scale):
  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
  path = '/data/local/tmp/chrome_timeout_scale'
  if scale and scale != 1.0:
    adb.SetFileContents(path, '%f' % scale)
  else:
    # The default scale is 1.0, so None/0.0/1.0 simply remove the override.
    adb.RunShellCommand('rm %s' % path)
+
+
class BaseTool(object):
  """No-op tool: the default behavior when no error-detection tool is used."""

  def GetTestWrapper(self):
    """Returns a string that is to be prepended to the test command line."""
    return ''

  def GetUtilWrapper(self):
    """Returns the wrapper prepended to utility processes (forwarder, etc.)."""
    return ''

  def CopyFiles(self):
    """Copies tool-specific files to the device, create directories, etc."""
    pass

  def SetupEnvironment(self):
    """Sets up the system environment for a test (e.g. system properties)."""
    pass

  def CleanUpEnvironment(self):
    """Cleans up environment."""
    pass

  def GetTimeoutScale(self):
    """Returns a multiplier that should be applied to timeout values."""
    return 1.0

  def NeedsDebugInfo(self):
    """True if this tool cannot work with stripped binaries."""
    return False
+
+
class AddressSanitizerTool(BaseTool):
  """AddressSanitizer (ASan) tool."""

  TMP_DIR = '/data/local/tmp/asan'
  WRAPPER_NAME = 'asanwrapper.sh'

  def __init__(self, adb):
    self._adb = adb
    self._wrap_properties = ['wrap.com.google.android.apps.ch',
                             'wrap.org.chromium.native_test']

  def CopyFiles(self):
    """Copies the ASan wrapper script and runtime library to the device."""
    files = ['tools/android/asan/asanwrapper.sh',
             'third_party/llvm-build/Release+Asserts/lib/clang/3.2/lib/linux/' +
                 'libclang_rt.asan-arm-android.so']
    for host_relative in files:
      device_path = os.path.join(AddressSanitizerTool.TMP_DIR,
                                 os.path.basename(host_relative))
      self._adb.PushIfNeeded(os.path.join(CHROME_DIR, host_relative),
                             device_path)

  def GetTestWrapper(self):
    """Returns the on-device path of the ASan wrapper script."""
    return os.path.join(AddressSanitizerTool.TMP_DIR,
                        AddressSanitizerTool.WRAPPER_NAME)

  def GetUtilWrapper(self):
    """ASan-instrumented helper binaries (forwarder, etc.) need the same
    wrapper as the tests themselves; this can be dropped if such binaries
    were built without instrumentation."""
    return self.GetTestWrapper()

  def SetupEnvironment(self):
    """Points the Android wrap.* properties at the ASan wrapper."""
    self._adb.EnableAdbRoot()
    wrapper = self.GetTestWrapper()
    for prop in self._wrap_properties:
      self._adb.RunShellCommand('setprop %s "logwrapper %s"' % (prop, wrapper))
    SetChromeTimeoutScale(self._adb, self.GetTimeoutScale())

  def CleanUpEnvironment(self):
    """Clears the wrap.* properties and the timeout-scale override."""
    for prop in self._wrap_properties:
      self._adb.RunShellCommand('setprop %s ""' % (prop,))
    SetChromeTimeoutScale(self._adb, None)

  def GetTimeoutScale(self):
    # Very slow startup.
    return 20.0
+
+
class ValgrindTool(BaseTool):
  """Base abstract class for Valgrind tools."""

  VG_DIR = '/data/local/tmp/valgrind'
  VGLOGS_DIR = '/data/local/tmp/vglogs'

  def __init__(self, adb):
    self._adb = adb
    # exactly 31 chars, SystemProperties::PROP_NAME_MAX
    self._wrap_properties = ['wrap.com.google.android.apps.ch',
                             'wrap.org.chromium.native_test']

  def CopyFiles(self):
    """Recreates the Valgrind directories on the device and pushes files."""
    for directory in (ValgrindTool.VG_DIR, ValgrindTool.VGLOGS_DIR):
      self._adb.RunShellCommand('rm -r %s; mkdir %s' % (directory, directory))
    for f in self.GetFilesForTool():
      self._adb.PushIfNeeded(os.path.join(CHROME_DIR, f),
                             os.path.join(ValgrindTool.VG_DIR,
                                          os.path.basename(f)))

  def SetupEnvironment(self):
    """Sets up device environment."""
    self._adb.RunShellCommand('chmod 777 /data/local/tmp')
    wrapper = self.GetTestWrapper()
    for prop in self._wrap_properties:
      self._adb.RunShellCommand('setprop %s "logwrapper %s"' % (prop, wrapper))
    SetChromeTimeoutScale(self._adb, self.GetTimeoutScale())

  def CleanUpEnvironment(self):
    """Cleans up device environment."""
    for prop in self._wrap_properties:
      self._adb.RunShellCommand('setprop %s ""' % (prop,))
    SetChromeTimeoutScale(self._adb, None)

  def GetFilesForTool(self):
    """Returns a list of file names for the tool; subclasses must override."""
    raise NotImplementedError()

  def NeedsDebugInfo(self):
    """True: Valgrind tools cannot work with stripped binaries."""
    return True
+
+
class MemcheckTool(ValgrindTool):
  """Valgrind Memcheck tool."""

  def __init__(self, adb):
    super(MemcheckTool, self).__init__(adb)

  def GetFilesForTool(self):
    """Returns a list of file names for the tool."""
    return ['tools/valgrind/android/vg-chrome-wrapper.sh',
            'tools/valgrind/memcheck/suppressions.txt',
            'tools/valgrind/memcheck/suppressions_android.txt']

  def GetTestWrapper(self):
    """Returns a string that is to be prepended to the test command line."""
    return '%s/vg-chrome-wrapper.sh' % ValgrindTool.VG_DIR

  def GetTimeoutScale(self):
    """Returns a multiplier that should be applied to timeout values."""
    return 30
+
+
class TSanTool(ValgrindTool):
  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""

  def __init__(self, adb):
    super(TSanTool, self).__init__(adb)

  def GetFilesForTool(self):
    """Returns a list of file names for the tool."""
    return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
            'tools/valgrind/tsan/suppressions.txt',
            'tools/valgrind/tsan/suppressions_android.txt',
            'tools/valgrind/tsan/ignores.txt']

  def GetTestWrapper(self):
    """Returns a string that is to be prepended to the test command line."""
    return '%s/vg-chrome-wrapper-tsan.sh' % ValgrindTool.VG_DIR

  def GetTimeoutScale(self):
    """Returns a multiplier that should be applied to timeout values."""
    return 30.0
+
+
# Maps tool name to a factory callable taking the adb interface. The classes
# themselves are callable with (adb), so the former `lambda x: Cls(x)`
# wrappers were redundant indirection.
TOOL_REGISTRY = {
    'memcheck': MemcheckTool,
    'memcheck-renderer': MemcheckTool,
    'tsan': TSanTool,
    'tsan-renderer': TSanTool,
    'asan': AddressSanitizerTool,
}
+
+
+def CreateTool(tool_name, adb):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    adb: ADB interface the tool will use.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(adb)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
diff --git a/src/build/android/run_instrumentation_tests.py b/src/build/android/run_instrumentation_tests.py
new file mode 100755
index 0000000..23e613c
--- /dev/null
+++ b/src/build/android/run_instrumentation_tests.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs both the Python and Java tests."""
+
+import optparse
+import os
+import sys
+import time
+
+from pylib import apk_info
+from pylib import buildbot_report
+from pylib import constants
+from pylib import flakiness_dashboard_results_uploader
+from pylib import ports
+from pylib import run_java_tests
+from pylib import run_python_tests
+from pylib import run_tests_helper
+from pylib import test_options_parser
+from pylib.test_result import TestResults
+
+
def SummarizeResults(java_results, python_results, annotation, build_type):
  """Summarize the results from the various test types.

  Args:
    java_results: a TestResults object with java test case results.
    python_results: a TestResults object with python test case results.
    annotation: the annotation used for these results.
    build_type: 'Release' or 'Debug'.

  Returns:
    A tuple (all_results, summary_string, num_failing)
  """
  all_results = TestResults.FromTestResults([java_results, python_results])
  summary_string = all_results.LogFull('Instrumentation', annotation,
                                       build_type, [])
  # Everything that is not a clean pass counts as failing.
  broken = all_results.failed + all_results.crashed + all_results.unknown
  return all_results, summary_string, len(broken)
+
+
def DispatchInstrumentationTests(options):
  """Dispatches the Java and Python instrumentation tests, sharding if possible.

  Uses the logging module to print the combined final results and
  summary of the Java and Python tests. If the java_only option is set, only
  the Java tests run. If the python_only option is set, only the python tests
  run. If neither are set, run both Java and Python tests.

  Args:
    options: command-line options for running the Java and Python tests.

  Returns:
    An integer representing the number of failing tests.
  """
  if not options.keep_test_server_ports:
    # Reset the test port allocation. It's important to do it before starting
    # to dispatch any tests.
    if not ports.ResetTestServerPortAllocation():
      raise Exception('Failed to reset test server port.')

  # Note: a dead local ('start_date') that was computed here and never read
  # has been removed.
  java_results = TestResults()
  python_results = TestResults()

  if options.run_java_tests:
    java_results = run_java_tests.DispatchJavaTests(
        options,
        [apk_info.ApkInfo(options.test_apk_path, options.test_apk_jar_path)])
  if options.run_python_tests:
    python_results = run_python_tests.DispatchPythonTests(options)

  all_results, summary_string, num_failing = SummarizeResults(
      java_results, python_results, options.annotation, options.build_type)

  if options.flakiness_dashboard_server:
    flakiness_dashboard_results_uploader.Upload(
        options.flakiness_dashboard_server, 'Chromium_Android_Instrumentation',
        TestResults.FromTestResults([java_results, python_results]))

  return num_failing
+
+
def main(argv):
  """Parses options, runs the instrumentation tests, reports to buildbot."""
  parser = optparse.OptionParser()
  test_options_parser.AddInstrumentationOptions(parser)
  options, args = parser.parse_args(argv)
  test_options_parser.ValidateInstrumentationOptions(parser, options, args)

  run_tests_helper.SetLogLevel(options.verbose_count)
  buildbot_report.PrintNamedStep(
      'Instrumentation tests: %s - %s' % (', '.join(options.annotation),
                                          options.test_apk))
  # Assume failure unless dispatch completes, so an exception still reports
  # a failing step.
  exit_code = 1
  try:
    exit_code = DispatchInstrumentationTests(options)
  finally:
    buildbot_report.PrintStepResultIfNeeded(options, exit_code)
  return exit_code
+
+
# Script entry point: the process exit status is the number of failing tests.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/src/build/android/run_monkey_test.py b/src/build/android/run_monkey_test.py
new file mode 100755
index 0000000..433b2bd
--- /dev/null
+++ b/src/build/android/run_monkey_test.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the Monkey tests on one or more devices."""
+import logging
+import optparse
+import random
+import sys
+import time
+
+from pylib import android_commands
+from pylib import python_test_base
+from pylib import python_test_sharder
+from pylib import test_options_parser
+from pylib import test_result
+
+
class MonkeyTest(python_test_base.PythonTestBase):
  """Runs the Android UI/Application Exerciser Monkey against Chrome."""

  def testMonkey(self):
    """Launches Chrome, runs one monkey pass, and reports ok or crashed."""
    # int(time.time()) truncates to whole seconds before scaling to ms.
    start_ms = int(time.time()) * 1000

    # Launch and wait for Chrome to launch.
    self.adb.StartActivity(self.options.package_name,
                           self.options.activity_name,
                           wait_for_completion=True,
                           action='android.intent.action.MAIN',
                           force_stop=True)

    # Chrome crashes are not always caught by Monkey test runner.
    # Verify Chrome has the same PID before and after the test.
    before_pids = self.adb.ExtractPid(self.options.package_name)

    # Run the test.
    output = ''
    duration_ms = 0
    if before_pids:
      output = '\n'.join(self._LaunchMonkeyTest())
      duration_ms = int(time.time()) * 1000 - start_ms
      after_pids = self.adb.ExtractPid(self.options.package_name)

    # after_pids is only bound when before_pids is non-empty; the
    # short-circuiting 'or' below never evaluates it otherwise.
    crashed = (not before_pids or not after_pids
               or after_pids[0] != before_pids[0])
    result = test_result.SingleTestResult(self.qualified_name, start_ms,
                                          duration_ms, log=output)
    results = test_result.TestResults()

    # 'Monkey finished' is what the monkey tool prints on normal completion.
    if 'Monkey finished' in output and not crashed:
      results.ok = [result]
    else:
      results.crashed = [result]

    return results

  def _LaunchMonkeyTest(self):
    """Runs monkey test for a given package.

    Looks at the following parameters in the options object provided
    in class initializer:
      package_name: Allowed package.
      category: A list of allowed categories.
      throttle: Delay between events (ms).
      seed: Seed value for pseudo-random generator. Same seed value
        generates the same sequence of events. Seed is randomized by
        default.
      event_count: Number of events to generate.
      verbosity: Verbosity level [0-3].
      extra_args: A string of other args to pass to the command verbatim.
    """

    category = self.options.category or []
    seed = self.options.seed or random.randint(1, 100)
    throttle = self.options.throttle or 100
    event_count = self.options.event_count or 10000
    verbosity = self.options.verbosity or 1
    extra_args = self.options.extra_args or ''

    # NOTE(review): this value is in milliseconds (event_count * throttle-ms);
    # confirm RunShellCommand's timeout_time expects ms rather than seconds.
    timeout_ms = event_count * throttle * 1.5

    cmd = ['monkey',
           '-p %s' % self.options.package_name,
           ' '.join(['-c %s' % c for c in category]),
           '--throttle %d' % throttle,
           '-s %d' % seed,
           '-v ' * verbosity,
           '--monitor-native-crashes',
           '--kill-process-after-error',
           extra_args,
           '%d' % event_count]
    return self.adb.RunShellCommand(' '.join(cmd), timeout_time=timeout_ms)
+
+
def DispatchPythonTests(options):
  """Dispatches the Monkey tests, sharding across all attached devices."""
  logging.getLogger().setLevel(logging.DEBUG)

  available_tests = [MonkeyTest('testMonkey')]
  attached_devices = android_commands.GetAttachedDevices()
  if not attached_devices:
    raise Exception('You have no devices attached or visible!')

  # Actually run the tests.
  logging.debug('Running monkey tests.')
  # One entry per attached device; the sharder hands one to each device.
  available_tests = available_tests * len(attached_devices)
  options.ensure_value('shard_retries', 1)
  sharder = python_test_sharder.PythonTestSharder(
      attached_devices, available_tests, options)
  results = sharder.RunShardedTests()
  results.LogFull('Monkey', 'Monkey', options.build_type, available_tests)
  results.PrintAnnotation()
+
+
def main():
  """Parses command-line options and dispatches the Monkey tests."""
  desc = 'Run the Monkey tests on 1 or more devices.'
  parser = optparse.OptionParser(description=desc)
  test_options_parser.AddBuildTypeOption(parser)
  parser.add_option('--package-name', help='Allowed package.')
  parser.add_option('--activity-name',
                    default='com.google.android.apps.chrome.Main',
                    help='Name of the activity to start [default: %default].')
  parser.add_option('--category',
                    help='A list of allowed categories [default: ""].')
  parser.add_option('--throttle', default=100, type='int',
                    help='Delay between events (ms) [default: %default]. ')
  parser.add_option('--seed', type='int',
                    # BUG FIX: user-facing help text previously misspelled
                    # 'pseudo-random' as 'pseduo-random'.
                    help=('Seed value for pseudo-random generator. Same seed '
                          'value generates the same sequence of events. Seed '
                          'is randomized by default.'))
  parser.add_option('--event-count', default=10000, type='int',
                    help='Number of events to generate [default: %default].')
  parser.add_option('--verbosity', default=1, type='int',
                    help='Verbosity level [0-3] [default: %default].')
  parser.add_option('--extra-args', default='',
                    help=('String of other args to pass to the command verbatim'
                          ' [default: "%default"].'))
  (options, args) = parser.parse_args()

  if args:
    parser.print_help(sys.stderr)
    parser.error('Unknown arguments: %s' % args)

  if not options.package_name:
    parser.print_help(sys.stderr)
    parser.error('Missing package name')

  if options.category:
    # --category is comma-separated on the command line.
    options.category = options.category.split(',')

  DispatchPythonTests(options)
+
+
# Script entry point.
if __name__ == '__main__':
  main()
diff --git a/src/build/android/run_tests.py b/src/build/android/run_tests.py
new file mode 100755
index 0000000..f7459dd
--- /dev/null
+++ b/src/build/android/run_tests.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all the native unit tests.
+
+1. Copy over test binary to /data/local on device.
+2. Resources: chrome/unit_tests requires resources (chrome.pak and en-US.pak)
+   to be deployed to the device. We use the device's $EXTERNAL_STORAGE as the
+   base dir (which maps to Context.getExternalFilesDir()).
+3. Environment:
+3.1. chrome/unit_tests requires (via chrome_paths.cc) a directory named:
+     $EXTERNAL_STORAGE + /chrome/test/data
+4. Run the binary in the device and stream the log to the host.
+4.1. Optionally, filter specific tests.
+4.2. If we're running a single test suite and we have multiple devices
+     connected, we'll shard the tests.
+5. Clean up the device.
+
+Suppressions:
+
+Individual tests in a test binary can be suppressed by listing it in
+the gtest_filter directory in a file of the same name as the test binary,
+one test per line. Here is an example:
+
+  $ cat gtest_filter/base_unittests_disabled
+  DataPackTest.Load
+  ReadOnlyFileUtilTest.ContentsEqual
+
+This file is generated by the tests running on devices. If running on emulator,
+an additional filter file, listing the tests that failed only in the emulator,
+will be loaded. We don't care about the rare testcases which succeeded on the
+emulator but failed on the device.
+"""
+
+import copy
+import fnmatch
+import logging
+import optparse
+import os
+import signal
+import subprocess
+import sys
+import time
+
+import emulator
+from pylib import android_commands
+from pylib import buildbot_report
+from pylib import cmd_helper
+from pylib import debug_info
+from pylib import ports
+from pylib import run_tests_helper
+from pylib import test_options_parser
+from pylib.base_test_sharder import BaseTestSharder
+from pylib.single_test_runner import SingleTestRunner
+
+
+_TEST_SUITES = ['base_unittests',
+                'cc_unittests',
+                'content_unittests',
+                'gpu_unittests',
+                'ipc_tests',
+                'media_unittests',
+                'net_unittests',
+                'sql_unittests',
+                'sync_unit_tests',
+                'ui_unittests',
+                'unit_tests',
+                'webkit_compositor_bindings_unittests',
+               ]
+
+
+def FullyQualifiedTestSuites(exe, option_test_suite, build_type):
+  """Get a list of absolute paths to test suite targets.
+
+  Args:
+    exe: if True, use the executable-based test runner.
+    option_test_suite: the test_suite specified as an option.
+    build_type: 'Release' or 'Debug'.
+  """
+  test_suite_dir = os.path.join(cmd_helper.OutDirectory.get(), build_type)
+  if option_test_suite:
+    all_test_suites = [option_test_suite]
+  else:
+    all_test_suites = _TEST_SUITES
+
+  if exe:
+    qualified_test_suites = [os.path.join(test_suite_dir, t)
+                             for t in all_test_suites]
+  else:
+    # out/(Debug|Release)/$SUITE_apk/$SUITE-debug.apk
+    qualified_test_suites = [os.path.join(test_suite_dir,
+                                          t + '_apk',
+                                          t + '-debug.apk')
+                             for t in all_test_suites]
+  for t, q in zip(all_test_suites, qualified_test_suites):
+    if not os.path.exists(q):
+      raise Exception('Test suite %s not found in %s.\n'
+                      'Supported test suites:\n %s\n'
+                      'Ensure it has been built.\n' %
+                      (t, q, _TEST_SUITES))
+  return qualified_test_suites
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description):
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+
+  def Stop(self):
+    """Stop profiling and dump a log."""
+    if self._starttime:
+      stoptime = time.time()
+      logging.info('%fsec to perform %s',
+                   stoptime - self._starttime, self._description)
+      self._starttime = None
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def _IsLinux(self):
+    """Return True if on Linux; else False."""
+    return sys.platform.startswith('linux')
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not self._IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
+
+
+class TestSharder(BaseTestSharder):
+  """Responsible for sharding the tests on the connected devices."""
+
+  def __init__(self, attached_devices, test_suite, gtest_filter,
+               test_arguments, timeout, cleanup_test_files, tool,
+               log_dump_name, fast_and_loose, build_type, in_webkit_checkout):
+    BaseTestSharder.__init__(self, attached_devices, build_type)
+    self.test_suite = test_suite
+    self.test_suite_basename = os.path.basename(test_suite)
+    self.gtest_filter = gtest_filter or ''
+    self.test_arguments = test_arguments
+    self.timeout = timeout
+    self.cleanup_test_files = cleanup_test_files
+    self.tool = tool
+    self.log_dump_name = log_dump_name
+    self.fast_and_loose = fast_and_loose
+    self.in_webkit_checkout = in_webkit_checkout
+    self.all_tests = []
+    if not self.gtest_filter:
+      # No filter has been specified, let's add all tests then.
+      self.all_tests, self.attached_devices = self._GetAllEnabledTests()
+    self.tests = self.all_tests
+
+  def _GetAllEnabledTests(self):
+    """Get all enabled tests and available devices.
+
+    Obtains a list of enabled tests from the test package on the device,
+    then filters it again using the disabled list on the host.
+
+    Returns:
+      Tuple of (all enabled tests, available devices).
+
+    Raises Exception if all devices failed.
+    """
+    # TODO(frankf): This method is doing too much in a non-systematic way.
+    # If the intention is to drop flaky devices, why not go through all devices
+    # instead of breaking on the first successful run?
+    available_devices = list(self.attached_devices)
+    while available_devices:
+      try:
+        return (self._GetTestsFromDevice(available_devices[-1]),
+                available_devices)
+      except Exception as e:
+        logging.warning('Failed obtaining tests from %s %s',
+                        available_devices[-1], e)
+        available_devices.pop()
+
+    raise Exception('No device available to get the list of tests.')
+
+  def _GetTestsFromDevice(self, device):
+    logging.info('Obtaining tests from %s', device)
+    test_runner = SingleTestRunner(
+        device,
+        self.test_suite,
+        self.gtest_filter,
+        self.test_arguments,
+        self.timeout,
+        self.cleanup_test_files,
+        self.tool,
+        0,
+        not not self.log_dump_name,
+        self.fast_and_loose,
+        self.build_type,
+        self.in_webkit_checkout)
+    # The executable/apk needs to be copied before we can call GetAllTests.
+    test_runner.test_package.StripAndCopyExecutable()
+    all_tests = test_runner.test_package.GetAllTests()
+    disabled_list = test_runner.GetDisabledTests()
+    # Only includes tests that do not have any match in the disabled list.
+    all_tests = filter(lambda t:
+                       not any([fnmatch.fnmatch(t, disabled_pattern)
+                                for disabled_pattern in disabled_list]),
+                       all_tests)
+    return all_tests
+
+  def CreateShardedTestRunner(self, device, index):
+    """Creates a suite-specific test runner.
+
+    Args:
+      device: Device serial where this shard will run.
+      index: Index of this device in the pool.
+
+    Returns:
+      A SingleTestRunner object.
+    """
+    device_num = len(self.attached_devices)
+    shard_size = (len(self.tests) + device_num - 1) / device_num
+    shard_test_list = self.tests[index * shard_size : (index + 1) * shard_size]
+    test_filter = ':'.join(shard_test_list) + self.gtest_filter
+    return SingleTestRunner(
+        device,
+        self.test_suite,
+        test_filter,
+        self.test_arguments,
+        self.timeout,
+        self.cleanup_test_files, self.tool, index,
+        not not self.log_dump_name,
+        self.fast_and_loose,
+        self.build_type,
+        self.in_webkit_checkout)
+
+  def OnTestsCompleted(self, test_runners, test_results):
+    """Notifies that we completed the tests."""
+    test_results.LogFull('Unit test', os.path.basename(self.test_suite),
+                         self.build_type, self.all_tests)
+    test_results.PrintAnnotation()
+    if self.log_dump_name:
+      # Zip all debug info outputs into a file named by log_dump_name.
+      debug_info.GTestDebugInfo.ZipAndCleanResults(
+          os.path.join(
+              cmd_helper.OutDirectory.get(), self.build_type,
+              'debug_info_dumps'),
+          self.log_dump_name)
+
+
+def _RunATestSuite(options):
+  """Run a single test suite.
+
+  Helper for Dispatch() to allow stop/restart of the emulator across
+  test bundles.  If using the emulator, we start it on entry and stop
+  it on exit.
+
+  Args:
+    options: options for running the tests.
+
+  Returns:
+    0 if successful, number of failing tests otherwise.
+  """
+  step_name = os.path.basename(options.test_suite).replace('-debug.apk', '')
+  buildbot_report.PrintNamedStep(step_name)
+  attached_devices = []
+  buildbot_emulators = []
+
+  if options.use_emulator:
+    for n in range(options.emulator_count):
+      t = TimeProfile('Emulator launch %d' % n)
+      avd_name = None
+      if n > 0:
+        # Creates a temporary AVD for the extra emulators.
+        avd_name = 'run_tests_avd_%d' % n
+      buildbot_emulator = emulator.Emulator(avd_name, options.fast_and_loose)
+      buildbot_emulator.Launch(kill_all_emulators=n == 0)
+      t.Stop()
+      buildbot_emulators.append(buildbot_emulator)
+      attached_devices.append(buildbot_emulator.device)
+    # Wait for all emulators to finish booting.
+    map(lambda buildbot_emulator: buildbot_emulator.ConfirmLaunch(True),
+        buildbot_emulators)
+  elif options.test_device:
+    attached_devices = [options.test_device]
+  else:
+    attached_devices = android_commands.GetAttachedDevices()
+
+  if not attached_devices:
+    logging.critical('A device must be attached and online.')
+    buildbot_report.PrintError()
+    return 1
+
+  # Reset the test port allocation. It's important to do it before starting
+  # to dispatch any tests.
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  if options.gtest_filter:
+    logging.warning('Sharding is not possible with these configurations.')
+    attached_devices = [attached_devices[0]]
+
+  sharder = TestSharder(
+      attached_devices,
+      options.test_suite,
+      options.gtest_filter,
+      options.test_arguments,
+      options.timeout,
+      options.cleanup_test_files,
+      options.tool,
+      options.log_dump,
+      options.fast_and_loose,
+      options.build_type,
+      options.webkit)
+  test_results = sharder.RunShardedTests()
+
+  for buildbot_emulator in buildbot_emulators:
+    buildbot_emulator.Shutdown()
+
+  return len(test_results.failed)
+
+
+def Dispatch(options):
+  """Dispatches the tests, sharding if possible.
+
+  If options.use_emulator is True, all tests will be run in new emulator
+  instance.
+
+  Args:
+    options: options for running the tests.
+
+  Returns:
+    0 if successful, number of failing tests otherwise.
+  """
+  if options.test_suite == 'help':
+    ListTestSuites()
+    return 0
+
+  if options.use_xvfb:
+    xvfb = Xvfb()
+    xvfb.Start()
+
+  all_test_suites = FullyQualifiedTestSuites(options.exe, options.test_suite,
+                                             options.build_type)
+  failures = 0
+  for suite in all_test_suites:
+    # Give each test suite its own copy of options.
+    test_options = copy.deepcopy(options)
+    test_options.test_suite = suite
+    failures += _RunATestSuite(test_options)
+
+  if options.use_xvfb:
+    xvfb.Stop()
+  return failures
+
+
+def ListTestSuites():
+  """Display a list of available test suites."""
+  print 'Available test suites are:'
+  for test_suite in _TEST_SUITES:
+    print test_suite
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  test_options_parser.AddTestRunnerOptions(option_parser, default_timeout=0)
+  option_parser.add_option('-s', '--suite', dest='test_suite',
+                           help='Executable name of the test suite to run '
+                           '(use -s help to list them)')
+  option_parser.add_option('--out-directory', dest='out_directory',
+                           help='Path to the out/ directory, irrespective of '
+                           'the build type. Only for non-Chromium uses.')
+  option_parser.add_option('-d', '--device', dest='test_device',
+                           help='Target device the test suite to run ')
+  option_parser.add_option('-f', '--gtest_filter', dest='gtest_filter',
+                           help='gtest filter')
+  option_parser.add_option('-a', '--test_arguments', dest='test_arguments',
+                           help='Additional arguments to pass to the test')
+  option_parser.add_option('-L', dest='log_dump',
+                           help='file name of log dump, which will be put in '
+                           'subfolder debug_info_dumps under the same '
+                           'directory in where the test_suite exists.')
+  option_parser.add_option('-e', '--emulator', dest='use_emulator',
+                           action='store_true',
+                           help='Run tests in a new instance of emulator')
+  option_parser.add_option('-n', '--emulator_count',
+                           type='int', default=1,
+                           help='Number of emulators to launch for running the '
+                           'tests.')
+  option_parser.add_option('-x', '--xvfb', dest='use_xvfb',
+                           action='store_true',
+                           help='Use Xvfb around tests (ignored if not Linux)')
+  option_parser.add_option('--webkit', action='store_true',
+                           help='Run the tests from a WebKit checkout.')
+  option_parser.add_option('--fast', '--fast_and_loose', dest='fast_and_loose',
+                           action='store_true',
+                           help='Go faster (but be less stable), '
+                           'for quick testing.  Example: when tracking down '
+                           'tests that hang to add to the disabled list, '
+                           'there is no need to redeploy the test binary '
+                           'or data to the device again.  '
+                           'Don\'t use on bots by default!')
+  option_parser.add_option('--repeat', dest='repeat', type='int',
+                           default=2,
+                           help='Repeat count on test timeout')
+  option_parser.add_option('--exit_code', action='store_true',
+                           help='If set, the exit code will be total number '
+                           'of failures.')
+  option_parser.add_option('--exe', action='store_true',
+                           help='If set, use the exe test runner instead of '
+                           'the APK.')
+
+  options, args = option_parser.parse_args(argv)
+
+  if len(args) > 1:
+    print 'Unknown argument:', args[1:]
+    option_parser.print_usage()
+    sys.exit(1)
+
+  run_tests_helper.SetLogLevel(options.verbose_count)
+
+  if options.out_directory:
+    cmd_helper.OutDirectory.set(options.out_directory)
+
+  if options.use_emulator:
+    emulator.DeleteAllTempAVDs()
+
+  failed_tests_count = Dispatch(options)
+
+  # Failures of individual test suites are communicated by printing a
+  # STEP_FAILURE message.
+  # Returning a success exit status also prevents the buildbot from incorrectly
+  # marking the last suite as failed if there were failures in other suites in
+  # the batch (this happens because the exit status is a sum of all failures
+  # from all suites, but the buildbot associates the exit status only with the
+  # most recent step).
+  if options.exit_code:
+    return failed_tests_count
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/android/screenshot.py b/src/build/android/screenshot.py
new file mode 100755
index 0000000..86607cc
--- /dev/null
+++ b/src/build/android/screenshot.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes and saves a screenshot from an Android device.
+
+Usage: screenshot.py [-s SERIAL] [[-f] FILE]
+
+Options:
+  -s SERIAL  connect to device with specified SERIAL
+  -f FILE    write screenshot to FILE (default: Screenshot.png)
+"""
+
+from optparse import OptionParser
+import os
+import sys
+
+from pylib import android_commands
+
+
+def main():
+  # Parse options.
+  parser = OptionParser(usage='screenshot.py [-s SERIAL] [[-f] FILE]')
+  parser.add_option('-s', '--serial', dest='serial',
+                    help='connect to device with specified SERIAL',
+                    metavar='SERIAL', default=None)
+  parser.add_option('-f', '--file', dest='filename',
+                    help='write screenshot to FILE (default: %default)',
+                    metavar='FILE', default='Screenshot.png')
+  (options, args) = parser.parse_args()
+
+  if not options.serial and len(android_commands.GetAttachedDevices()) > 1:
+    parser.error('Multiple devices are attached. '
+                 'Please specify SERIAL with -s.')
+
+  if len(args) > 1:
+    parser.error('Too many positional arguments.')
+  filename = os.path.abspath(args[0] if args else options.filename)
+
+  # Grab screenshot and write to disk.
+  ac = android_commands.AndroidCommands(options.serial)
+  ac.TakeScreenshot(filename)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/apk_test.gypi b/src/build/apk_test.gypi
new file mode 100644
index 0000000..d6027d0
--- /dev/null
+++ b/src/build/apk_test.gypi
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK based test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'input_shlib_path' : '/path/to/test_suite.so',  # string
+#     'input_jars_paths': ['/path/to/test_suite.jar', ... ],  # list
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'variables' : {
+    'conditions' : [
+      ['OS == "lb_shell" and target_arch == "android"', {
+        'chromium_src_dir' : '<(PRODUCT_DIR)/../../../../external/chromium',
+      },{
+        'chromium_src_dir' : '<(ant_build_out)/../..',
+      }],
+    ],
+  },
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+  ],
+  'target_conditions': [
+    ['_toolset == "target"', {
+      'conditions': [
+        ['((OS == "android" and gtest_target_type == "shared_library") or (OS == "lb_shell" and target_arch == "android"))', {
+          'actions': [{
+            'action_name': 'apk_<(test_suite_name)',
+            'message': 'Building <(test_suite_name) test apk.',
+            'inputs': [
+              '<(DEPTH)/testing/android/AndroidManifest.xml',
+              '<(DEPTH)/testing/android/generate_native_test.py',
+              '<(input_shlib_path)',
+              '>@(input_jars_paths)',
+            ],
+            'outputs': [
+              '<(PRODUCT_DIR)/<(test_suite_name)_apk/<(test_suite_name)-debug.apk',
+            ],
+            'action': [
+              '<(DEPTH)/testing/android/generate_native_test.py',
+              '--native_library',
+              '<(input_shlib_path)',
+              '--output',
+              '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+              '--strip-binary=<(android_strip)',
+              '--app_abi',
+              '<(android_app_abi)',
+              '--ant-args',
+              '-DPRODUCT_DIR=<(ant_build_out)',
+              '--ant-args',
+              '-DANDROID_SDK=<(android_sdk)',
+              '--ant-args',
+              '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+              '--ant-args',
+              '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+              '--ant-args',
+              '-DANDROID_SDK_VERSION=<(android_sdk_version)',
+              '--ant-args',
+              '-DANDROID_GDBSERVER=<(android_gdbserver)',
+              '--ant-args',
+              '-DCHROMIUM_SRC=<(chromium_src_dir)',
+              '--ant-args',
+              '-DINPUT_JARS_PATHS=>(input_jars_paths)',
+            ],
+          }],
+        }],  # 'OS == "android" and gtest_target_type == "shared_library"
+      ],  # conditions
+    }],
+  ],  # target_conditions
+}
diff --git a/src/build/apply_locales.py b/src/build/apply_locales.py
new file mode 100755
index 0000000..6af7280
--- /dev/null
+++ b/src/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
+
+import sys
+import optparse
+
+def main(argv):
+
+  parser = optparse.OptionParser()
+  usage = 'usage: %s [options ...] format_string locale_list'
+  parser.set_usage(usage.replace('%s', '%prog'))
+  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+                    default=False,
+                    help='map "en-US" to "en" and "-" to "_" in locales')
+
+  (options, arglist) = parser.parse_args(argv)
+
+  if len(arglist) < 3:
+    print 'ERROR: need string and list of locales'
+    return 1
+
+  str_template = arglist[1]
+  locales = arglist[2:]
+
+  results = []
+  for locale in locales:
+    # For Cocoa to find the locale at runtime, it needs to use '_' instead
+    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
+    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+    if options.dash_to_underscore:
+      if locale == 'en-US':
+        locale = 'en'
+      locale = locale.replace('-', '_')
+    results.append(str_template.replace('ZZLOCALE', locale))
+
+  # Quote each element so filename spaces don't mess up GYP's attempt to parse
+  # it into a list.
+  print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/asan.saves b/src/build/asan.saves
new file mode 100644
index 0000000..0c4e4ed
--- /dev/null
+++ b/src/build/asan.saves
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file lists symbols that should not be stripped by Xcode from the binaries
+# built for Mac OS X using AddressSanitizer
+# (http://dev.chromium.org/developers/testing/addresssanitizer).
+
+___asan_init
+___asan_handle_no_return
+___asan_register_global
+___asan_register_globals
+___asan_unregister_globals
+___asan_report_load1
+___asan_report_load2
+___asan_report_load4
+___asan_report_load8
+___asan_report_load16
+___asan_report_store1
+___asan_report_store2
+___asan_report_store4
+___asan_report_store8
+___asan_report_store16
diff --git a/src/build/branding_value.sh b/src/build/branding_value.sh
new file mode 100755
index 0000000..9fcb550
--- /dev/null
+++ b/src/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files.  Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+#  branding_value.sh Chromium COPYRIGHT
+#  branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ;  then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/src/build/build_config.h b/src/build/build_config.h
new file mode 100644
index 0000000..b2e249b
--- /dev/null
+++ b/src/build/build_config.h
@@ -0,0 +1,226 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX)
+//    OS_STARBOARD
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_PPC_FAMILY
+//    ARCH_CPU_MIPS / ARCH_CPU_MIPSEL / ARCH_CPU_MIPS_FAMILY
+//    ARCH_CPU_ARM / ARCH_CPU_ARMEL / ARCH_CPU_ARM_FAMILY
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+//    ARCH_CPU_BIG_ENDIAN / ARCH_CPU_LITTLE_ENDIAN
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+#if defined(__APPLE__)
+#include <TargetConditionals.h>
+#endif
+
+// A set of macros to use for platform detection.
+#if defined(STARBOARD)
+#define OS_STARBOARD 1
+#elif defined(__APPLE__)
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__native_client__)
+#define OS_NACL 1
+#elif defined(__linux__)
+#define OS_LINUX 1
+// Use TOOLKIT_GTK on linux if TOOLKIT_VIEWS isn't defined.
+#if !defined(TOOLKIT_VIEWS)
+#define TOOLKIT_GTK
+#endif
+#elif defined(__LB_SHELL__)
+// NO toolkit!
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#define TOOLKIT_GTK
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#define TOOLKIT_GTK
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#define TOOLKIT_GTK
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL) && defined(USE_NSS)
+#error Cannot use both OpenSSL and NSS
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
+    defined(OS_NACL) || defined(__LB_SHELL__)
+#define OS_POSIX 1
+#endif
+
+#if defined(OS_POSIX) && !defined(OS_MACOSX) && !defined(OS_ANDROID) && \
+    !defined(OS_NACL) && !defined(__LB_SHELL__)
+#define USE_X11 1  // Use X for graphics.
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX)) && !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__SNC__)
+#define COMPILER_SNC
+#endif
+
+#if defined(__ghs) || defined(__ghs__)
+#define COMPILER_GHS 1
+#endif
+
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(OS_STARBOARD)
+#  include "starboard/configuration.h"
+#  if SB_IS(32_BIT)
+#    define ARCH_CPU_32_BITS 1
+#  elif SB_IS(64_BIT)
+#    define ARCH_CPU_64_BITS 1
+#  endif  // SB_IS(32_BIT)
+#  if SB_IS(BIG_ENDIAN)
+#    define ARCH_CPU_BIG_ENDIAN 1
+#  else  // SB_IS(BIG_ENDIAN)
+#    define ARCH_CPU_LITTLE_ENDIAN 1
+#  endif  // SB_IS(BIG_ENDIAN)
+#  if SB_IS(ARCH_X86)
+#    define ARCH_CPU_X86_FAMILY 1
+#    if SB_IS(32_BIT)
+#      define ARCH_CPU_X86 1
+#    elif SB_IS(64_BIT)
+#      define ARCH_CPU_X86_64 1
+#    endif  // SB_IS(32_BIT)
+#  elif SB_IS(ARCH_PPC)
+#    define ARCH_CPU_PPC_FAMILY 1
+#  elif SB_IS(ARCH_MIPS)
+#    define ARCH_CPU_MIPS_FAMILY 1
+#    if SB_IS(BIG_ENDIAN)
+#      define ARCH_CPU_MIPS 1
+#    else  // SB_IS(BIG_ENDIAN)
+#      define ARCH_CPU_MIPSEL 1
+#    endif  // SB_IS(BIG_ENDIAN)
+#  elif SB_IS(ARCH_ARM)
+#    define ARCH_CPU_ARM_FAMILY 1
+#    if SB_IS(BIG_ENDIAN)
+#      define ARCH_CPU_ARM 1
+#    else  // SB_IS(BIG_ENDIAN)
+#      define ARCH_CPU_ARMEL 1
+#    endif  // SB_IS(BIG_ENDIAN)
+#  endif  // SB_IS(ARCH_X86)
+#elif defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__LB_PS3__) || defined(__LB_WIIU__) || defined(__LB_XB360__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#define ARCH_CPU_PPC_FAMILY 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#elif defined(__MIPSEL__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_STARBOARD)
+#  if SB_IS(WCHAR_T_UTF16)
+#    define WCHAR_T_IS_UTF16 1
+#  elif SB_IS(WCHAR_T_UTF32)
+#    define WCHAR_T_IS_UTF32 1
+#  endif
+#elif defined(OS_WIN) || \
+    (defined(__LB_SHELL__) && \
+        !(defined(__LB_LINUX__) || defined(__LB_ANDROID__)))
+#define WCHAR_T_IS_UTF16 1
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32 1
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base who manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_STARBOARD)
+#  if SB_IS(WCHAR_T_UNSIGNED)
+#    define WCHAR_T_IS_UNSIGNED 1
+#  elif SB_IS(WCHAR_T_SIGNED)
+#    define WCHAR_T_IS_UNSIGNED 0
+#  endif
+#elif defined(__ARMEL__) && !defined(OS_IOS)
+#define WCHAR_T_IS_UNSIGNED 1
+#elif defined(__MIPSEL__)
+#define WCHAR_T_IS_UNSIGNED 0
+#endif
+
+// TODO: Worry about these defines if/when we need to support Android.
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/src/build/build_output_dirs_android.gyp b/src/build/build_output_dirs_android.gyp
new file mode 100644
index 0000000..cc35e6d
--- /dev/null
+++ b/src/build/build_output_dirs_android.gyp
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      # Target for creating common output build directories. Creating output
+      # dirs beforehand ensures that build scripts can assume these folders to
+      # exist and there are no race conditions resulting from build scripts
+      # trying to create these directories.
+      # The build/java.gypi target depends on this target.
+      'target_name': 'build_output_dirs',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'create_java_output_dirs',
+          'variables' : {
+          'output_dirs' : [
+            '<(PRODUCT_DIR)/apks',
+            '<(PRODUCT_DIR)/lib.java',
+            '<(PRODUCT_DIR)/test.lib.java',
+           ]
+          },
+          'inputs' : [],
+          # By not specifying any outputs, we ensure that this command isn't
+          # re-run when the output directories are touched (i.e. apks are
+          # written to them).
+          'outputs': [''],
+          'action': [
+            'mkdir',
+            '-p',
+            '<@(output_dirs)',
+          ],
+        },
+      ],
+    }, # build_output_dirs
+  ], # targets
+}
diff --git a/src/build/common.croc b/src/build/common.croc
new file mode 100644
index 0000000..ecf29f2
--- /dev/null
+++ b/src/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#       croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include.  Specify these before the excludes, since rules
+    # are in order.
+    {
+      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+      'include' : 1,
+    },
+    # Don't include subversion or mercurial SCM dirs
+    {
+      'regexp' : '.*/(\\.svn|\\.hg)/',
+      'include' : 0,
+    },
+    # Don't include output dirs
+    {
+      'regexp' : '.*/(Debug|Release|sconsbuild|out|xcodebuild)/',
+      'include' : 0,
+    },
+    # Don't include third-party source
+    {
+      'regexp' : '.*/third_party/',
+      'include' : 0,
+    },
+    # We don't run the V8 test suite, so we don't care about V8 coverage.
+    {
+      'regexp' : '.*/v8/',
+      'include' : 0,
+    },
+  ],
+
+  # Paths to add source from
+  'add_files' : [
+    'CHROMIUM'
+  ],
+
+  # Statistics to print
+  'print_stats' : [
+    {
+      'stat' : 'files_executable',
+      'format' : '*RESULT FilesKnown: files_executable= %d files',
+    },
+    {
+      'stat' : 'files_instrumented',
+      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+    },
+    {
+      'stat' : '100.0 * files_instrumented / files_executable',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
+    },
+    {
+      'stat' : 'lines_executable',
+      'format' : '*RESULT LinesKnown: lines_known= %d lines',
+    },
+    {
+      'stat' : 'lines_instrumented',
+      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+      'group' : 'source',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+      'group' : 'test',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
+      'group' : 'source',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/src/build/common.gypi b/src/build/common.gypi
new file mode 100644
index 0000000..c4bcc2b
--- /dev/null
+++ b/src/build/common.gypi
@@ -0,0 +1,3947 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via gyp_chromium,
+# since gyp_chromium is automatically forcing its inclusion.
+{
+  # Variables expected to be overridden on the GYP command line (-D) or by
+  # ~/.gyp/include.gypi.
+  'variables': {
+    # Putting a variables dict inside another variables dict looks kind of
+    # weird.  This is done so that 'host_arch', 'chromeos', etc are defined as
+    # variables within the outer variables dict here.  This is necessary
+    # to get these variables defined for the conditions within this variables
+    # dict that operate on these variables.
+    'variables': {
+      'variables': {
+        'variables': {
+          'variables': {
+            # Whether we're building a ChromeOS build.
+            'chromeos%': 0,
+
+            # Whether or not we are using the Aura windowing framework.
+            'use_aura%': 0,
+
+            # Whether or not we are building the Ash shell.
+            'use_ash%': 0,
+          },
+          # Copy conditionally-set variables out one scope.
+          'chromeos%': '<(chromeos)',
+          'use_aura%': '<(use_aura)',
+          'use_ash%': '<(use_ash)',
+
+          # Whether we are using Views Toolkit
+          'toolkit_views%': 0,
+
+          # Use OpenSSL instead of NSS. Under development: see http://crbug.com/62803
+          'use_openssl%': 0,
+
+          'use_ibus%': 0,
+
+          # Disable viewport meta tag by default.
+          'enable_viewport%': 0,
+
+          # Enable HiDPI support.
+          'enable_hidpi%': 0,
+
+          # Enable touch optimized art assets and metrics.
+          'enable_touch_ui%': 0,
+
+          # Override buildtype to select the desired build flavor.
+          # Dev - everyday build for development/testing
+          # Official - release build (generally implies additional processing)
+          # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+          # conversion is done), some of the things which are now controlled by
+          # 'branding', such as symbol generation, will need to be refactored
+          # based on 'buildtype' (i.e. we don't care about saving symbols for
+          # non-Official builds).
+          'buildtype%': 'Dev',
+
+          'conditions': [
+            # ChromeOS implies ash.
+            ['chromeos==1', {
+              'use_ash%': 1,
+              'use_aura%': 1,
+            }],
+
+            # For now, Windows builds that |use_aura| should also imply using
+            # ash. This rule should be removed for the future when Windows is
+            # using the aura windows without the ash interface.
+            ['use_aura==1 and OS=="win"', {
+              'use_ash%': 1,
+            }],
+            ['use_ash==1', {
+              'use_aura%': 1,
+            }],
+
+            # Compute the architecture that we're building on.
+            ['OS=="win" or OS=="mac" or OS=="ios"', {
+              'host_arch%': 'ia32',
+            }, {
+              # This handles the Unix platforms for which there is some support.
+              # Anything else gets passed through, which probably won't work
+              # very well; such hosts should pass an explicit target_arch to
+              # gyp.
+              'host_arch%':
+                '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/i86pc/ia32/")',
+            }],
+          ],
+        },
+
+        # Copy conditionally-set variables out one scope.
+        'chromeos%': '<(chromeos)',
+        'use_aura%': '<(use_aura)',
+        'use_ash%': '<(use_ash)',
+        'use_openssl%': '<(use_openssl)',
+        'use_ibus%': '<(use_ibus)',
+        'enable_viewport%': '<(enable_viewport)',
+        'enable_hidpi%': '<(enable_hidpi)',
+        'enable_touch_ui%': '<(enable_touch_ui)',
+        'buildtype%': '<(buildtype)',
+        'host_arch%': '<(host_arch)',
+
+        # Default architecture we're building for is the architecture we're
+        # building on.
+        'target_arch%': '<(host_arch)',
+
+        # Sets whether we're building with the Android SDK/NDK (and hence with
+        # Ant, value 0), or as part of the Android system (and hence with the
+        # Android build system, value 1).
+        'android_build_type%': 0,
+
+        'conditions': [
+          # Set default value of toolkit_views based on OS.
+          ['OS=="win" or chromeos==1 or use_aura==1', {
+            'toolkit_views%': 1,
+          }, {
+            'toolkit_views%': 0,
+          }],
+
+          # Set toolkit_uses_gtk for the Chromium browser on Linux.
+          ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and use_aura==0', {
+            'toolkit_uses_gtk%': 1,
+          }, {
+            'toolkit_uses_gtk%': 0,
+          }],
+
+          # Enable HiDPI on Mac OS and Chrome OS.
+          ['OS=="mac" or chromeos==1', {
+            'enable_hidpi%': 1,
+          }],
+
+          # Enable touch UI on Metro.
+          ['OS=="win"', {
+            'enable_touch_ui%': 1,
+          }],
+
+          # Enable App Launcher only on ChromeOS and Windows for now.
+          ['use_ash==1 or OS=="win"', {
+            'enable_app_list%': 1,
+          }, {
+            'enable_app_list%': 0,
+          }],
+
+          ['use_aura==1 or (OS!="win" and OS!="mac" and OS!="ios" and OS!="android")', {
+            'use_default_render_theme%': 1,
+          }, {
+            'use_default_render_theme%': 0,
+          }],
+        ],
+      },
+
+      # Copy conditionally-set variables out one scope.
+      'chromeos%': '<(chromeos)',
+      'host_arch%': '<(host_arch)',
+      'target_arch%': '<(target_arch)',
+      'toolkit_views%': '<(toolkit_views)',
+      'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+      'use_aura%': '<(use_aura)',
+      'use_ash%': '<(use_ash)',
+      'use_openssl%': '<(use_openssl)',
+      'use_ibus%': '<(use_ibus)',
+      'enable_viewport%': '<(enable_viewport)',
+      'enable_hidpi%': '<(enable_hidpi)',
+      'enable_touch_ui%': '<(enable_touch_ui)',
+      'android_build_type%': '<(android_build_type)',
+      'enable_app_list%': '<(enable_app_list)',
+      'use_default_render_theme%': '<(use_default_render_theme)',
+      'buildtype%': '<(buildtype)',
+
+      # We used to provide a variable for changing how libraries were built.
+      # This variable remains until we can clean up all the users.
+      # This needs to be one nested variables dict in so that dependent
+      # gyp files can make use of it in their outer variables.  (Yikes!)
+      # http://code.google.com/p/chromium/issues/detail?id=83308
+      'library%': 'static_library',
+
+      # Override branding to select the desired branding flavor.
+      'branding%': 'Chromium',
+
+      # This variable tells WebCore.gyp and JavaScriptCore.gyp whether they are
+      # built under a chromium full build (1) or a webkit.org chromium
+      # build (0).
+      'inside_chromium_build%': 1,
+
+      # Set to 1 to enable fast builds. It disables debug info for fastest
+      # compilation.
+      'fastbuild%': 0,
+
+      # Set to 1 to enable dcheck in release without having to use the flag.
+      'dcheck_always_on%': 0,
+
+      # Disable file manager component extension by default.
+      'file_manager_extension%': 0,
+
+      # Python version.
+      'python_ver%': '2.6',
+
+      # Set ARM-v7 compilation flags
+      'armv7%': 0,
+
+      # Set Neon compilation flags (only meaningful if armv7==1).
+      'arm_neon%': 1,
+
+      # The system root for cross-compiles. Default: none.
+      'sysroot%': '',
+
+      # The system libdir used for this ABI.
+      'system_libdir%': 'lib',
+
+      # On Linux, we build with sse2 for Chromium builds.
+      'disable_sse2%': 0,
+
+      # Use libjpeg-turbo as the JPEG codec used by Chromium.
+      'use_libjpeg_turbo%': 1,
+
+      # Use system libjpeg. Note that the system's libjpeg will be used even if
+      # use_libjpeg_turbo is set.
+      'use_system_libjpeg%': 0,
+
+      # Variable 'component' is for cases where we would like to build some
+      # components as dynamic shared libraries but still need variable
+      # 'library' for static libraries.
+      # By default, component is set to whatever library is set to and
+      # it can be overridden by the GYP command line or by ~/.gyp/include.gypi.
+      'component%': 'static_library',
+
+      # Set to select the Title Case versions of strings in GRD files.
+      'use_titlecase_in_grd_files%': 0,
+
+      # Use translations provided by volunteers at launchpad.net.  This
+      # currently only works on Linux.
+      'use_third_party_translations%': 0,
+
+      # Remoting compilation is enabled by default. Set to 0 to disable.
+      'remoting%': 1,
+
+      # Configuration policy is enabled by default. Set to 0 to disable.
+      'configuration_policy%': 1,
+
+      # Variable safe_browsing is used to control the build time configuration
+      # for safe browsing feature. Safe browsing can be compiled in 3 different
+      # levels: 0 disables it, 1 enables it fully, and 2 enables only UI and
+      # reporting features without enabling phishing and malware detection. This
+      # is useful to integrate a third party phishing/malware detection to
+      # existing safe browsing logic.
+      'safe_browsing%': 1,
+
+      # Speech input is compiled in by default. Set to 0 to disable.
+      'input_speech%': 1,
+
+      # Notifications are compiled in by default. Set to 0 to disable.
+      'notifications%' : 1,
+
+      # If this is set, the clang plugins used on the buildbot will be used.
+      # Run tools/clang/scripts/update.sh to make sure they are compiled.
+      # This causes 'clang_chrome_plugins_flags' to be set.
+      # Has no effect if 'clang' is not set as well.
+      'clang_use_chrome_plugins%': 1,
+
+      # Enable building with ASAN (Clang's -fsanitize=address option).
+      # -fsanitize=address only works with clang, but asan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+      'asan%': 0,
+
+      # Enable building with TSAN (Clang's -fsanitize=thread option).
+      # -fsanitize=thread only works with clang, but tsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/ThreadSanitizer.html
+      'tsan%': 0,
+      'tsan_blacklist%': '<(PRODUCT_DIR)/../../tools/valgrind/tsan_v2/ignores.txt',
+
+      # Use a modified version of Clang to intercept allocated types and sizes
+      # for allocated objects. clang_type_profiler=1 implies clang=1.
+      # See http://dev.chromium.org/developers/deep-memory-profiler/cpp-object-type-identifier
+      # TODO(dmikurube): Support mac.  See http://crbug.com/123758#c11
+      'clang_type_profiler%': 0,
+
+      # Set to true to instrument the code with function call logger.
+      # See src/third_party/cygprofile/cyg-profile.cc for details.
+      'order_profiling%': 0,
+
+      # Use the provided profiled order file to link Chrome image with it.
+      # This makes Chrome faster by better using CPU cache when executing code.
+      # This is known as PGO (profile guided optimization).
+      # See https://sites.google.com/a/google.com/chrome-msk/dev/boot-speed-up-effort
+      'order_text_section%' : "",
+
+      # Set to 1 compile with -fPIC cflag on linux. This is a must for shared
+      # libraries on linux x86-64 and arm, plus ASLR.
+      'linux_fpic%': 1,
+
+      # Whether one-click signin is enabled or not.
+      'enable_one_click_signin%': 0,
+
+      # Enable Web Intents support in WebKit.
+      'enable_web_intents%': 1,
+
+      # Enable Chrome browser extensions
+      'enable_extensions%': 1,
+
+      # Enable browser automation.
+      'enable_automation%': 1,
+
+      # Enable Google Now.
+      'enable_google_now%': 1,
+
+      # Enable language detection.
+      'enable_language_detection%': 1,
+
+      # Enable printing support and UI.
+      'enable_printing%': 1,
+
+      # Enable Web Intents web content registration via HTML element
+      # and WebUI managing such registrations.
+      'enable_web_intents_tag%': 0,
+
+      # Webrtc compilation is enabled by default. Set to 0 to disable.
+      'enable_webrtc%': 1,
+
+      # PPAPI by default does not support plugins making calls off the main
+      # thread. Set to 1 to turn on experimental support for out-of-process
+      # plugins to make calls off the main thread.
+      'enable_pepper_threading%': 1,
+
+      # Enables use of the session service, which is enabled by default.
+      # Support for disabling depends on the platform.
+      'enable_session_service%': 1,
+
+      # Enables theme support, which is enabled by default.  Support for
+      # disabling depends on the platform.
+      'enable_themes%': 1,
+
+      # Uses OEM-specific wallpaper resources on Chrome OS.
+      'use_oem_wallpaper%': 0,
+
+      # Enables support for background apps.
+      'enable_background%': 1,
+
+      # Enable the task manager by default.
+      'enable_task_manager%': 1,
+
+      # Enable FTP support by default.
+      'disable_ftp_support%': 0,
+
+      # XInput2 multitouch support is disabled by default (use_xi2_mt=0).
+      # Setting to non-zero value enables XI2 MT. When XI2 MT is enabled,
+      # the input value also defines the required XI2 minor minimum version.
+      # For example, use_xi2_mt=2 means XI2.2 or above version is required.
+      'use_xi2_mt%': 0,
+
+      # Use of precompiled headers on Windows.
+      #
+      # This is on by default in VS 2010, but off by default for VS
+      # 2008 because of complications that it can cause with our
+      # trybots etc.
+      #
+      # This variable may be explicitly set to 1 (enabled) or 0
+      # (disabled) in ~/.gyp/include.gypi or via the GYP command line.
+      # This setting will override the default.
+      #
+      # Note that a setting of 1 is probably suitable for most or all
+      # Windows developers using VS 2008, since precompiled headers
+      # provide a build speedup of 20-25%.  There are a couple of
+      # small workarounds you may need to use when using VS 2008 (but
+      # not 2010), see
+      # http://code.google.com/p/chromium/wiki/WindowsPrecompiledHeaders
+      # for details.
+      'chromium_win_pch%': 0,
+
+      # Set this to true when building with Clang.
+      # See http://code.google.com/p/chromium/wiki/Clang for details.
+      'clang%': 0,
+
+      # Enable plug-in installation by default.
+      'enable_plugin_installation%': 1,
+
+      # Enable PPAPI and NPAPI by default.
+      # TODO(nileshagrawal): Make this flag enable/disable NPAPI as well
+      # as PPAPI; see crbug.com/162667.
+      'enable_plugins%': 1,
+
+      # Specifies whether to use canvas_skia.cc in place of platform
+      # specific implementations of gfx::Canvas. Affects text drawing in the
+      # Chrome UI.
+      # TODO(asvitkine): Enable this on all platforms and delete this flag.
+      #                  http://crbug.com/105550
+      'use_canvas_skia%': 0,
+
+      # Set to "tsan", "memcheck", or "drmemory" to configure the build to work
+      # with one of those tools.
+      'build_for_tool%': '',
+
+      # Whether tests targets should be run, archived or just have the
+      # dependencies verified. All the tests targets have the '_run' suffix,
+      # e.g. base_unittests_run runs the target base_unittests. The test target
+      # always calls tools/swarm_client/isolate.py. See the script's --help for
+      # more information and the valid --mode values. Meant to be overridden with
+      # GYP_DEFINES.
+      # TODO(maruel): Converted the default from 'check' to 'noop' so work can
+      # be done while the builders are being reconfigured to check out test data
+      # files.
+      'test_isolation_mode%': 'noop',
+      # It must not be '<(PRODUCT_DIR)' alone, the '/' is necessary otherwise
+      # gyp will remove duplicate flags, causing isolate.py to be confused.
+      'test_isolation_outdir%': '<(PRODUCT_DIR)/isolate',
+
+      'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/x86',
+      'wix_path%': '<(DEPTH)/third_party/wix',
+
+      'use_harfbuzz_ng%': 0,
+
+      'conditions': [
+        # TODO(epoger): Figure out how to set use_skia=1 for Mac outside of
+        # the 'conditions' clause.  Initial attempts resulted in chromium and
+        # webkit disagreeing on its setting.
+        ['OS=="mac"', {
+          'use_skia%': 1,
+        }, {
+          'use_skia%': 1,
+        }],
+
+        # A flag for POSIX platforms
+        ['OS=="win" or OS=="starboard"', {
+          'os_posix%': 0,
+        }, {
+          'os_posix%': 1,
+        }],
+
+        # A flag for BSD platforms
+        ['OS=="freebsd" or OS=="openbsd"', {
+          'os_bsd%': 1,
+        }, {
+          'os_bsd%': 0,
+        }],
+
+        # NSS usage.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and use_openssl==0', {
+          'use_nss%': 1,
+        }, {
+          'use_nss%': 0,
+        }],
+
+        # Flags to use X11 on non-Mac POSIX platforms
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or OS=="lb_shell" or OS=="starboard"', {
+          'use_glib%': 0,
+          'use_x11%': 0,
+        }, {
+          'use_glib%': 1,
+          'use_x11%': 1,
+        }],
+
+        # We always use skia text rendering in Aura on Windows, since GDI
+        # doesn't agree with our BackingStore.
+        # TODO(beng): remove once skia text rendering is on by default.
+        ['use_aura==1 and OS=="win"', {
+          'enable_skia_text%': 1,
+        }],
+
+        # A flag to enable or disable our compile-time dependency
+        # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+        # support will be available. This option is useful
+        # for Linux distributions and for Aura.
+        ['chromeos==1 or use_aura==1', {
+          'use_gnome_keyring%': 0,
+        }, {
+          'use_gnome_keyring%': 1,
+        }],
+
+        ['toolkit_uses_gtk==1 or OS=="mac" or OS=="ios"', {
+          # GTK+, Mac and iOS want Title Case strings
+          'use_titlecase_in_grd_files%': 1,
+        }],
+
+        # Enable file manager extension on Chrome OS.
+        ['chromeos==1', {
+          'file_manager_extension%': 1,
+        }, {
+          'file_manager_extension%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac" or (OS=="linux" and use_aura==0)', {
+          'enable_one_click_signin%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'enable_language_detection%': 0,
+          'enable_printing%': 0,
+          'enable_themes%': 0,
+          'enable_webrtc%': 0,
+          'proprietary_codecs%': 1,
+          'remoting%': 0,
+        }],
+
+        ['OS=="ios"', {
+          'configuration_policy%': 0,
+          'disable_ftp_support%': 1,
+          'enable_automation%': 0,
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'enable_language_detection%': 0,
+          'enable_printing%': 0,
+          'enable_session_service%': 0,
+          'enable_themes%': 0,
+          'enable_webrtc%': 0,
+          'notifications%': 0,
+          'remoting%': 0,
+          'safe_browsing%': 0,
+        }],
+
+        # Use GPU accelerated cross process image transport by default
+        # on linux builds with the Aura window manager
+        ['use_aura==1 and OS=="linux"', {
+          'ui_compositor_image_transport%': 1,
+        }, {
+          'ui_compositor_image_transport%': 0,
+        }],
+
+        # Turn precompiled headers on by default for VS 2010.
+        ['OS=="win" and MSVS_VERSION=="2010" and buildtype!="Official"', {
+          'chromium_win_pch%': 1
+        }],
+
+        ['use_aura==1 or chromeos==1 or OS=="android"', {
+          'enable_plugin_installation%': 0,
+        }, {
+          'enable_plugin_installation%': 1,
+        }],
+
+        ['OS=="android" or OS=="ios"', {
+          'enable_plugins%': 0,
+        }, {
+          'enable_plugins%': 1,
+        }],
+
+        # linux_use_gold_binary: whether to use the binary checked into
+        # third_party/gold.
+        ['OS=="linux"', {
+          'linux_use_gold_binary%': 1,
+        }, {
+          'linux_use_gold_binary%': 0,
+        }],
+
+        # linux_use_gold_flags: whether to use build flags that rely on gold.
+        # On by default for x64 Linux.  Temporarily off for ChromeOS as
+        # it failed on a buildbot.
+        ['OS=="linux" and chromeos==0', {
+          'linux_use_gold_flags%': 1,
+        }, {
+          'linux_use_gold_flags%': 0,
+        }],
+
+        ['OS=="android" or OS=="ios"', {
+          'enable_captive_portal_detection%': 0,
+        }, {
+          'enable_captive_portal_detection%': 1,
+        }],
+
+        # Enable Skia UI text drawing incrementally on different platforms.
+        # http://crbug.com/105550
+        #
+        # On Aura, this allows per-tile painting to be used in the browser
+        # compositor.
+        ['OS!="mac" and OS!="android"', {
+          'use_canvas_skia%': 1,
+        }],
+
+        ['chromeos==1', {
+          # When building for ChromeOS we don't want Chromium to use libjpeg_turbo.
+          'use_libjpeg_turbo%': 0,
+        }],
+
+        ['OS=="android"', {
+          # When building as part of the Android system, use system libraries
+          # where possible to reduce ROM size.
+          'use_system_libjpeg%': '<(android_build_type)',
+        }],
+
+        # Enable Settings App only on Windows.
+        ['enable_app_list==1 and OS=="win"', {
+          'enable_settings_app%': 1,
+        }, {
+          'enable_settings_app%': 0,
+        }],
+
+        ['OS=="linux" and target_arch=="arm" and chromeos==0', {
+          # Set some defaults for arm/linux chrome builds
+          'armv7%': 1,
+          'linux_breakpad%': 0,
+          'linux_use_tcmalloc%': 0,
+          'linux_use_gold_flags%': 0,
+          # sysroot needs to be an absolute path otherwise it generates
+          # incorrect results when passed to pkg-config
+          'sysroot%': '<!(cd <(DEPTH) && pwd -P)/arm-sysroot',
+        }], # OS=="linux" and target_arch=="arm" and chromeos==0
+      ],
+
+      # Set this to 1 to use the Google-internal file containing
+      # official API keys for Google Chrome even in a developer build.
+      # Setting this variable explicitly to 1 will cause your build to
+      # fail if the internal file is missing.
+      #
+      # Set this to 0 to not use the internal file, even when it
+      # exists in your checkout.
+      #
+      # Leave set to 2 to have this variable implicitly set to 1 if
+      # you have src/google_apis/internal/google_chrome_api_keys.h in
+      # your checkout, and implicitly set to 0 if not.
+      #
+      # Note that official builds always behave as if this variable
+      # was explicitly set to 1, i.e. they always use official keys,
+      # and will fail to build if the internal file is missing.
+      'use_official_google_api_keys%': 2,
+
+      # Set these to bake the specified API keys and OAuth client
+      # IDs/secrets into your build.
+      #
+      # If you create a build without values baked in, you can instead
+      # set environment variables to provide the keys at runtime (see
+      # src/google_apis/google_api_keys.h for details).  Features that
+      # require server-side APIs may fail to work if no keys are
+      # provided.
+      #
+      # Note that if you are building an official build or if
+      # use_official_google_api_keys has been set to 1 (explicitly or
+      # implicitly), these values will be ignored and the official
+      # keys will be used instead.
+      'google_api_key%': '',
+      'google_default_client_id%': '',
+      'google_default_client_secret%': '',
+    },
+
+    # Make sure that cobalt is defined. This is needed in
+    # the case where the code is built using Chromium stock
+    # build tools.
+    'cobalt%': 0,
+
+    # Copy conditionally-set variables out one scope.
+    'branding%': '<(branding)',
+    'buildtype%': '<(buildtype)',
+    'target_arch%': '<(target_arch)',
+    'host_arch%': '<(host_arch)',
+    'library%': 'static_library',
+    'toolkit_views%': '<(toolkit_views)',
+    'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+    'use_aura%': '<(use_aura)',
+    'use_ash%': '<(use_ash)',
+    'use_openssl%': '<(use_openssl)',
+    'use_ibus%': '<(use_ibus)',
+    'use_nss%': '<(use_nss)',
+    'os_bsd%': '<(os_bsd)',
+    'os_posix%': '<(os_posix)',
+    'use_glib%': '<(use_glib)',
+    'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+    'use_skia%': '<(use_skia)',
+    'use_x11%': '<(use_x11)',
+    'use_gnome_keyring%': '<(use_gnome_keyring)',
+    'linux_fpic%': '<(linux_fpic)',
+    'enable_pepper_threading%': '<(enable_pepper_threading)',
+    'chromeos%': '<(chromeos)',
+    'enable_viewport%': '<(enable_viewport)',
+    'enable_hidpi%': '<(enable_hidpi)',
+    'enable_touch_ui%': '<(enable_touch_ui)',
+    'use_xi2_mt%':'<(use_xi2_mt)',
+    'file_manager_extension%': '<(file_manager_extension)',
+    'inside_chromium_build%': '<(inside_chromium_build)',
+    'fastbuild%': '<(fastbuild)',
+    'dcheck_always_on%': '<(dcheck_always_on)',
+    'python_ver%': '<(python_ver)',
+    'armv7%': '<(armv7)',
+    'arm_neon%': '<(arm_neon)',
+    'sysroot%': '<(sysroot)',
+    'system_libdir%': '<(system_libdir)',
+    'component%': '<(component)',
+    'use_titlecase_in_grd_files%': '<(use_titlecase_in_grd_files)',
+    'use_third_party_translations%': '<(use_third_party_translations)',
+    'remoting%': '<(remoting)',
+    'enable_one_click_signin%': '<(enable_one_click_signin)',
+    'enable_webrtc%': '<(enable_webrtc)',
+    'chromium_win_pch%': '<(chromium_win_pch)',
+    'configuration_policy%': '<(configuration_policy)',
+    'safe_browsing%': '<(safe_browsing)',
+    'input_speech%': '<(input_speech)',
+    'notifications%': '<(notifications)',
+    'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+    'asan%': '<(asan)',
+    'tsan%': '<(tsan)',
+    'tsan_blacklist%': '<(tsan_blacklist)',
+    'clang_type_profiler%': '<(clang_type_profiler)',
+    'order_profiling%': '<(order_profiling)',
+    'order_text_section%': '<(order_text_section)',
+    'enable_extensions%': '<(enable_extensions)',
+    'enable_web_intents%': '<(enable_web_intents)',
+    'enable_web_intents_tag%': '<(enable_web_intents_tag)',
+    'enable_plugin_installation%': '<(enable_plugin_installation)',
+    'enable_plugins%': '<(enable_plugins)',
+    'enable_session_service%': '<(enable_session_service)',
+    'enable_themes%': '<(enable_themes)',
+    'use_oem_wallpaper%': '<(use_oem_wallpaper)',
+    'enable_background%': '<(enable_background)',
+    'linux_use_gold_binary%': '<(linux_use_gold_binary)',
+    'linux_use_gold_flags%': '<(linux_use_gold_flags)',
+    'use_canvas_skia%': '<(use_canvas_skia)',
+    'test_isolation_mode%': '<(test_isolation_mode)',
+    'test_isolation_outdir%': '<(test_isolation_outdir)',
+    'enable_automation%': '<(enable_automation)',
+    'enable_printing%': '<(enable_printing)',
+    'enable_google_now%': '<(enable_google_now)',
+    'enable_language_detection%': '<(enable_language_detection)',
+    'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
+    'disable_ftp_support%': '<(disable_ftp_support)',
+    'enable_task_manager%': '<(enable_task_manager)',
+    'sas_dll_path%': '<(sas_dll_path)',
+    'wix_path%': '<(wix_path)',
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+    'use_system_libjpeg%': '<(use_system_libjpeg)',
+    'android_build_type%': '<(android_build_type)',
+    'enable_app_list%': '<(enable_app_list)',
+    'use_default_render_theme%': '<(use_default_render_theme)',
+    'enable_settings_app%': '<(enable_settings_app)',
+    'use_official_google_api_keys%': '<(use_official_google_api_keys)',
+    'google_api_key%': '<(google_api_key)',
+    'google_default_client_id%': '<(google_default_client_id)',
+    'google_default_client_secret%': '<(google_default_client_secret)',
+    'use_harfbuzz_ng%': '<(use_harfbuzz_ng)',
+
+    # Use system protobuf instead of bundled one.
+    'use_system_protobuf%': 0,
+
+    # Use system yasm instead of bundled one.
+    'use_system_yasm%': 0,
+
+    # Default to enabled PIE; this is important for ASLR but we may need to be
+    # able to turn it off for various reasons.
+    'linux_disable_pie%': 0,
+
+    # The release channel that this build targets. This is used to restrict
+    # channel-specific build options, like which installer packages to create.
+    # The default is 'all', which does no channel-specific filtering.
+    'channel%': 'all',
+
+    # Override chromium_mac_pch and set it to 0 to suppress the use of
+    # precompiled headers on the Mac.  Prefix header injection may still be
+    # used, but prefix headers will not be precompiled.  This is useful when
+    # using distcc to distribute a build to compile slaves that don't
+    # share the same compiler executable as the system driving the compilation,
+    # because precompiled headers rely on pointers into a specific compiler
+    # executable's image.  Setting this to 0 is needed to use an experimental
+    # Linux-Mac cross compiler distcc farm.
+    'chromium_mac_pch%': 1,
+
+    # The default value for mac_strip in target_defaults. This cannot be
+    # set there, per the comment about variable% in a target_defaults.
+    'mac_strip_release%': 1,
+
+    # Set to 1 to enable code coverage.  In addition to build changes
+    # (e.g. extra CFLAGS), also creates a new target in the src/chrome
+    # project file called "coverage".
+    # Currently ignored on Windows.
+    'coverage%': 0,
+
+    # Set to 1 to force Visual C++ to use legacy debug information format /Z7.
+    # This is useful for parallel compilation tools which can't support /Zi.
+    # Only used on Windows.
+    'win_z7%' : 0,
+
+    # Although base/allocator lets you select a heap library via an
+    # environment variable, the libcmt shim it uses sometimes gets in
+    # the way.  To disable it entirely, and switch to normal msvcrt, add e.g.
+    #  'win_use_allocator_shim': 0,
+    #  'win_release_RuntimeLibrary': 2
+    # to ~/.gyp/include.gypi, gclient runhooks --force, and do a release build.
+    'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
+
+    # Whether usage of OpenMAX is enabled.
+    'enable_openmax%': 0,
+
+    # Whether proprietary audio/video codecs are assumed to be included with
+    # this build (only meaningful if branding!=Chrome).
+    'proprietary_codecs%': 0,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to prevent release.vsprops from being included.
+    # Yes(1) means include release.vsprops.
+    # Once all vsprops settings are migrated into gyp, this can go away.
+    'msvs_use_common_release%': 1,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to override additional linker options for msvs.
+    # Yes(1) means set use the common linker options.
+    'msvs_use_common_linker_extras%': 1,
+
+    # TODO(sgk): eliminate this if possible.
+    # It would be nicer to support this via a setting in 'target_defaults'
+    # in chrome/app/locales/locales.gypi overriding the setting in the
+    # 'Debug' configuration in the 'target_defaults' dict below,
+    # but that doesn't work as we'd like.
+    'msvs_debug_link_incremental%': '2',
+
+    # Needed for some of the largest modules.
+    'msvs_debug_link_nonincremental%': '1',
+
+    # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows
+    # to get incremental linking to be faster in debug builds.
+    'incremental_chrome_dll%': '0',
+
+    # The default settings for third party code for treating
+    # warnings-as-errors. Ideally, this would not be required, however there
+    # is some third party code that takes a long time to fix/roll. So, this
+    # flag allows us to have warnings as errors in general to prevent
+    # regressions in most modules, while working on the bits that are
+    # remaining.
+    'win_third_party_warn_as_error%': 'true',
+
+    # This is the location of the sandbox binary. Chrome looks for this before
+    # running the zygote process. If found, and SUID, it will be used to
+    # sandbox the zygote process and, thus, all renderer processes.
+    'linux_sandbox_path%': '',
+
+    # Set this to true to enable SELinux support.
+    'selinux%': 0,
+
+    # Clang stuff.
+    'clang%': '<(clang)',
+    'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+
+    # These two variables can be set in GYP_DEFINES while running
+    # |gclient runhooks| to let clang run a plugin in every compilation.
+    # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+    # Example:
+    #     GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+    'clang_load%': '',
+    'clang_add_plugin%': '',
+
+    # The default type of gtest.
+    'gtest_target_type%': 'executable',
+
+    # Enable sampling based profiler.
+    # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+    'profiling%': '0',
+
+    # Enable strict libstdc++ debug mode (_GLIBCXX_DEBUG).
+    'glibcxx_debug%': 0,
+
+    # Override whether we should use Breakpad on Linux. I.e. for Chrome bot.
+    'linux_breakpad%': 0,
+    # And if we want to dump symbols for Breakpad-enabled builds.
+    'linux_dump_symbols%': 0,
+    # And if we want to strip the binary after dumping symbols.
+    'linux_strip_binary%': 0,
+    # Strip the test binaries needed for Linux reliability tests.
+    'linux_strip_reliability_tests%': 0,
+
+    # Enable TCMalloc.
+    'linux_use_tcmalloc%': 1,
+
+    # Disable TCMalloc's heapchecker.
+    'linux_use_heapchecker%': 0,
+
+    # Disable the shadow-stack bookkeeping that heapcheck uses to unwind
+    # stacks more reliably.
+    'linux_keep_shadow_stacks%': 0,
+
+    # Set to 1 to link against libgnome-keyring instead of using dlopen().
+    'linux_link_gnome_keyring%': 0,
+    # Set to 1 to link against gsettings APIs instead of using dlopen().
+    'linux_link_gsettings%': 0,
+
+    # Set Thumb compilation flags.
+    'arm_thumb%': 0,
+
+    # Set ARM fpu compilation flags (only meaningful if armv7==1 and
+    # arm_neon==0).
+    'arm_fpu%': 'vfpv3',
+
+    # Set ARM float abi compilation flag.
+    'arm_float_abi%': 'softfp',
+
+    # Enable new NPDevice API.
+    'enable_new_npdevice_api%': 0,
+
+    # Enable EGLImage support in OpenMAX
+    'enable_eglimage%': 1,
+
+    # Enable a variable used elsewhere throughout the GYP files to determine
+    # whether to compile in the sources for the GPU plugin / process.
+    'enable_gpu%': 1,
+
+    # .gyp files or targets should set chromium_code to 1 if they build
+    # Chromium-specific code, as opposed to external code.  This variable is
+    # used to control such things as the set of warnings to enable, and
+    # whether warnings are treated as errors.
+    'chromium_code%': 0,
+
+    'release_valgrind_build%': 0,
+
+    # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+    'enable_wexit_time_destructors%': 0,
+
+    # Set to 1 to compile with the built in pdf viewer.
+    'internal_pdf%': 0,
+
+    # Set to 1 to compile with the OpenGL ES 2.0 conformance tests.
+    'internal_gles2_conform_tests%': 0,
+
+    # NOTE: When these end up in the Mac bundle, we need to replace '-' for '_'
+    # so Cocoa is happy (http://crbug.com/20441).
+    'locales': [
+      'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+      'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+      'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+      'ml', 'mr', 'ms', 'nb', 'nl', 'pl', 'pt-PT', 'ro', 'ru',
+      'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+      'vi', 'zh-CN', 'zh-TW',
+    ],
+
+    # Pseudo locales are special locales which are used for testing and
+    # debugging. They don't get copied to the final app. For more info,
+    # check out https://sites.google.com/a/chromium.org/dev/Home/fake-bidi
+    'pseudo_locales': [
+      'fake-bidi',
+    ],
+
+    'grit_defines': [],
+
+    # If debug_devtools is set to 1, JavaScript files for DevTools are
+    # stored as is and loaded from disk. Otherwise, a concatenated file
+    # is stored in resources.pak. It is still possible to load JS files
+    # from disk by passing --debug-devtools cmdline switch.
+    'debug_devtools%': 0,
+
+    # The Java Bridge is not compiled in by default.
+    'java_bridge%': 0,
+
+    # Code signing for iOS binaries.  The bots need to be able to disable this.
+    'chromium_ios_signing%': 1,
+
+    # This flag is only used when disable_nacl==0 and disables all those
+    # subcomponents which would require the installation of a native_client
+    # untrusted toolchain.
+    'disable_nacl_untrusted%': 0,
+
+    # Disable Dart by default.
+    'enable_dart%': 0,
+
+    # The desired version of Windows SDK can be set in ~/.gyp/include.gypi.
+    'msbuild_toolset%': '',
+
+    # Native Client is enabled by default.
+    'disable_nacl%': 0,
+
+    # Whether to build full debug version for Debug configuration on Android.
+    # Compared to full debug version, the default Debug configuration on Android
+    # has no full v8 debug, has size optimization and linker gc section, so that
+    # we can build a debug version with acceptable size and performance.
+    'android_full_debug%': 0,
+
+    # Sets the default version name and code for Android app, by default we
+    # do a developer build.
+    'android_app_version_name%': 'Developer Build',
+    'android_app_version_code%': 0,
+
+    'sas_dll_exists': '<!(python <(DEPTH)/build/dir_exists.py <(sas_dll_path))',
+    'wix_exists': '<!(python <(DEPTH)/build/dir_exists.py <(wix_path))',
+
+    'windows_sdk_default_path': '<(DEPTH)/third_party/platformsdk_win8/files',
+    'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+
+    # Whether we are using the rlz library or not.  Platforms like Android send
+    # rlz codes for searches but do not use the library.
+    'enable_rlz%': 0,
+
+    'conditions': [
+      ['OS=="win" and "<!(python <(DEPTH)/build/dir_exists.py <(windows_sdk_default_path))"=="True"', {
+        'windows_sdk_path%': '<(windows_sdk_default_path)',
+      }, {
+        'windows_sdk_path%': 'C:/Program Files (x86)/Windows Kits/8.0',
+      }],
+      ['OS=="win" and "<!(python <(DEPTH)/build/dir_exists.py <(directx_sdk_default_path))"=="True"', {
+        'directx_sdk_path%': '<(directx_sdk_default_path)',
+      }, {
+        'directx_sdk_path%': '$(DXSDK_DIR)',
+      }],
+      ['OS=="win"', {
+        'windows_driver_kit_path%': '$(WDK_DIR)',
+      }],
+      # If use_official_google_api_keys is already set (to 0 or 1), we
+      # do none of the implicit checking.  If it is set to 1 and the
+      # internal keys file is missing, the build will fail at compile
+      # time.  If it is set to 0 and keys are not provided by other
+      # means, a warning will be printed at compile time.
+      ['use_official_google_api_keys==2', {
+        'use_official_google_api_keys%':
+            '<!(python <(DEPTH)/google_apis/build/check_internal.py <(DEPTH)/google_apis/internal/google_chrome_api_keys.h)',
+      }],
+      ['os_posix==1 and OS!="mac" and OS!="ios" and OS!="lb_shell"', {
+        # Figure out the python architecture to decide if we build pyauto.
+        'python_arch%': '<!(<(DEPTH)/build/linux/python_arch.sh <(sysroot)/usr/<(system_libdir)/libpython<(python_ver).so.1.0)',
+        'conditions': [
+          # TODO(glider): set clang to 1 earlier for ASan and TSan builds so
+          # that it takes effect here.
+          ['clang==0 and asan==0 and tsan==0', {
+            # This will set gcc_version to XY if you are running gcc X.Y.*.
+            # This is used to tweak build flags for gcc 4.5.
+            'gcc_version%': '<!(python <(DEPTH)/build/compiler_version.py)',
+          }, {
+            'gcc_version%': 0,
+          }],
+          ['branding=="Chrome"', {
+            'linux_breakpad%': 1,
+          }],
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'linux_dump_symbols%': 1,
+          }],
+        ],
+      }],  # os_posix==1 and OS!="mac" and OS!="ios" and OS!="lb_shell"
+      ['OS=="ios"', {
+        'disable_nacl%': 1,
+        'enable_background%': 0,
+        'enable_gpu%': 0,
+        'enable_task_manager%': 0,
+        'icu_use_data_file_flag%': 1,
+        'use_system_bzip2%': 1,
+        'use_system_libxml%': 1,
+        'use_system_sqlite%': 1,
+
+        # The Mac SDK is set for iOS builds and passed through to Mac
+        # sub-builds. This allows the Mac sub-build SDK in an iOS build to be
+        # overridden from the command line the same way it is for a Mac build.
+        'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py 10.6)',
+
+        # iOS SDK and deployment target support.  The iOS 5.0 SDK is actually
+        # what is required, but the value is left blank so when it is set in
+        # the project files it will be the "current" iOS SDK.  Forcing 5.0
+        # even though it is "current" causes Xcode to spit out a warning for
+        # every single project file for not using the "current" SDK.
+        'ios_sdk%': '',
+        'ios_sdk_path%': '',
+        'ios_deployment_target%': '4.3',
+
+        'conditions': [
+          # ios_product_name is set to the name of the .app bundle as it should
+          # appear on disk.
+          ['branding=="Chrome"', {
+            'ios_product_name%': 'Chrome',
+          }, { # else: branding!="Chrome"
+            'ios_product_name%': 'Chromium',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'ios_breakpad%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'ios_breakpad%': 0,
+          }],
+        ],
+      }],  # OS=="ios"
+      ['OS=="android" or ((OS=="lb_shell" or OS=="starboard") and target_arch=="android")', {
+        # Location of Android NDK.
+        'variables': {
+          'variables': {
+            'variables': {
+              'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
+            },
+            'android_ndk_root%': '<(android_ndk_root)',
+            'conditions': [
+              ['target_arch == "ia32"', {
+                'android_app_abi%': 'x86',
+                'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-9/arch-x86',
+              }],
+              ['target_arch=="arm" or (OS=="lb_shell" and target_arch=="android")', {
+                'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-9/arch-arm',
+                'conditions': [
+                  ['armv7==0', {
+                    'android_app_abi%': 'armeabi',
+                  }, {
+                    'android_app_abi%': 'armeabi-v7a',
+                  }],
+                ],
+              }],
+            ],
+          },
+          'android_ndk_root%': '<(android_ndk_root)',
+          'android_app_abi%': '<(android_app_abi)',
+          'android_ndk_sysroot%': '<(android_ndk_sysroot)',
+        },
+        'android_ndk_root%': '<(android_ndk_root)',
+        'android_ndk_sysroot': '<(android_ndk_sysroot)',
+        'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+        'android_ndk_lib': '<(android_ndk_sysroot)/usr/lib',
+        'android_app_abi%': '<(android_app_abi)',
+
+        # Location of the "strip" binary, used by both gyp and scripts.
+        'android_strip%' : '<!(/bin/echo -n <(android_toolchain)/*-strip)',
+
+        # Provides an absolute path to PRODUCT_DIR (e.g. out/Release). Used
+        # to specify the output directory for Ant in the Android build.
+        'ant_build_out': '`cd <(PRODUCT_DIR) && pwd -P`',
+
+        # Uses Android's crash report system
+        'linux_breakpad%': 0,
+
+        # Always uses openssl.
+        'use_openssl%': 1,
+
+        'proprietary_codecs%': '<(proprietary_codecs)',
+        'enable_task_manager%': 0,
+        'safe_browsing%': 2,
+        'configuration_policy%': 0,
+        'input_speech%': 0,
+        'enable_web_intents%': 0,
+        'enable_automation%': 0,
+        'java_bridge%': 1,
+        'build_ffmpegsumo%': 0,
+        'linux_use_tcmalloc%': 0,
+
+        # Disable Native Client.
+        'disable_nacl%': 1,
+
+        # Android does not support background apps.
+        'enable_background%': 0,
+
+        # Sessions are stored separately on the Java side.
+        'enable_session_service%': 0,
+
+        # Set to 1 once we have a notification system for Android.
+        # http://crbug.com/115320
+        'notifications%': 0,
+
+        'p2p_apis%' : 0,
+
+        # TODO(jrg): when 'gtest_target_type'=='shared_library' and
+        # OS==android, make all gtest_targets depend on
+        # testing/android/native_test.gyp:native_test_apk.
+        'gtest_target_type%': 'shared_library',
+
+        # Uses system APIs for decoding audio and video.
+        'use_libffmpeg%': '0',
+
+        # Always use the chromium skia.
+        'use_system_skia%': '0',
+
+        # Configure crash reporting and build options based on release type.
+        'conditions': [
+          ['buildtype=="Official"', {
+            # Only report crash dumps for Official builds.
+            'linux_breakpad%': 1,
+          }, {
+            'linux_breakpad%': 0,
+          }],
+        ],
+
+        # When building as part of the Android system, use system libraries
+        # where possible to reduce ROM size.
+        # TODO(steveblock): Investigate using the system version of sqlite.
+        'use_system_sqlite%': 0,  # '<(android_build_type)',
+        'use_system_expat%': '<(android_build_type)',
+        'use_system_icu%': '<(android_build_type)',
+        'use_system_stlport%': '<(android_build_type)',
+
+        # Copy it out one scope.
+        'android_build_type%': '<(android_build_type)',
+      }],  # OS=="android"
+      ['OS=="mac"', {
+        'variables': {
+          # Mac OS X SDK and deployment target support.  The SDK identifies
+          # the version of the system headers that will be used, and
+          # corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time
+          # macro.  "Maximum allowed" refers to the operating system version
+          # whose APIs are available in the headers.  The deployment target
+          # identifies the minimum system version that the built products are
+          # expected to function on.  It corresponds to the
+          # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro.  To ensure these
+          # macros are available, #include <AvailabilityMacros.h>.  Additional
+          # documentation on these macros is available at
+          # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+          # Chrome normally builds with the Mac OS X 10.6 SDK and sets the
+          # deployment target to 10.6.  Other projects, such as O3D, may
+          # override these defaults.
+
+          # Normally, mac_sdk_min is used to find an SDK that Xcode knows
+          # about that is at least the specified version. In official builds,
+          # the SDK must match mac_sdk_min exactly. If the SDK is installed
+          # someplace that Xcode doesn't know about, set mac_sdk_path to the
+          # path to the SDK; when set to a non-empty string, SDK detection
+          # based on mac_sdk_min will be bypassed entirely.
+          'mac_sdk_min%': '10.6',
+          'mac_sdk_path%': '',
+
+          'mac_deployment_target%': '10.6',
+        },
+
+        'mac_sdk_min': '<(mac_sdk_min)',
+        'mac_sdk_path': '<(mac_sdk_path)',
+        'mac_deployment_target': '<(mac_deployment_target)',
+
+        # Enable clang on mac by default!
+        'clang%': 1,
+
+        # Compile in Breakpad support by default so that it can be
+        # tested, even if it is not enabled by default at runtime.
+        'mac_breakpad_compiled_in%': 1,
+        'conditions': [
+          # mac_product_name is set to the name of the .app bundle as it should
+          # appear on disk.  This duplicates data from
+          # chrome/app/theme/chromium/BRANDING and
+          # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+          # these names into the build system.
+          ['OS!="lb_shell"', {
+            'default_configuration': 'Debug',
+          }],
+          ['branding=="Chrome"', {
+            'mac_product_name%': 'Google Chrome',
+          }, { # else: branding!="Chrome"
+            'mac_product_name%': 'Chromium',
+          }],
+
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py --verify <(mac_sdk_min) --sdk_path=<(mac_sdk_path))',
+            # Enable uploading crash dumps.
+            'mac_breakpad_uploads%': 1,
+            # Enable dumping symbols at build time for use by Mac Breakpad.
+            'mac_breakpad%': 1,
+            # Enable Keystone auto-update support.
+            'mac_keystone%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py <(mac_sdk_min))',
+            'mac_breakpad_uploads%': 0,
+            'mac_breakpad%': 0,
+            'mac_keystone%': 0,
+          }],
+        ],
+      }],  # OS=="mac"
+      ['OS=="win"', {
+        'conditions': [
+          ['component=="shared_library"', {
+            'win_use_allocator_shim%': 0,
+          }],
+          ['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
+            # Only enabled by default for ninja because it's buggy in VS.
+            # Not enabled for component=static_library because some targets
+            # are too large and the toolchain fails due to the size of the
+            # .obj files.
+            'incremental_chrome_dll%': 1,
+          }],
+          # Don't do incremental linking for large modules on 32-bit.
+          ['MSVS_OS_BITS==32', {
+            'msvs_large_module_debug_link_mode%': '1',  # No
+          },{
+            'msvs_large_module_debug_link_mode%': '2',  # Yes
+          }],
+          ['MSVS_VERSION=="2010e" or MSVS_VERSION=="2008e" or MSVS_VERSION=="2005e"', {
+            'msvs_express%': 1,
+            'secure_atl%': 0,
+          },{
+            'msvs_express%': 0,
+            'secure_atl%': 1,
+          }],
+        ],
+        'nacl_win64_defines': [
+          # This flag is used to minimize dependencies when building
+          # Native Client loader for 64-bit Windows.
+          'NACL_WIN64',
+        ],
+      }],
+
+      ['os_posix==1 and chromeos==0 and OS!="android" and OS!="ios"', {
+        'use_cups%': 1,
+      }, {
+        'use_cups%': 0,
+      }],
+
+      # Native Client glibc toolchain is enabled by default except on arm.
+      ['target_arch=="arm"', {
+        'disable_glibc%': 1,
+      }, {
+        'disable_glibc%': 0,
+      }],
+
+      # Disable SSE2 when building for ARM or MIPS.
+      ['target_arch=="arm" or target_arch=="mipsel"', {
+        'disable_sse2%': 1,
+      }, {
+        'disable_sse2%': '<(disable_sse2)',
+      }],
+
+      # Set the relative path from this file to the GYP file of the JPEG
+      # library used by Chromium.
+      ['use_system_libjpeg==1 or use_libjpeg_turbo==0', {
+        # Configuration for using the system libjpeg is here.
+        'libjpeg_gyp_path': '../third_party/libjpeg/libjpeg.gyp',
+      }, {
+        'libjpeg_gyp_path': '../third_party/libjpeg_turbo/libjpeg.gyp',
+      }],
+
+      # Options controlling the use of GConf (the classic GNOME configuration
+      # system) and GIO, which contains GSettings (the new GNOME config system).
+      ['chromeos==1', {
+        'use_gconf%': 0,
+        'use_gio%': 0,
+      }, {
+        'use_gconf%': 1,
+        'use_gio%': 1,
+      }],
+
+      # Set up -D and -E flags passed into grit.
+      ['branding=="Chrome"', {
+        # TODO(mmoss) The .grd files look for _google_chrome, but for
+        # consistency they should look for google_chrome_build like C++.
+        'grit_defines': ['-D', '_google_chrome',
+                         '-E', 'CHROMIUM_BUILD=google_chrome'],
+      }, {
+        'grit_defines': ['-D', '_chromium',
+                         '-E', 'CHROMIUM_BUILD=chromium'],
+      }],
+      ['chromeos==1', {
+        'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
+      }],
+      ['toolkit_views==1', {
+        'grit_defines': ['-D', 'toolkit_views'],
+      }],
+      ['use_aura==1', {
+        'grit_defines': ['-D', 'use_aura'],
+      }],
+      ['use_ash==1', {
+        'grit_defines': ['-D', 'use_ash'],
+      }],
+      ['use_nss==1', {
+        'grit_defines': ['-D', 'use_nss'],
+      }],
+      ['file_manager_extension==1', {
+        'grit_defines': ['-D', 'file_manager_extension'],
+      }],
+      ['remoting==1', {
+        'grit_defines': ['-D', 'remoting'],
+      }],
+      ['use_titlecase_in_grd_files==1', {
+        'grit_defines': ['-D', 'use_titlecase'],
+      }],
+      ['use_third_party_translations==1', {
+        'grit_defines': ['-D', 'use_third_party_translations'],
+        'locales': [
+          'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+          'ka', 'ku', 'kw', 'ms', 'ug'
+        ],
+      }],
+      ['OS=="android"', {
+        'grit_defines': ['-D', 'android'],
+      }],
+      ['OS=="mac"', {
+        'grit_defines': ['-D', 'scale_factors=2x'],
+      }],
+      ['OS == "ios"', {
+        'grit_defines': [
+          # define for iOS specific resources.
+          '-D', 'ios',
+          # iOS uses a whitelist to filter resources.
+          '-w', '<(DEPTH)/build/ios/grit_whitelist.txt'
+        ],
+        # iOS uses pt instead of pt-BR.
+        'locales': ['pt'],
+      }, {  # OS != "ios"
+        'locales': ['pt-BR'],
+      }],
+      ['enable_extensions==1', {
+        'grit_defines': ['-D', 'enable_extensions'],
+      }],
+      ['enable_printing==1', {
+        'grit_defines': ['-D', 'enable_printing'],
+      }],
+      ['enable_themes==1', {
+        'grit_defines': ['-D', 'enable_themes'],
+      }],
+      ['use_oem_wallpaper==1', {
+        'grit_defines': ['-D', 'use_oem_wallpaper'],
+      }],
+      ['enable_app_list==1', {
+        'grit_defines': ['-D', 'enable_app_list'],
+      }],
+      ['enable_settings_app==1', {
+        'grit_defines': ['-D', 'enable_settings_app'],
+      }],
+      ['clang_use_chrome_plugins==1 and OS!="win"', {
+        'clang_chrome_plugins_flags': [
+          '<!@(<(DEPTH)/tools/clang/scripts/plugin_flags.sh)'
+        ],
+      }],
+
+      ['enable_web_intents_tag==1', {
+        'grit_defines': ['-D', 'enable_web_intents_tag'],
+      }],
+
+      ['asan==1 and OS!="win"', {
+        'clang%': 1,
+      }],
+      ['asan==1 and OS=="mac"', {
+        # See http://crbug.com/145503.
+        'component': "static_library",
+      }],
+      ['tsan==1', {
+        'clang%': 1,
+      }],
+
+      ['OS=="linux" and clang_type_profiler==1', {
+        'clang%': 1,
+        'clang_use_chrome_plugins%': 0,
+        'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_x64',
+      }],
+
+      # On valgrind bots, override the optimizer settings so we don't inline too
+      # much and make the stacks harder to figure out.
+      #
+      # TODO(rnk): Kill off variables that no one else uses and just implement
+      # them under a build_for_tool== condition.
+      ['build_for_tool=="memcheck" or build_for_tool=="tsan"', {
+        # gcc flags
+        'mac_debug_optimization': '1',
+        'mac_release_optimization': '1',
+        'release_optimize': '1',
+        'no_gc_sections': 1,
+        'debug_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                              '-fno-builtin -fno-optimize-sibling-calls',
+        'release_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                                '-fno-builtin -fno-optimize-sibling-calls',
+
+        # MSVS flags for TSan on Pin and Windows.
+        'win_debug_RuntimeChecks': '0',
+        'win_debug_disable_iterator_debugging': '1',
+        'win_debug_Optimization': '1',
+        'win_debug_InlineFunctionExpansion': '0',
+        'win_release_InlineFunctionExpansion': '0',
+        'win_release_OmitFramePointers': '0',
+
+        'linux_use_tcmalloc': 1,
+        'release_valgrind_build': 1,
+        'werror': '',
+        'component': 'static_library',
+        'use_system_zlib': 0,
+      }],
+
+      # Build tweaks for DrMemory.
+      # TODO(rnk): Combine with tsan config to share the builder.
+      # http://crbug.com/108155
+      ['build_for_tool=="drmemory"', {
+        # These runtime checks force initialization of stack vars which blocks
+        # DrMemory's uninit detection.
+        'win_debug_RuntimeChecks': '0',
+        # Iterator debugging is slow.
+        'win_debug_disable_iterator_debugging': '1',
+        # Try to disable optimizations that mess up stacks in a release build.
+        # DrM-i#1054 (http://code.google.com/p/drmemory/issues/detail?id=1054)
+        # /O2 and /Ob0 (disable inline) cannot be used together because of a
+        # compiler bug, so we use /Ob1 instead.
+        'win_release_InlineFunctionExpansion': '1',
+        'win_release_OmitFramePointers': '0',
+        # Ditto for debug, to support bumping win_debug_Optimization.
+        'win_debug_InlineFunctionExpansion': 0,
+        'win_debug_OmitFramePointers': 0,
+        # Keep the code under #ifndef NVALGRIND.
+        'release_valgrind_build': 1,
+      }],
+
+      # Enable RLZ on Win, Mac and ChromeOS.
+      ['branding=="Chrome" and (OS=="win" or OS=="mac" or chromeos==1)', {
+        'enable_rlz%': 1,
+      }],
+    ],
+
+    # List of default apps to install in new profiles.  The first list contains
+    # the source files as found in svn.  The second list, used only for linux,
+    # contains the destination location for each of the files.  When a crx
+    # is added or removed from the list, the chrome/browser/resources/
+    # default_apps/external_extensions.json file must also be updated.
+    'default_apps_list': [
+      'browser/resources/default_apps/external_extensions.json',
+      'browser/resources/default_apps/gmail.crx',
+      'browser/resources/default_apps/search.crx',
+      'browser/resources/default_apps/youtube.crx',
+      'browser/resources/default_apps/drive.crx',
+      'browser/resources/default_apps/docs.crx',
+    ],
+    'default_apps_list_linux_dest': [
+      '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+      '<(PRODUCT_DIR)/default_apps/gmail.crx',
+      '<(PRODUCT_DIR)/default_apps/search.crx',
+      '<(PRODUCT_DIR)/default_apps/youtube.crx',
+      '<(PRODUCT_DIR)/default_apps/drive.crx',
+      '<(PRODUCT_DIR)/default_apps/docs.crx',
+    ],
+  },
+  'target_defaults': {
+    'variables': {
+      # The condition that operates on chromium_code is in a target_conditions
+      # section, and will not have access to the default fallback value of
+      # chromium_code at the top of this file, or to the chromium_code
+      # variable placed at the root variables scope of .gyp files, because
+      # those variables are not set at target scope.  As a workaround,
+      # if chromium_code is not set at target scope, define it in target scope
+      # to contain whatever value it has during early variable expansion.
+      # That's enough to make it available during target conditional
+      # processing.
+      'chromium_code%': '<(chromium_code)',
+
+      # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+      'win_release_Optimization%': '2', # 2 = /O2 (maximize speed)
+      'win_debug_Optimization%': '0',   # 0 = /Od
+
+      # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_release_OmitFramePointers%': '0',
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_debug_OmitFramePointers%': '',
+
+      # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+      'win_debug_RuntimeChecks%': '3',    # 3 = all checks enabled, 0 = off
+
+      # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+      'win_debug_InlineFunctionExpansion%': '',    # empty = default, 0 = off,
+      'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+      # VS inserts quite a lot of extra checks to algorithms like
+      # std::partial_sort in Debug build which make them O(N^2)
+      # instead of O(N*logN). This is particularly slow under memory
+      # tools like ThreadSanitizer so we want it to be disablable.
+      # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+      'win_debug_disable_iterator_debugging%': '0',
+
+      'release_extra_cflags%': '',
+      'debug_extra_cflags%': '',
+
+      'release_valgrind_build%': '<(release_valgrind_build)',
+
+      # the non-qualified versions are widely assumed to be *nix-only
+      'win_release_extra_cflags%': '',
+      'win_debug_extra_cflags%': '',
+
+      # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+      'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+      # Only used by Windows build for now.  Can be used to build into a
+      # different output directory, e.g., a build_dir_prefix of VS2010_ would
+      # output files in src/build/VS2010_{Debug,Release}.
+      'build_dir_prefix%': '',
+
+      # Targets are by default not nacl untrusted code.
+      'nacl_untrusted_build%': 0,
+
+      'pnacl_compile_flags': [
+        # pnacl uses the clang compiler so we need to suppress all the
+        # same warnings as we do for clang.
+        # TODO(sbc): Remove these if/when they are removed from the clang
+        # build.
+        '-Wno-unused-function',
+        '-Wno-char-subscripts',
+        '-Wno-c++11-extensions',
+        '-Wno-unnamed-type-template-args',
+      ],
+
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+        ['OS=="ios"', {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': 's', # Use -Os unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }, {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': '3', # Use -O3 unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }],
+      ],
+    },
+    'conditions': [
+      ['OS=="linux" and linux_use_tcmalloc==1 and clang_type_profiler==1', {
+        'cflags_cc!': ['-fno-rtti'],
+        'cflags_cc+': [
+          '-frtti',
+          '-gline-tables-only',
+          '-fintercept-allocation-functions',
+        ],
+        'defines': ['TYPE_PROFILING'],
+        'dependencies': [
+          '<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
+        ],
+      }],
+      ['OS=="win" and "<(msbuild_toolset)"!=""', {
+        'msbuild_toolset': '<(msbuild_toolset)',
+      }],
+      ['branding=="Chrome"', {
+        'defines': ['GOOGLE_CHROME_BUILD'],
+      }, {  # else: branding!="Chrome"
+        'defines': ['CHROMIUM_BUILD'],
+      }],
+      ['OS=="mac" and component=="shared_library"', {
+        'xcode_settings': {
+          'DYLIB_INSTALL_NAME_BASE': '@rpath',
+          'LD_RUNPATH_SEARCH_PATHS': [
+            # For unbundled binaries.
+            '@loader_path/.',
+            # For bundled binaries, to get back from Binary.app/Contents/MacOS.
+            '@loader_path/../../..',
+          ],
+        },
+      }],
+      ['enable_rlz==1', {
+        'defines': ['ENABLE_RLZ'],
+      }],
+      ['component=="shared_library"', {
+        'defines': ['COMPONENT_BUILD'],
+      }],
+      ['toolkit_views==1', {
+        'defines': ['TOOLKIT_VIEWS=1'],
+      }],
+      ['ui_compositor_image_transport==1', {
+        'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+      }],
+      ['use_aura==1', {
+        'defines': ['USE_AURA=1'],
+      }],
+      ['use_ash==1', {
+        'defines': ['USE_ASH=1'],
+      }],
+      ['use_default_render_theme==1', {
+        'defines': ['USE_DEFAULT_RENDER_THEME=1'],
+      }],
+      ['use_libjpeg_turbo==1', {
+        'defines': ['USE_LIBJPEG_TURBO=1'],
+      }],
+      ['use_nss==1', {
+        'defines': ['USE_NSS=1'],
+      }],
+      ['enable_one_click_signin==1', {
+        'defines': ['ENABLE_ONE_CLICK_SIGNIN'],
+      }],
+      ['toolkit_uses_gtk==1 and toolkit_views==0', {
+        # TODO(erg): We are progressively sealing up use of deprecated features
+        # in gtk in preparation for an eventual porting to gtk3.
+        'defines': ['GTK_DISABLE_SINGLE_INCLUDES=1'],
+      }],
+      ['chromeos==1', {
+        'defines': ['OS_CHROMEOS=1'],
+      }],
+      ['use_xi2_mt!=0', {
+        'defines': ['USE_XI2_MT=<(use_xi2_mt)'],
+      }],
+      ['file_manager_extension==1', {
+        'defines': ['FILE_MANAGER_EXTENSION=1'],
+      }],
+      ['profiling==1', {
+        'defines': ['ENABLE_PROFILING=1'],
+      }],
+      ['OS=="linux" and glibcxx_debug==1', {
+        'defines': ['_GLIBCXX_DEBUG=1',],
+        'cflags_cc+': ['-g'],
+      }],
+      ['remoting==1', {
+        'defines': ['ENABLE_REMOTING=1'],
+      }],
+      ['enable_webrtc==1', {
+        'defines': ['ENABLE_WEBRTC=1'],
+      }],
+      ['proprietary_codecs==1', {
+        'defines': ['USE_PROPRIETARY_CODECS'],
+      }],
+      ['enable_pepper_threading==1', {
+        'defines': ['ENABLE_PEPPER_THREADING'],
+      }],
+      ['enable_viewport==1', {
+        'defines': ['ENABLE_VIEWPORT'],
+      }],
+      ['configuration_policy==1', {
+        'defines': ['ENABLE_CONFIGURATION_POLICY'],
+      }],
+      ['input_speech==1', {
+        'defines': ['ENABLE_INPUT_SPEECH'],
+      }],
+      ['notifications==1', {
+        'defines': ['ENABLE_NOTIFICATIONS'],
+      }],
+      ['enable_hidpi==1', {
+        'defines': ['ENABLE_HIDPI=1'],
+      }],
+      ['fastbuild!=0', {
+        # Clang creates chubby debug information, which makes linking very
+        # slow. For now, don't create debug information with clang.  See
+        # http://crbug.com/70000
+        'conditions': [
+          ['clang==1', {
+            'conditions': [
+              ['OS=="linux"', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                },
+              }],
+              # Android builds symbols on release by default, disable them.
+              ['OS=="android"', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                  'release_extra_cflags': '-g0',
+                },
+              }],
+            ],
+          }, { # else clang!=1
+            'conditions': [
+              # For Windows and Mac, we don't generate debug information.
+              ['OS=="win"', {
+                'msvs_settings': {
+                  'VCLinkerTool': {
+                    'GenerateDebugInformation': 'false',
+                  },
+                  'VCCLCompilerTool': {
+                    'DebugInformationFormat': '0',
+                  },
+                },
+              }],
+              ['OS=="mac"', {
+                'xcode_settings': {
+                  'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+                },
+              }],
+              ['OS=="linux"', {
+                'variables': {
+                  'debug_extra_cflags': '-g1',
+                },
+              }],
+              ['OS=="android"', {
+                'variables': {
+                  'debug_extra_cflags': '-g1',
+                  'release_extra_cflags': '-g1',
+                },
+              }],
+            ],
+          }], # clang!=1
+        ],
+      }],  # fastbuild!=0
+      ['dcheck_always_on!=0', {
+        'defines': ['DCHECK_ALWAYS_ON=1'],
+      }],  # dcheck_always_on!=0
+      ['selinux==1', {
+        'defines': ['CHROMIUM_SELINUX=1'],
+      }],
+      ['win_use_allocator_shim==0', {
+        'conditions': [
+          ['OS=="win"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+        ],
+      }],
+      ['enable_gpu==1', {
+        'defines': [
+          'ENABLE_GPU=1',
+        ],
+      }],
+      ['use_openssl==1', {
+        'defines': [
+          'USE_OPENSSL=1',
+        ],
+      }],
+      ['enable_eglimage==1', {
+        'defines': [
+          'ENABLE_EGLIMAGE=1',
+        ],
+      }],
+      ['use_skia==1', {
+        'defines': [
+          'USE_SKIA=1',
+        ],
+      }],
+      ['asan==1 and OS=="win"', {
+        # Since asan on windows uses Syzygy, we need /PROFILE turned on to
+        # produce appropriate pdbs.
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'Profile': 'true',
+          },
+        },
+        'defines': ['ADDRESS_SANITIZER'],
+      }],  # asan==1 and OS=="win"
+      ['coverage!=0', {
+        'conditions': [
+          ['OS=="mac" or OS=="ios"', {
+            'xcode_settings': {
+              'GCC_INSTRUMENT_PROGRAM_FLOW_ARCS': 'YES',  # -fprofile-arcs
+              'GCC_GENERATE_TEST_COVERAGE_FILES': 'YES',  # -ftest-coverage
+            },
+          }],
+          ['OS=="mac"', {
+            # Add -lgcov for types executable, shared_library, and
+            # loadable_module; not for static_library.
+            # This is a delayed conditional.
+            'target_conditions': [
+              ['_type!="static_library"', {
+                'xcode_settings': { 'OTHER_LDFLAGS': [ '-lgcov' ] },
+              }],
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'cflags': [ '-ftest-coverage',
+                        '-fprofile-arcs' ],
+            'link_settings': { 'libraries': [ '-lgcov' ] },
+          }],
+          ['OS=="win"', {
+            'variables': {
+              # Disable incremental linking for all modules.
+              # 0: inherit, 1: disabled, 2: enabled.
+              'msvs_debug_link_incremental': '1',
+              'msvs_large_module_debug_link_mode': '1',
+            },
+            'defines': [
+              # Disable iterator debugging (huge speed boost without any
+              # change in coverage results).
+              '_HAS_ITERATOR_DEBUGGING=0',
+            ],
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # Enable profile information (necessary for coverage
+                # instrumentation). This is incompatible with incremental
+                # linking.
+                'Profile': 'true',
+              },
+            }
+         }],  # OS==win
+        ],  # conditions for coverage
+      }],  # coverage!=0
+      ['OS=="win"', {
+        'defines': [
+          '__STD_C',
+          '_CRT_SECURE_NO_DEPRECATE',
+          '_SCL_SECURE_NO_DEPRECATE',
+          # This define is required to pull in the new Win8 interfaces from
+          # system headers like ShObjIdl.h.
+          'NTDDI_VERSION=0x06020000',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/wtl/include',
+        ],
+        'conditions': [
+          ['win_z7!=0', {
+            'msvs_settings': {
+              # Generates debug info when win_z7=1
+              # even if fastbuild=1 (that makes GenerateDebugInformation false).
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '1',
+              }
+            }
+          }],
+          ['"<(GENERATOR)"=="msvs"', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # Make the pdb name sane. Otherwise foo.exe and foo.dll both
+                # have foo.pdb. The ninja generator already defaults to this and
+                # can't handle the $(TargetPath) macro.
+                'ProgramDatabaseFile': '$(TargetPath).pdb',
+              }
+            },
+          }],
+        ],  # conditions for OS=="win"
+      }],  # OS==win
+      ['enable_task_manager==1', {
+        'defines': [
+          'ENABLE_TASK_MANAGER=1',
+        ],
+      }],
+      ['enable_web_intents==1', {
+        'defines': [
+          'ENABLE_WEB_INTENTS=1',
+        ],
+      }],
+      ['enable_extensions==1', {
+        'defines': [
+          'ENABLE_EXTENSIONS=1',
+        ],
+      }],
+      ['OS=="win" and branding=="Chrome"', {
+        'defines': ['ENABLE_SWIFTSHADER'],
+      }],
+      ['enable_dart==1', {
+        'defines': ['WEBKIT_USING_DART=1'],
+      }],
+      ['enable_plugin_installation==1', {
+        'defines': ['ENABLE_PLUGIN_INSTALLATION=1'],
+      }],
+      ['enable_plugins==1', {
+        'defines': ['ENABLE_PLUGINS=1'],
+      }],
+      ['enable_session_service==1', {
+        'defines': ['ENABLE_SESSION_SERVICE=1'],
+      }],
+      ['enable_themes==1', {
+        'defines': ['ENABLE_THEMES=1'],
+      }],
+      ['enable_background==1', {
+        'defines': ['ENABLE_BACKGROUND=1'],
+      }],
+      ['enable_automation==1', {
+        'defines': ['ENABLE_AUTOMATION=1'],
+      }],
+      ['enable_google_now==1', {
+        'defines': ['ENABLE_GOOGLE_NOW=1'],
+      }],
+      ['enable_language_detection==1', {
+        'defines': ['ENABLE_LANGUAGE_DETECTION=1'],
+      }],
+      ['enable_printing==1', {
+        'defines': ['ENABLE_PRINTING=1'],
+      }],
+      ['enable_captive_portal_detection==1', {
+        'defines': ['ENABLE_CAPTIVE_PORTAL_DETECTION=1'],
+      }],
+      ['enable_app_list==1', {
+        'defines': ['ENABLE_APP_LIST=1'],
+      }],
+      ['enable_settings_app==1', {
+        'defines': ['ENABLE_SETTINGS_APP=1'],
+      }],
+      ['disable_ftp_support==1', {
+        'defines': ['DISABLE_FTP_SUPPORT=1'],
+      }],
+    ],  # conditions for 'target_defaults'
+    'target_conditions': [
+      ['enable_wexit_time_destructors==1', {
+        'conditions': [
+          [ 'clang==1', {
+            'cflags': [
+              '-Wexit-time-destructors',
+            ],
+            'xcode_settings': {
+              'WARNING_CFLAGS': [
+                '-Wexit-time-destructors',
+              ],
+            },
+          }],
+        ],
+      }],
+      ['chromium_code==0', {
+        'conditions': [
+          [ 'os_posix==1 and OS!="mac" and OS!="ios" and OS!="lb_shell"', {
+            # We don't want to get warnings from third-party code,
+            # so remove any existing warning-enabling flags like -Wall.
+            'cflags!': [
+              '-Wall',
+              '-Wextra',
+            ],
+            'cflags_cc': [
+              # Don't warn about hash_map in third-party code.
+              '-Wno-deprecated',
+            ],
+            'cflags': [
+              # Don't warn about printf format problems.
+              # This is off by default in gcc but on in Ubuntu's gcc(!).
+              '-Wno-format',
+            ],
+            'cflags_cc!': [
+              # TODO(fischman): remove this.
+              # http://code.google.com/p/chromium/issues/detail?id=90453
+              '-Wsign-compare',
+            ]
+          }],
+          # TODO: Fix all warnings on chromeos too.
+          [ 'os_posix==1 and OS!="mac" and OS!="ios" and OS!="lb_shell" and (clang!=1 or chromeos==1)', {
+            'cflags!': [
+              '-Werror',
+            ],
+          }],
+          [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android" and OS!="lb_shell"', {
+            'cflags': [
+              # Don't warn about ignoring the return value from e.g. close().
+              # This is off by default in some gccs but on by default in others.
+              # BSD systems do not support this option, since they are usually
+              # using gcc 4.2.1, which does not have this flag yet.
+              '-Wno-unused-result',
+            ],
+          }],
+          [ 'OS=="win"', {
+            'defines': [
+              '_CRT_SECURE_NO_DEPRECATE',
+              '_CRT_NONSTDC_NO_WARNINGS',
+              '_CRT_NONSTDC_NO_DEPRECATE',
+              '_SCL_SECURE_NO_DEPRECATE',
+            ],
+            'msvs_disabled_warnings': [4800],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'WarningLevel': '3',
+                'WarnAsError': '<(win_third_party_warn_as_error)',
+                'Detect64BitPortabilityProblems': 'false',
+              },
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'msvs_settings': {
+                  'VCCLCompilerTool': { 'WarnAsError': 'false' },
+                }
+              }],
+            ],
+          }],
+          # TODO(darin): Unfortunately, some third_party code depends on base.
+          [ '(OS=="win" or target_arch=="xb1") and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+          [ 'OS=="mac" or OS=="ios"', {
+            'xcode_settings': {
+              'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'xcode_settings': {
+                  'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',    # -Werror
+                },
+              }],
+            ],
+          }],
+          [ 'OS=="ios"', {
+            'xcode_settings': {
+              # TODO(ios): Fix remaining warnings in third-party code, then
+              # remove this; the Mac cleanup didn't get everything that's
+              # flagged in an iOS build.
+              'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+              'RUN_CLANG_STATIC_ANALYZER': 'NO',
+            },
+          }],
+        ],
+      }, {
+        'includes': [
+           # Rules for excluding e.g. foo_win.cc from the build on non-Windows.
+          'filename_rules.gypi',
+        ],
+        # In Chromium code, we define __STDC_foo_MACROS in order to get the
+        # C99 macros on Mac and Linux.
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          '__STDC_FORMAT_MACROS',
+        ],
+        'conditions': [
+          ['OS=="win"', {
+            # turn on warnings for signed/unsigned mismatch on chromium code.
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': ['/we4389'],
+              },
+            },
+          }],
+          ['(OS == "win" or target_arch=="xb1") and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+        ],
+      }],
+    ],  # target_conditions for 'target_defaults'
+    'configurations': {
+      # VCLinkerTool LinkIncremental values below:
+      #   0 == default
+      #   1 == /INCREMENTAL:NO
+      #   2 == /INCREMENTAL
+      # Debug links incremental, Release does not.
+      #
+      # Abstract base configurations to cover common attributes.
+      #
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',
+        },
+      },
+      'x86_Base': {
+        'abstract': 1,
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'TargetMachine': '1',
+          },
+        },
+        'msvs_configuration_platform': 'Win32',
+      },
+      'x64_Base': {
+        'abstract': 1,
+        'msvs_configuration_platform': 'x64',
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'TargetMachine': '17', # x86 - 64
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+          },
+          'VCLibrarianTool': {
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+          },
+        },
+        'defines': [
+          # Not sure if tcmalloc works on 64-bit Windows.
+          'NO_TCMALLOC',
+        ],
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'defines': [
+          'DYNAMIC_ANNOTATIONS_ENABLED=1',
+          'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+        ],
+        'xcode_settings': {
+          'COPY_PHASE_STRIP': 'NO',
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+          'OTHER_CFLAGS': [
+            '<@(debug_extra_cflags)',
+          ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_debug_Optimization)',
+            'PreprocessorDefinitions': ['_DEBUG'],
+            'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_debug_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_debug_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_debug_InlineFunctionExpansion)',
+              }],
+              ['win_debug_disable_iterator_debugging==1', {
+                'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+              }],
+
+              # if win_debug_OmitFramePointers is blank, leave as default
+              ['win_debug_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_debug_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': '<(msvs_debug_link_incremental)',
+            # ASLR makes debugging with windbg difficult because Chrome.exe and
+            # Chrome.dll share the same base name. As result, windbg will
+            # name the Chrome.dll module like chrome_<base address>, where
+            # <base address> typically changes with each launch. This in turn
+            # means that breakpoints in Chrome.dll don't stick from one launch
+            # to the next. For this reason, we turn ASLR off in debug builds.
+            # Note that this is a three-way bool, where 0 means to pick up
+            # the default setting, 1 is off and 2 is on.
+            'RandomizedBaseAddress': 1,
+          },
+          'VCResourceCompilerTool': {
+            'PreprocessorDefinitions': ['_DEBUG'],
+          },
+        },
+        'conditions': [
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(debug_extra_cflags)',
+                ],
+              }],
+            ],
+          }],
+          # Disabled on iOS because it was causing a crash on startup.
+          # TODO(michelea): investigate, create a reduced test and possibly
+          # submit a radar.
+          ['release_valgrind_build==0 and OS!="ios"', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fstack-protector-all',  # Implies -fstack-protector
+              ],
+            },
+          }],
+        ],
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'xcode_settings': {
+          'DEAD_CODE_STRIPPING': 'YES',  # -Wl,-dead_strip
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+          'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+            'conditions': [
+              # In official builds, each target will self-select
+              # an optimization level.
+              ['buildtype!="Official"', {
+                  'Optimization': '<(win_release_Optimization)',
+                },
+              ],
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_release_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_release_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_release_InlineFunctionExpansion)',
+              }],
+
+              # if win_release_OmitFramePointers is blank, leave as default
+              ['win_release_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_release_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_release_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            # LinkIncremental is a tri-state boolean, where 0 means default
+            # (i.e., inherit from parent solution), 1 means false, and
+            # 2 means true.
+            'LinkIncremental': '1',
+            # This corresponds to the /PROFILE flag which ensures the PDB
+            # file contains FIXUP information (growing the PDB file by about
+            # 5%) but does not otherwise alter the output binary. This
+            # information is used by the Syzygy optimization tool when
+            # decomposing the release image.
+            'Profile': 'true',
+          },
+        },
+        'conditions': [
+          ['msvs_use_common_release', {
+            'includes': ['release.gypi'],
+          }],
+          ['release_valgrind_build==0 and tsan==0', {
+            'defines': [
+              'NVALGRIND',
+              'DYNAMIC_ANNOTATIONS_ENABLED=0',
+            ],
+          }, {
+            'defines': [
+              'DYNAMIC_ANNOTATIONS_ENABLED=1',
+              'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+            ],
+          }],
+          ['win_use_allocator_shim==0', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['os_posix==1 and chromium_code==1', {
+            # Non-chromium code is not guaranteed to compile cleanly
+            # with _FORTIFY_SOURCE. Also, fortified build may fail
+            # when optimizations are disabled, so only do that for Release
+            # build.
+            'defines': [
+              '_FORTIFY_SOURCE=2',
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(release_extra_cflags)',
+                ],
+              }],
+            ],
+          }],
+        ],
+      },
+      'conditions': [
+        [ 'OS!="lb_shell"', {
+          #
+          # Concrete configurations omitted from LBPS3 build
+          #
+          'Debug': {
+            'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+          },
+          'Release': {
+            'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+            'conditions': [
+              ['msvs_use_common_release', {
+                'includes': ['release.gypi'],
+              }],
+            ]
+          },
+        }],
+        [ 'OS=="win"', {
+          # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+          'Debug_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+          },
+          'Release_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+          },
+        }],
+      ],
+    },
+  },
+  'conditions': [
+    ['os_posix==1 and OS!="lb_shell"', {
+      'target_defaults': {
+        'cflags': [
+          # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc
+          # supports it.
+          '-fstack-protector',
+          '--param=ssp-buffer-size=4',
+        ],
+        'ldflags': [
+          '-Wl,-z,now',
+          '-Wl,-z,relro',
+        ],
+      },
+    }],
+    ['os_posix==1 and OS!="mac" and OS!="ios" and OS!="lb_shell"', {
+      'target_defaults': {
+        # Enable -Werror by default, but put it in a variable so it can
+        # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+        'variables': {
+          'werror%': '-Werror',
+          'libraries_for_target%': '',
+        },
+        'defines': [
+          '_FILE_OFFSET_BITS=64',
+        ],
+        'cflags': [
+          '<(werror)',  # See note above about the werror variable.
+          '-pthread',
+          '-fno-exceptions',
+          '-fno-strict-aliasing',  # See http://crbug.com/32204
+          '-Wall',
+          # TODO(evan): turn this back on once all the builds work.
+          # '-Wextra',
+          # Don't warn about unused function params.  We use those everywhere.
+          '-Wno-unused-parameter',
+          # Don't warn about the "struct foo f = {0};" initialization pattern.
+          '-Wno-missing-field-initializers',
+          # Don't export any symbols (for example, to plugins we dlopen()).
+          # Note: this is *required* to make some plugins work.
+          '-fvisibility=hidden',
+          '-pipe',
+        ],
+        'cflags_cc': [
+          '-fno-rtti',
+          '-fno-threadsafe-statics',
+          # Make inline functions have hidden visibility by default.
+          # Surprisingly, not covered by -fvisibility=hidden.
+          '-fvisibility-inlines-hidden',
+          # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+          # so we specify it explicitly.
+          # TODO(fischman): remove this if http://llvm.org/PR10448 obsoletes it.
+          # http://code.google.com/p/chromium/issues/detail?id=90453
+          '-Wsign-compare',
+        ],
+        'ldflags': [
+          '-pthread', '-Wl,-z,noexecstack',
+        ],
+        'libraries' : [
+          '<(libraries_for_target)',
+        ],
+        'configurations': {
+          'Debug_Base': {
+            'variables': {
+              'debug_optimize%': '0',
+            },
+            'defines': [
+              '_DEBUG',
+            ],
+            'cflags': [
+              '-O>(debug_optimize)',
+              '-g',
+            ],
+            'conditions' : [
+              ['OS=="android" and android_full_debug==0', {
+                # Some configurations are copied from Release_Base to reduce
+                # the binary size.
+                'variables': {
+                  'debug_optimize%': 's',
+                },
+                'cflags': [
+                  '-fomit-frame-pointer',
+                  '-fdata-sections',
+                  '-ffunction-sections',
+                ],
+                'ldflags': [
+                  '-Wl,-O1',
+                  '-Wl,--as-needed',
+                  '-Wl,--gc-sections',
+                ],
+              }],
+            ],
+          },
+          'Release_Base': {
+            'variables': {
+              'release_optimize%': '2',
+              # Binaries become big and gold is unable to perform GC
+              # and remove unused sections for some of test targets
+              # on 32 bit platform.
+              # (This is currently observed only in chromeos valgrind bots)
+              # The following flag is to disable --gc-sections linker
+              # option for these bots.
+              'no_gc_sections%': 0,
+
+              # TODO(bradnelson): reexamine how this is done if we change the
+              # expansion of configurations
+              'release_valgrind_build%': 0,
+            },
+            'cflags': [
+              '-O<(release_optimize)',
+              # Don't emit the GCC version ident directives, they just end up
+              # in the .comment section taking up binary size.
+              '-fno-ident',
+              # Put data and code in their own sections, so that unused symbols
+              # can be removed at link time with --gc-sections.
+              '-fdata-sections',
+              '-ffunction-sections',
+            ],
+            'ldflags': [
+              # Specifically tell the linker to perform optimizations.
+              # See http://lwn.net/Articles/192624/ .
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'conditions' : [
+              ['no_gc_sections==0', {
+                'ldflags': [
+                  '-Wl,--gc-sections',
+                ],
+              }],
+              ['OS=="android"', {
+                'variables': {
+                  'release_optimize%': 's',
+                },
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+              ['clang==1', {
+                'cflags!': [
+                  '-fno-ident',
+                ],
+              }],
+              ['profiling==1', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-g',
+                ],
+              }],
+            ],
+          },
+        },
+        'variants': {
+          'coverage': {
+            'cflags': ['-fprofile-arcs', '-ftest-coverage'],
+            'ldflags': ['-fprofile-arcs'],
+          },
+          'profile': {
+            'cflags': ['-pg', '-g'],
+            'ldflags': ['-pg'],
+          },
+          'symbols': {
+            'cflags': ['-g'],
+          },
+        },
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'asflags': [
+                  # Needed so that libs with .s files (e.g. libicudata.a)
+                  # are compatible with the general 32-bit-ness.
+                  '-32',
+                ],
+                # All floating-point computations on x87 happens in 80-bit
+                # precision.  Because the C and C++ language standards allow
+                # the compiler to keep the floating-point values in higher
+                # precision than what's specified in the source and doing so
+                # is more efficient than constantly rounding up to 64-bit or
+                # 32-bit precision as specified in the source, the compiler,
+                # especially in the optimized mode, tries very hard to keep
+                # values in x87 floating-point stack (in 80-bit precision)
+                # as long as possible. This has important side effects, that
+                # the real value used in computation may change depending on
+                # how the compiler did the optimization - that is, the value
+                # kept in 80-bit is different than the value rounded down to
+                # 64-bit or 32-bit. There are possible compiler options to
+                # make this behavior consistent (e.g. -ffloat-store would keep
+                # all floating-values in the memory, thus force them to be
+                # rounded to its original precision) but they have significant
+                # runtime performance penalty.
+                #
+                # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+                # which keep floating-point values in SSE registers in its
+                # native precision (32-bit for single precision, and 64-bit
+                # for double precision values). This means the floating-point
+                # value used during computation does not change depending on
+                # how the compiler optimized the code, since the value is
+                # always kept in its specified precision.
+                'conditions': [
+                  ['branding=="Chromium" and disable_sse2==0', {
+                    'cflags': [
+                      '-march=pentium4',
+                      '-msse2',
+                      '-mfpmath=sse',
+                    ],
+                  }],
+                  # ChromeOS targets Pinetrail, which is sse3, but most of the
+                  # benefit comes from sse2 so this setting allows ChromeOS
+                  # to build on other CPUs.  In the future -march=atom would
+                  # help but requires a newer compiler.
+                  ['chromeos==1 and disable_sse2==0', {
+                    'cflags': [
+                      '-msse2',
+                    ],
+                  }],
+                  # Install packages have started cropping up with
+                  # different headers between the 32-bit and 64-bit
+                  # versions, so we have to shadow those differences off
+                  # and make sure a 32-bit-on-64-bit build picks up the
+                  # right files.
+                  # For android build, use NDK headers instead of host headers
+                  ['host_arch!="ia32" and OS!="android"', {
+                    'include_dirs+': [
+                      '/usr/include32',
+                    ],
+                  }],
+                ],
+                # -mmmx allows mmintrin.h to be used for mmx intrinsics.
+                # video playback is mmx and sse2 optimized.
+                'cflags': [
+                  '-m32',
+                  '-mmmx',
+                ],
+                'ldflags': [
+                  '-m32',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags_cc': [
+                  # The codesourcery arm-2009q3 toolchain warns that the ABI
+                  # has changed whenever it encounters a varargs function. This
+                  # silences those warnings, as they are not helpful and
+                  # clutter legitimate warnings.
+                  '-Wno-abi',
+                ],
+                'conditions': [
+                  ['arm_thumb==1', {
+                    'cflags': [
+                    '-mthumb',
+                    ]
+                  }],
+                  ['armv7==1', {
+                    'cflags': [
+                      '-march=armv7-a',
+                      '-mtune=cortex-a8',
+                      '-mfloat-abi=<(arm_float_abi)',
+                    ],
+                    'conditions': [
+                      ['arm_neon==1', {
+                        'cflags': [ '-mfpu=neon', ],
+                      }, {
+                        'cflags': [ '-mfpu=<(arm_fpu)', ],
+                      }],
+                    ],
+                  }],
+                  ['OS=="android"', {
+                    # Most of the following flags are derived from what Android
+                    # uses by default when building for arm, reference for which
+                    # can be found in the following file in the Android NDK:
+                    # toolchains/arm-linux-androideabi-4.4.3/setup.mk
+                    'cflags': [
+                      # The tree-sra optimization (scalar replacement for
+                      # aggregates enabling subsequent optimizations) leads to
+                      # invalid code generation when using the Android NDK's
+                      # compiler (r5-r7). This can be verified using
+                      # TestWebKitAPI's WTF.Checked_int8_t test.
+                      '-fno-tree-sra',
+                      '-fuse-ld=gold',
+                      '-Wno-psabi',
+                    ],
+                    # Android now supports .relro sections properly.
+                    # NOTE: While these flags enable the generation of .relro
+                    # sections, the generated libraries can still be loaded on
+                    # older Android platform versions.
+                    'ldflags': [
+                        '-Wl,-z,relro',
+                        '-Wl,-z,now',
+                        '-fuse-ld=gold',
+                    ],
+                    'conditions': [
+                      ['arm_thumb==1', {
+                        # Android toolchain doesn't support -mimplicit-it=thumb
+                        'cflags!': [ '-Wa,-mimplicit-it=thumb' ],
+                        'cflags': [ '-mthumb-interwork' ],
+                      }],
+                      ['armv7==0', {
+                        # Flags suitable for Android emulator
+                        'cflags': [
+                          '-march=armv5te',
+                          '-mtune=xscale',
+                          '-msoft-float',
+                        ],
+                        'defines': [
+                          '__ARM_ARCH_5__',
+                          '__ARM_ARCH_5T__',
+                          '__ARM_ARCH_5E__',
+                          '__ARM_ARCH_5TE__',
+                        ],
+                      }],
+                      ['profiling==1', {
+                        'cflags': [
+                          '-marm', # Probably redundant, but recommended by "perf" docs.
+                          '-mapcs-frame', # Seems required by -fno-omit-frame-pointer.
+                        ],
+                      }],
+                      ['clang==1', {
+                        'cflags!': [
+                          # Clang does not support the following options.
+                          '-mthumb-interwork',
+                          '-finline-limit=64',
+                          '-fno-tree-sra',
+                          '-fuse-ld=gold',
+                          '-Wno-psabi',
+                        ],
+                      }],
+                    ],
+                    'target_conditions': [
+                      # ndk-build copies .a's around the filesystem, breaking
+                      # relative paths in thin archives.  Disable using thin
+                      # archives to avoid problems until one of these is fixed:
+                      # http://code.google.com/p/android/issues/detail?id=40302
+                      # http://code.google.com/p/android/issues/detail?id=40303
+                      ['_type=="static_library"', {
+                        'standalone_static_library': 1,
+                      }],
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_fpic==1', {
+            'cflags': [
+              '-fPIC',
+            ],
+            'ldflags': [
+              '-fPIC',
+            ],
+          }],
+          ['sysroot!=""', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+              }]]
+          }],
+          ['clang==1', {
+            'cflags': [
+              '-Wheader-hygiene',
+              # Clang spots more unused functions.
+              '-Wno-unused-function',
+              # Don't die on dtoa code that uses a char as an array index.
+              '-Wno-char-subscripts',
+              # Especially needed for gtest macros using enum values from Mac
+              # system headers.
+              # TODO(pkasting): In C++11 this is legal, so this should be
+              # removed when we change to that.  (This is also why we don't
+              # bother fixing all these cases today.)
+              '-Wno-unnamed-type-template-args',
+              # This (rightfully) complains about 'override', which we use
+              # heavily.
+              '-Wno-c++11-extensions',
+
+              # Warns on switches on enums that cover all enum values but
+              # also contain a default: branch. Chrome is full of that.
+              '-Wno-covered-switch-default',
+            ],
+            'cflags!': [
+              # Clang doesn't seem to know this flag.
+              '-mfpmath=sse',
+            ],
+          }],
+          ['clang==1 and clang_use_chrome_plugins==1', {
+            'cflags': [
+              '<@(clang_chrome_plugins_flags)',
+            ],
+          }],
+          ['clang==1 and clang_load!=""', {
+            'cflags': [
+              '-Xclang', '-load', '-Xclang', '<(clang_load)',
+            ],
+          }],
+          ['clang==1 and clang_add_plugin!=""', {
+            'cflags': [
+              '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+            ],
+          }],
+          ['clang==1 and "<(GENERATOR)"=="ninja"', {
+            'cflags': [
+              # See http://crbug.com/110262
+              '-fcolor-diagnostics',
+            ],
+          }],
+          ['asan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=address',
+                  '-fno-omit-frame-pointer',
+                  '-w',  # http://crbug.com/162783
+                ],
+                'ldflags': [
+                  '-fsanitize=address',
+                ],
+                'defines': [
+                  'ADDRESS_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['tsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=thread',
+                  '-fno-omit-frame-pointer',
+                  '-fPIE',
+                  '-mllvm', '-tsan-blacklist=<(tsan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=thread',
+                ],
+                'defines': [
+                  'THREAD_SANITIZER',
+                  'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1',
+                  'WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1',
+                ],
+                'target_conditions': [
+                  ['_type=="executable"', {
+                    'ldflags': [
+                      '-pie',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['order_profiling!=0 and (chromeos==1 or OS=="linux" or OS=="android")', {
+            'target_conditions' : [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-finstrument-functions',
+                  # Allow mmx intrinsics to inline, so that the
+                  # compiler can expand the intrinsics.
+                  '-finstrument-functions-exclude-file-list=mmintrin.h',
+                ],
+              }],
+            ],
+          }],
+          ['linux_breakpad==1', {
+            'cflags': [ '-g' ],
+            'defines': ['USE_LINUX_BREAKPAD'],
+            'conditions': [
+              ['target_arch=="ia32"', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      # Workaround for linker OOM. http://crbug.com/160253.
+                      '-Wl,--no-keep-files-mapped',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_use_heapchecker==1', {
+            'variables': {'linux_use_tcmalloc%': 1},
+            'defines': ['USE_HEAPCHECKER'],
+          }],
+          ['linux_use_tcmalloc==0', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['linux_keep_shadow_stacks==1', {
+            'defines': ['KEEP_SHADOW_STACKS'],
+            'cflags': [
+              '-finstrument-functions',
+              # Allow mmx intrinsics to inline, so that the compiler can expand
+              # the intrinsics.
+              '-finstrument-functions-exclude-file-list=mmintrin.h',
+            ],
+          }],
+          ['linux_use_gold_flags==1', {
+            'ldflags': [
+              # Experimentation found that using four linking threads
+              # saved ~20% of link time.
+              # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+              '-Wl,--threads',
+              '-Wl,--thread-count=4',
+            ],
+            'conditions': [
+              ['release_valgrind_build==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      # There seems to be a conflict of --icf and -pie
+                      # in gold which can generate crashy binaries. As
+                      # a security measure, -pie takes precedence for
+                      # now.
+                      #'-Wl,--icf=safe',
+                      '-Wl,--icf=none',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_use_gold_binary==1', {
+            'variables': {
+              'conditions': [
+                ['inside_chromium_build==1', {
+                  # We pass the path to gold to the compiler.  gyp leaves
+                  # unspecified what the cwd is when running the compiler,
+                  # so the normal gyp path-munging fails us.  This hack
+                  # gets the right path.
+                  'gold_path': '<(PRODUCT_DIR)/../../third_party/gold',
+                }, {
+                  'gold_path': '<(PRODUCT_DIR)/../../Source/WebKit/chromium/third_party/gold',
+                }]
+              ]
+            },
+            'ldflags': [
+              # Put our gold binary in the search path for the linker.
+              '-B<(gold_path)',
+            ],
+          }],
+        ],
+      },
+    }],
+    # FreeBSD-specific options; note that most FreeBSD options are set above,
+    # with Linux.
+    ['OS=="freebsd"', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--no-keep-memory',
+        ],
+      },
+    }],
+    # Android-specific options; note that most are set above with Linux.
+    ['OS=="android"', {
+      'variables': {
+        # This is a unique identifier for a given build. It's used for
+        # identifying various build artifacts corresponding to a particular
+        # build of chrome (e.g. where to find archived symbols).
+        'chrome_build_id%': '',
+        'conditions': [
+          # Use shared stlport library when system one used.
+          # Figure this out early since it needs symbols from libgcc.a, so it
+          # has to be before that in the set of libraries.
+          ['use_system_stlport==1', {
+            'android_stlport_library': 'stlport',
+          }, {
+            'conditions': [
+              ['component=="shared_library"', {
+                  'android_stlport_library': 'stlport_shared',
+              }, {
+                  'android_stlport_library': 'stlport_static',
+              }],
+            ],
+          }],
+        ],
+
+        # Placing this variable here prevents us from forking libvpx, used
+        # by remoting.  Remoting is off, so it needn't be built,
+        # so forking its deps seems like overkill.
+        # But this variable needs to be defined to properly run gyp.
+        # A proper solution is to have an OS==android conditional
+        # in third_party/libvpx/libvpx.gyp to define it.
+        'libvpx_path': 'lib/linux/arm',
+      },
+      'target_defaults': {
+        'variables': {
+          'release_extra_cflags%': '',
+        },
+
+        'target_conditions': [
+          # Settings for building device targets using Android's toolchain.
+          # These are based on the setup.mk file from the Android NDK.
+          #
+          # The NDK Android executable link step looks as follows:
+          #  $LDFLAGS
+          #  $(TARGET_CRTBEGIN_DYNAMIC_O)  <-- crtbegin.o
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #  $(TARGET_CRTEND_O)            <-- crtend.o
+          #
+          # For now the above are approximated for executables by adding
+          # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+          # of 'libraries'.
+          #
+          # The NDK Android shared library link step looks as follows:
+          #  $LDFLAGS
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  -l,--whole-archive
+          #  $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+          #  -l,--no-whole-archive
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #
+          # For now, assume that whole static libraries are not needed.
+          #
+          # For both executables and shared libraries, add the proper
+          # libgcc.a to the start of libraries which puts it in the
+          # proper spot after .o and .a files get linked in.
+          #
+          # TODO: The proper thing to do longer-term would be proper gyp
+          # support for a custom link command line.
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'cflags': [
+              '-ffunction-sections',
+              '-funwind-tables',
+              '-g',
+              '-fstack-protector',
+              '-fno-short-enums',
+              '-finline-limit=64',
+              '-Wa,--noexecstack',
+              '<@(release_extra_cflags)',
+            ],
+            'defines': [
+              'ANDROID',
+              '__GNU_SOURCE=1',  # Necessary for clone()
+              'USE_STLPORT=1',
+              '_STLP_USE_PTR_SPECIALIZATIONS=1',
+              'CHROME_BUILD_ID="<(chrome_build_id)"',
+            ],
+            'ldflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'ldflags': [
+              '-nostdlib',
+              '-Wl,--no-undefined',
+              # Don't export symbols from statically linked libraries.
+              '-Wl,--exclude-libs=ALL',
+            ],
+            'libraries': [
+              '-l<(android_stlport_library)',
+              # Manually link the libgcc.a that the cross compiler uses.
+              '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+              '-lc',
+              '-ldl',
+              '-lstdc++',
+              '-lm',
+            ],
+            'conditions': [
+              ['component=="shared_library"', {
+                'libraries': [
+                  '-lgnustl_shared',
+                ],
+                'ldflags!': [
+                  '-Wl,--exclude-libs=ALL',
+                ],
+              }],
+              ['clang==1', {
+                'cflags': [
+                  # Work around incompatibilities between bionic and clang
+                  # headers.
+                  '-D__compiler_offsetof=__builtin_offsetof',
+                  '-Dnan=__builtin_nan',
+                ],
+                'conditions': [
+                  ['target_arch=="arm"', {
+                    'cflags': [
+                      '-target arm-linux-androideabi',
+                      '-mllvm -arm-enable-ehabi',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'cflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                  }],
+                ],
+              }],
+              ['asan==1', {
+                'cflags': [
+                  # Android build relies on -Wl,--gc-sections removing
+                  # unreachable code. ASan instrumentation for globals inhibits
+                  # this and results in a library with unresolvable relocations.
+                  # TODO(eugenis): find a way to reenable this.
+                  '-mllvm -asan-globals=0',
+                ],
+              }],
+              ['android_build_type==0', {
+                'defines': [
+                  # The NDK has these things, but doesn't define the constants
+                  # to say that it does. Define them here instead.
+                  'HAVE_SYS_UIO_H',
+                ],
+                'cflags': [
+                  '--sysroot=<(android_ndk_sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(android_ndk_sysroot)',
+                ],
+              }],
+              ['android_build_type==1', {
+                'include_dirs': [
+                  # OpenAL headers from the Android tree.
+                  '<(android_src)/frameworks/wilhelm/include',
+                ],
+                'cflags': [
+                  # Android predefines this as 1; undefine it here so Chromium
+                  # can redefine it later to be 2 for chromium code and unset
+                  # for third party code. This works because cflags are added
+                  # before defines.
+                  '-U_FORTIFY_SOURCE',
+                  # Chromium builds its own (non-third-party) code with
+                  # -Werror to make all warnings into errors. However, Android
+                  # enables warnings that Chromium doesn't, so some of these
+                  # extra warnings trip and break things.
+                  # For now, we leave these warnings enabled but prevent them
+                  # from being treated as errors.
+                  #
+                  # Things that are part of -Wextra:
+                  '-Wno-error=extra', # Enabled by -Wextra, but no specific flag
+                  '-Wno-error=ignored-qualifiers',
+                  '-Wno-error=type-limits',
+                  # Other things unrelated to -Wextra:
+                  '-Wno-error=non-virtual-dtor',
+                  '-Wno-error=sign-promo',
+                ],
+                'cflags_cc': [
+                  # Disabling c++0x-compat should be handled in WebKit, but
+                  # this currently doesn't work because gcc_version is not set
+                  # correctly when building with the Android build system.
+                  # TODO(torne): Fix this in WebKit.
+                  '-Wno-error=c++0x-compat',
+                ],
+              }],
+              ['android_build_type==1 and chromium_code==0', {
+                'cflags': [
+                  # There is a class of warning which:
+                  #  1) Android always enables and also treats as errors
+                  #  2) Chromium ignores in third party code
+                  # For now, I am leaving these warnings enabled but preventing
+                  # them from being treated as errors here.
+                  '-Wno-error=address',
+                  '-Wno-error=format-security',
+                  '-Wno-error=non-virtual-dtor',
+                  '-Wno-error=return-type',
+                  '-Wno-error=sequence-point',
+                ],
+              }],
+              ['target_arch == "arm"', {
+                'ldflags': [
+                  # Enable identical code folding to reduce size.
+                  '-Wl,--icf=safe',
+                ],
+              }],
+              # NOTE: The stlport header include paths below are specified in
+              # cflags rather than include_dirs because they need to come
+              # after include_dirs. Think of them like system headers, but
+              # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+              # toolchain (circa Gingerbread) will exhibit strange errors.
+              # The include ordering here is important; change with caution.
+              ['use_system_stlport==1', {
+                'cflags': [
+                  # For libstdc++/include, which is used by stlport.
+                  '-I<(android_src)/bionic',
+                  '-I<(android_src)/external/stlport/stlport',
+                ],
+              }, { # else: use_system_stlport!=1
+                'cflags': [
+                  '-I<(android_ndk_root)/sources/cxx-stl/stlport/stlport',
+                  '-I<(android_ndk_root)/sources/cxx-stl/gnu-libstdc++/4.6/include',
+                ],
+                'conditions': [
+                  ['target_arch=="arm" and armv7==1', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi-v7a',
+                      '-L<(android_ndk_root)/sources/cxx-stl/gnu-libstdc++/4.6/libs/armeabi-v7a',
+                    ],
+                  }],
+                  ['target_arch=="arm" and armv7==0', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi',
+                      '-L<(android_ndk_root)/sources/cxx-stl/gnu-libstdc++/4.6/libs/armeabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/x86',
+                      '-L<(android_ndk_root)/sources/cxx-stl/gnu-libstdc++/4.6/libs/x86',
+                    ],
+                  }],
+                ],
+              }],
+              ['target_arch=="ia32"', {
+                # The x86 toolchain currently has problems with stack-protector.
+                'cflags!': [
+                  '-fstack-protector',
+                ],
+                'cflags': [
+                  '-fno-stack-protector',
+                ],
+              }],
+            ],
+            'target_conditions': [
+              ['_type=="executable"', {
+                'ldflags': [
+                  '-Bdynamic',
+                  '-Wl,-dynamic-linker,/system/bin/linker',
+                  '-Wl,--gc-sections',
+                  '-Wl,-z,nocopyreloc',
+                  # crtbegin_dynamic.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_dynamic.o',
+                ],
+                'libraries': [
+                  # crtend_android.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_android.o',
+                ],
+                'conditions': [
+                  ['asan==1', {
+                    'cflags': [
+                      '-fPIE',
+                    ],
+                    'ldflags': [
+                      '-pie',
+                    ],
+                  }],
+                ],
+              }],
+              ['_type=="shared_library" or _type=="loadable_module"', {
+                'ldflags': [
+                  '-Wl,-shared,-Bsymbolic',
+                  # crtbegin_so.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_so.o',
+                ],
+                'libraries': [
+                  # crtend_so.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_so.o',
+                ],
+              }],
+            ],
+          }],
+          # Settings for building host targets using the system toolchain.
+          ['_toolset=="host"', {
+            'cflags!': [
+              # Due to issues in Clang build system, using ASan on 32-bit
+              # binaries on x86_64 host is problematic.
+              # TODO(eugenis): re-enable.
+              '-fsanitize=address',
+              '-w',  # http://crbug.com/162783
+            ],
+            'ldflags!': [
+              '-fsanitize=address',
+              '-Wl,-z,noexecstack',
+              '-Wl,--gc-sections',
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'sources/': [
+              ['exclude', '_android(_unittest)?\\.cc$'],
+              ['exclude', '(^|/)android/']
+            ],
+          }],
+        ],
+      },
+    }],
+    ['OS=="solaris"', {
+      'cflags!': ['-fvisibility=hidden'],
+      'cflags_cc!': ['-fvisibility-inlines-hidden'],
+    }],
+    ['OS=="mac" or OS=="ios"', {
+      'target_defaults': {
+        'mac_bundle': 0,
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          # Don't link in libarclite_macosx.a, see http://crbug.com/156530.
+          'CLANG_LINK_OBJC_RUNTIME': 'NO',          # -fno-objc-link-runtime
+          'GCC_C_LANGUAGE_STANDARD': 'c99',         # -std=c99
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+          'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+          'GCC_OBJC_CALL_CXX_CDTORS': 'YES',        # -fobjc-call-cxx-cdtors
+          'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',      # -fvisibility=hidden
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',    # -Werror
+          'GCC_VERSION': '4.2',
+          'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',  # -Wnewline-eof
+          'USE_HEADERMAP': 'NO',
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-Wextra',
+            # Don't warn about unused function parameters.
+            '-Wno-unused-parameter',
+            # Don't warn about the "struct foo f = {0};" initialization
+            # pattern.
+            '-Wno-missing-field-initializers',
+          ],
+          'conditions': [
+            ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+                                 {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+            ],
+            # Note that the prebuilt Clang binaries should not be used for iOS
+            # development except for ASan builds.
+            ['clang==1', {
+              'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+              'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+
+              # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
+              # when building with clang. This warning is triggered when the
+              # override keyword is used via the OVERRIDE macro from
+              # base/compiler_specific.h.
+              'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+              # Warn if automatic synthesis is triggered with
+              # the -Wobjc-missing-property-synthesis flag.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'WARNING_CFLAGS': [
+                '-Wheader-hygiene',
+
+                # This warns on using ints as initializers for floats in
+                # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+                # which happens in several places in chrome code. Not sure if
+                # this is worth fixing.
+                '-Wno-c++11-narrowing',
+
+                # This warns about code like |"0x%08"NACL_PRIxPTR| -- with C++11
+                # user-defined literals, this is now a string literal with a UD
+                # suffix. However, this is used heavily in NaCl code, so disable
+                # the warning for now.
+                '-Wno-reserved-user-defined-literal',
+
+                # Don't die on dtoa code that uses a char as an array index.
+                # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+                '-Wno-char-subscripts',
+                # Clang spots more unused functions.
+                '-Wno-unused-function',
+
+                # Warns on switches on enums that cover all enum values but
+                # also contain a default: branch. Chrome is full of that.
+                '-Wno-covered-switch-default',
+              ],
+              'OTHER_CPLUSPLUSFLAGS': [
+                # gnu++11 instead of c++11 so that __ANSI_C__ doesn't get
+                # defined.  (Else e.g. finite() in base/float_util.h needs to
+                # be isfinite() which doesn't exist on the android bots.)
+                # typeof() is also disabled in c++11 (but we could use
+                # decltype() instead).
+                # TODO(thakis): Use CLANG_CXX_LANGUAGE_STANDARD instead once all
+                # bots use xcode 4 -- http://crbug.com/147515.
+                # TODO(thakis): Eventually switch this to c++11 instead of
+                # gnu++11 (once typeof can be removed, which is blocked on c++11
+                # being available everywhere).
+                '$(inherited)', '-std=gnu++11',
+              ],
+            }],
+            ['clang==1 and clang_use_chrome_plugins==1', {
+              'OTHER_CFLAGS': [
+                '<@(clang_chrome_plugins_flags)',
+              ],
+            }],
+            ['clang==1 and clang_load!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-load', '-Xclang', '<(clang_load)',
+              ],
+            }],
+            ['clang==1 and clang_add_plugin!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+              ],
+            }],
+            ['clang==1 and "<(GENERATOR)"=="ninja"', {
+              'OTHER_CFLAGS': [
+                # See http://crbug.com/110262
+                '-fcolor-diagnostics',
+              ],
+            }],
+          ],
+        },
+        'conditions': [
+          ['clang==1', {
+            'variables': {
+              'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+            },
+          }],
+          ['asan==1', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=address',
+                '-w',  # http://crbug.com/162783
+              ],
+            },
+            'defines': [
+              'ADDRESS_SANITIZER',
+            ],
+          }],
+        ],
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+            'conditions': [
+              ['asan==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-fsanitize=address',
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['_mac_bundle', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac" or OS=="ios"
+    ['OS=="mac"', {
+      'target_defaults': {
+        'variables': {
+          # These should end with %, but there seems to be a bug with % in
+          # variables that are intended to be set to different values in
+          # different targets, like these.
+          'mac_pie': 1,        # Most executables can be position-independent.
+          'mac_real_dsym': 0,  # Fake .dSYMs are fine in most cases.
+          # Strip debugging symbols from the target.
+          'mac_strip': '<(mac_strip_release)',
+        },
+        'xcode_settings': {
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+          'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+          # Keep pch files below xcodebuild/.
+          'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+          'OTHER_CFLAGS': [
+            '-fno-strict-aliasing',  # See http://crbug.com/32204
+          ],
+        },
+        'target_conditions': [
+          ['_type=="executable"', {
+            'postbuilds': [
+              {
+                # Arranges for data (heap) pages to be protected against
+                # code execution when running on Mac OS X 10.7 ("Lion"), and
+                # ensures that the position-independent executable (PIE) bit
+                # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+                'variables': {
+                  # Define change_mach_o_flags in a variable ending in _path
+                  # so that GYP understands it's a path and performs proper
+                  # relativization during dict merging.
+                  'change_mach_o_flags_path':
+                      'mac/change_mach_o_flags_from_xcode.sh',
+                  'change_mach_o_flags_options%': [
+                  ],
+                  'target_conditions': [
+                    ['mac_pie==0 or release_valgrind_build==1', {
+                      # Don't enable PIE if it's unwanted. It's unwanted if
+                      # the target specifies mac_pie=0 or if building for
+                      # Valgrind, because Valgrind doesn't understand slide.
+                      # See the similar mac_pie/release_valgrind_build check
+                      # below.
+                      'change_mach_o_flags_options': [
+                        '--no-pie',
+                      ],
+                    }],
+                  ],
+                },
+                'postbuild_name': 'Change Mach-O Flags',
+                'action': [
+                  '<(change_mach_o_flags_path)',
+                  '>@(change_mach_o_flags_options)',
+                ],
+              },
+            ],
+            'conditions': [
+              ['asan==1', {
+                'variables': {
+                 'asan_saves_file': 'asan.saves',
+                },
+                'xcode_settings': {
+                  'CHROMIUM_STRIP_SAVE_FILE': '<(asan_saves_file)',
+                },
+              }],
+            ],
+            'target_conditions': [
+              ['mac_pie==1 and release_valgrind_build==0', {
+                # Turn on position-independence (ASLR) for executables. When
+                # PIE is on for the Chrome executables, the framework will
+                # also be subject to ASLR.
+                # Don't do this when building for Valgrind, because Valgrind
+                # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+                # understand slide, and get rid of the Valgrind check.
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-pie',  # Position-independent executable (MH_PIE)
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['(_type=="executable" or _type=="shared_library" or \
+             _type=="loadable_module") and mac_strip!=0', {
+            'target_conditions': [
+              ['mac_real_dsym == 1', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'configurations': {
+                  'Release_Base': {
+                    'xcode_settings': {
+                      'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                      'DEPLOYMENT_POSTPROCESSING': 'YES',
+                      'STRIP_INSTALLED_PRODUCT': 'YES',
+                      'target_conditions': [
+                        ['_type=="shared_library" or _type=="loadable_module"', {
+                          # The Xcode default is to strip debugging symbols
+                          # only (-S).  Local symbols should be stripped as
+                          # well, which will be handled by -x.  Xcode will
+                          # continue to insert -S when stripping even when
+                          # additional flags are added with STRIPFLAGS.
+                          'STRIPFLAGS': '-x',
+                        }],  # _type=="shared_library" or _type=="loadable_module"
+                      ],  # target_conditions
+                    },  # xcode_settings
+                  },  # configuration "Release"
+                },  # configurations
+              }, {  # mac_real_dsym != 1
+                # To get a fast fake .dSYM bundle, use a post-build step to
+                # produce the .dSYM and strip the executable.  strip_from_xcode
+                # only operates in the Release configuration.
+                'postbuilds': [
+                  {
+                    'variables': {
+                      # Define strip_from_xcode in a variable ending in _path
+                      # so that gyp understands it's a path and performs proper
+                      # relativization during dict merging.
+                      'strip_from_xcode_path': 'mac/strip_from_xcode',
+                    },
+                    'postbuild_name': 'Strip If Needed',
+                    'action': ['<(strip_from_xcode_path)'],
+                  },
+                ],  # postbuilds
+              }],  # mac_real_dsym
+            ],  # target_conditions
+          }],  # (_type=="executable" or _type=="shared_library" or
+               #  _type=="loadable_module") and mac_strip!=0
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac"
+    ['OS=="ios"', {
+      'target_defaults': {
+        'xcode_settings' : {
+          'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+
+          # This next block is mostly common with the 'mac' section above,
+          # but keying off (or setting) 'clang' isn't valid for iOS as it
+          # also seems to mean using the custom build of clang.
+
+          # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
+          # when building with clang. This warning is triggered when the
+          # override keyword is used via the OVERRIDE macro from
+          # base/compiler_specific.h.
+          'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+          # Warn if automatic synthesis is triggered with
+          # the -Wobjc-missing-property-synthesis flag.
+          'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+          'WARNING_CFLAGS': [
+            '-Wheader-hygiene',
+            # Don't die on dtoa code that uses a char as an array index.
+            # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+            '-Wno-char-subscripts',
+            # Clang spots more unused functions.
+            '-Wno-unused-function',
+            # See comments on this flag higher up in this file.
+            '-Wno-unnamed-type-template-args',
+            # This (rightfully) complains about 'override', which we use
+            # heavily.
+            '-Wno-c++11-extensions',
+          ],
+        },
+        'target_conditions': [
+          ['_type=="executable"', {
+            'configurations': {
+              'Release_Base': {
+                'xcode_settings': {
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              },
+              'Debug_Base': {
+                'xcode_settings': {
+                  # Remove dSYM to reduce build time.
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf',
+                },
+              },
+            },
+            'xcode_settings': {
+              'conditions': [
+                ['chromium_ios_signing', {
+                  # iOS SDK wants everything for device signed.
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+                }, {
+                  'CODE_SIGNING_REQUIRED': 'NO',
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+                }],
+              ],
+            },
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="ios"
+    ['OS=="win"', {
+      'target_defaults': {
+        'defines': [
+          '_WIN32_WINNT=0x0602',
+          'WINVER=0x0602',
+          'WIN32',
+          '_WINDOWS',
+          'NOMINMAX',
+          'PSAPI_VERSION=1',
+          '_CRT_RAND_S',
+          'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+          'WIN32_LEAN_AND_MEAN',
+          '_ATL_NO_OPENGL',
+        ],
+        'conditions': [
+          ['buildtype=="Official"', {
+              # In official builds, targets can self-select an optimization
+              # level by defining a variable named 'optimize', and setting it
+              # to one of
+              # - "size", optimizes for minimal code size - the default.
+              # - "speed", optimizes for speed over code size.
+              # - "max", whole program optimization and link-time code
+              #   generation. This is very expensive and should be used
+              #   sparingly.
+              'variables': {
+                'optimize%': 'size',
+              },
+              'target_conditions': [
+                ['optimize=="size"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="speed"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="max"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                        # This implies link time code generation.
+                        'WholeProgramOptimization': 'true',
+                      },
+                    },
+                  },
+                ],
+              ],
+            },
+          ],
+          ['component=="static_library"', {
+            'defines': [
+              '_HAS_EXCEPTIONS=0',
+            ],
+          }],
+          ['MSVS_VERSION=="2008"', {
+            'defines': [
+              '_HAS_TR1=0',
+            ],
+          }],
+          ['secure_atl', {
+            'defines': [
+              '_SECURE_ATL',
+            ],
+          }],
+          ['msvs_express', {
+            'configurations': {
+              'x86_Base': {
+                'msvs_settings': {
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/i386'],
+                  },
+                  'VCLibrarianTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/i386'],
+                  },
+                },
+              },
+              'x64_Base': {
+                'msvs_settings': {
+                  'VCLibrarianTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/amd64'],
+                  },
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/amd64'],
+                  },
+                },
+              },
+            },
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # Explicitly required when using the ATL with express
+                'AdditionalDependencies': ['atlthunk.lib'],
+
+                # ATL 8.0 included in WDK 7.1 makes the linker to generate
+                # almost eight hundred LNK4254 and LNK4078 warnings:
+                #   - warning LNK4254: section 'ATL' (50000040) merged into
+                #     '.rdata' (40000040) with different attributes
+                #   - warning LNK4078: multiple 'ATL' sections found with
+                #     different attributes
+                'AdditionalOptions': ['/ignore:4254', '/ignore:4078'],
+              },
+            },
+            'msvs_system_include_dirs': [
+              '<(windows_driver_kit_path)/inc/atl71',
+              '<(windows_driver_kit_path)/inc/mfc42',
+            ],
+          }],
+        ],
+        'msvs_system_include_dirs': [
+          '<(windows_sdk_path)/Include/shared',
+          '<(windows_sdk_path)/Include/um',
+          '<(windows_sdk_path)/Include/winrt',
+          '<(directx_sdk_path)/Include',
+          '$(VSInstallDir)/VC/atlmfc/include',
+        ],
+        'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
+        'msvs_disabled_warnings': [4351, 4396, 4503, 4819,
+          # TODO(maruel): These warnings are level 4. They will be slowly
+          # removed as code is fixed.
+          4100, 4121, 4125, 4127, 4130, 4131, 4189, 4201, 4238, 4244, 4245,
+          4310, 4355, 4428, 4481, 4505, 4510, 4512, 4530, 4610, 4611, 4701,
+          4702, 4706,
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'AdditionalOptions': ['/MP'],
+            'MinimalRebuild': 'false',
+            'BufferSecurityCheck': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'RuntimeTypeInfo': 'false',
+            'WarningLevel': '4',
+            'WarnAsError': 'true',
+            'DebugInformationFormat': '3',
+            'conditions': [
+              ['component=="shared_library"', {
+                'ExceptionHandling': '1',  # /EHsc
+              }, {
+                'ExceptionHandling': '0',
+              }],
+            ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': ['/ignore:4221'],
+            'AdditionalLibraryDirectories': [
+              '<(directx_sdk_path)/Lib/x86',
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+          },
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'wininet.lib',
+              'dnsapi.lib',
+              'version.lib',
+              'msimg32.lib',
+              'ws2_32.lib',
+              'usp10.lib',
+              'psapi.lib',
+              'dbghelp.lib',
+              'winmm.lib',
+              'shlwapi.lib',
+            ],
+
+            'conditions': [
+              ['MSVS_VERSION=="2005e"', {
+                # Non-express versions link automatically to these
+                'AdditionalDependencies': [
+                  'advapi32.lib',
+                  'comdlg32.lib',
+                  'ole32.lib',
+                  'shell32.lib',
+                  'user32.lib',
+                  'winspool.lib',
+                ],
+              }],
+            ],
+            'AdditionalLibraryDirectories': [
+              '<(directx_sdk_path)/Lib/x86',
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+            'GenerateDebugInformation': 'true',
+            'MapFileName': '$(OutDir)\\$(TargetName).map',
+            'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+            'FixedBaseAddress': '1',
+            # SubSystem values:
+            #   0 == not set
+            #   1 == /SUBSYSTEM:CONSOLE
+            #   2 == /SUBSYSTEM:WINDOWS
+            # Most of the executables we'll ever create are tests
+            # and utilities with console output.
+            'SubSystem': '1',
+          },
+          'VCMIDLTool': {
+            'GenerateStublessProxies': 'true',
+            'TypeLibraryName': '$(InputName).tlb',
+            'OutputDirectory': '$(IntDir)',
+            'HeaderFileName': '$(InputName).h',
+            'DLLDataFileName': '$(InputName).dlldata.c',
+            'InterfaceIdentifierFileName': '$(InputName)_i.c',
+            'ProxyFileName': '$(InputName)_p.c',
+          },
+          'VCResourceCompilerTool': {
+            'Culture' : '1033',
+            'AdditionalIncludeDirectories': [
+              '<(DEPTH)',
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+        },
+      },
+    }],
+    ['disable_nacl==1', {
+      'target_defaults': {
+        'defines': [
+          'DISABLE_NACL',
+        ],
+      },
+    }],
+    ['OS=="win" and msvs_use_common_linker_extras', {
+      'target_defaults': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'DelayLoadDLLs': [
+              'dbghelp.dll',
+              'dwmapi.dll',
+              'shell32.dll',
+              'uxtheme.dll',
+            ],
+          },
+        },
+        'configurations': {
+          'x86_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  '/safeseh',
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+          'x64_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  # safeseh is not compatible with x64
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+        },
+      },
+    }],
+    ['enable_new_npdevice_api==1', {
+      'target_defaults': {
+        'defines': [
+          'ENABLE_NEW_NPDEVICE_API',
+        ],
+      },
+    }],
+    ['clang==1', {
+      'conditions': [
+        ['OS=="android"', {
+          # Android could use the goma with clang.
+          'make_global_settings': [
+            ['CC', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang)'],
+            ['CXX', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang++)'],
+            ['LINK', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang++)'],
+            ['CC.host', '$(CC)'],
+            ['CXX.host', '$(CXX)'],
+            ['LINK.host', '$(LINK)'],
+          ],
+        }, {
+          'make_global_settings': [
+            ['CC', '<(make_clang_dir)/bin/clang'],
+            ['CXX', '<(make_clang_dir)/bin/clang++'],
+            ['LINK', '$(CXX)'],
+            ['CC.host', '$(CC)'],
+            ['CXX.host', '$(CXX)'],
+            ['LINK.host', '$(LINK)'],
+          ],
+        }],
+      ],
+    }],
+    ['OS=="android" and clang==0', {
+      # Hardcode the compiler names in the Makefile so that
+      # it won't depend on the environment at make time.
+      'make_global_settings': [
+        ['CC', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-gcc)'],
+        ['CXX', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-g++)'],
+        ['LINK', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-gcc)'],
+        ['CC.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which gcc))'],
+        ['CXX.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which g++))'],
+        ['LINK.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which g++))'],
+      ],
+    }],
+  ],
+  'xcode_settings': {
+    # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+    # This block adds *project-wide* configuration settings to each project
+    # file.  It's almost always wrong to put things here.  Specify your
+    # custom xcode_settings in target_defaults to add them to targets instead.
+
+    'conditions': [
+      # In an Xcode Project Info window, the "Base SDK for All Configurations"
+      # setting sets the SDK on a project-wide basis. In order to get the
+      # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+      # here at the project level.
+      ['OS=="mac"', {
+        'conditions': [
+          ['mac_sdk_path==""', {
+            'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+          }, {
+            'SDKROOT': '<(mac_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        'conditions': [
+          ['ios_sdk_path==""', {
+            'SDKROOT': 'iphoneos<(ios_sdk)',  # -isysroot
+          }, {
+            'SDKROOT': '<(ios_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        'ARCHS': '$(ARCHS_UNIVERSAL_IPHONE_OS)',
+        # Just build armv7, until armv7s is correctly tested.
+        'VALID_ARCHS': 'armv7 i386',
+        'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)',
+        # Target both iPhone and iPad.
+        'TARGETED_DEVICE_FAMILY': '1,2',
+      }],
+    ],
+
+    # The Xcode generator will look for an xcode_settings section at the root
+    # of each dict and use it to apply settings on a file-wide basis.  Most
+    # settings should not be here, they should be in target-specific
+    # xcode_settings sections, or better yet, should use non-Xcode-specific
+    # settings in target dicts.  SYMROOT is a special case, because many other
+    # Xcode variables depend on it, including variables such as
+    # PROJECT_DERIVED_FILE_DIR.  When a source group corresponding to something
+    # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+    # files to appear (when present) in the UI as actual files and not
+    # red "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+    # and therefore SYMROOT, needs to be set at the project level.
+    'SYMROOT': '<(DEPTH)/xcodebuild',
+  },
+}
diff --git a/src/build/common_untrusted.gypi b/src/build/common_untrusted.gypi
new file mode 100644
index 0000000..086264e
--- /dev/null
+++ b/src/build/common_untrusted.gypi
@@ -0,0 +1,29 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This GYP file should be included for every target in Chromium that is built
+# using the NaCl toolchain.
+{
+  'includes': [
+    '../native_client/build/untrusted.gypi',
+  ],
+  'target_defaults': {
+    'conditions': [
+      ['target_arch=="arm"', {
+        'variables': {
+          'clang': 1,
+        },
+        'defines': [
+          # Needed by build/build_config.h processor architecture detection.
+          '__ARMEL__',
+          # Needed by base/third_party/nspr/prtime.cc.
+          '__arm__',
+          # Disable ValGrind. The assembly code it generates causes the build
+          # to fail.
+          'NVALGRIND',
+        ],
+      }],
+    ],
+  },
+}
\ No newline at end of file
diff --git a/src/build/compiler_version.py b/src/build/compiler_version.py
new file mode 100755
index 0000000..b349199
--- /dev/null
+++ b/src/build/compiler_version.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
def GetVersion(compiler):
  """Returns |compiler|'s major+minor version as a string, e.g. "44".

  |compiler| is the command to run; it may contain a wrapper such as
  "distcc g++".  " -dumpversion" is appended and the command is run through
  the shell.  On any failure this logs to stderr and returns "".
  """
  # Initialize so the except block can't hit an unbound local if Popen
  # itself raises before communicate() runs.
  gcc_error = None
  try:
    # Note that compiler could be something tricky like "distcc g++".
    compiler = compiler + " -dumpversion"
    # universal_newlines=True makes communicate() return text under
    # Python 3 (it is harmless under Python 2), so re.match works.
    pipe = subprocess.Popen(compiler, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            universal_newlines=True)
    gcc_output, gcc_error = pipe.communicate()
    if pipe.returncode:
      raise subprocess.CalledProcessError(pipe.returncode, compiler)

    result = re.match(r"(\d+)\.(\d+)", gcc_output)
    return result.group(1) + result.group(2)
  except Exception as e:
    # 'as' form (not 'except Exception, e') stays valid under Python 3.
    if gcc_error:
      sys.stderr.write(gcc_error)
    sys.stderr.write("compiler_version.py failed to execute: %s\n" % compiler)
    sys.stderr.write("%s\n" % e)
    return ""
+
def main():
  """Prints the detected compiler version; returns 0 on success, 1 otherwise.

  Prefers the compiler named by the CXX environment variable.  Note that
  when CXX is set but its version cannot be determined, we deliberately do
  NOT fall back to g++ (that matches the original control flow).
  """
  # Check if CXX environment variable exists and
  # if it does use that compiler.
  cxx = os.getenv("CXX", None)
  if cxx:
    cxxversion = GetVersion(cxx)
    if cxxversion != "":
      # Parenthesized single-arg print is valid under Python 2 and 3.
      print(cxxversion)
      return 0
  else:
    # Otherwise we check the g++ version.
    gccversion = GetVersion("g++")
    if gccversion != "":
      print(gccversion)
      return 0

  return 1
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/copy_test_data_ios.gypi b/src/build/copy_test_data_ios.gypi
new file mode 100644
index 0000000..56a222f
--- /dev/null
+++ b/src/build/copy_test_data_ios.gypi
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to copy test data files into
+# an iOS app bundle. To use this the following variables need to be defined:
+#   test_data_files: list: paths to test data files or directories
+#   test_data_prefix: string: a directory prefix that will be prepended to each
+#                             output path.  Generally, this should be the base
+#                             directory of the gypi file containing the unittest
+#                             target (e.g. "base" or "chrome").
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_unittests',
+#   'conditions': [
+#     ['OS == "ios"', {
+#       'actions': [
+#         {
+#           'action_name': 'copy_test_data',
+#           'variables': {
+#             'test_data_files': [
+#               'path/to/datafile.txt',
+#               'path/to/data/directory/',
+#             ],
+#             'test_data_prefix' : 'prefix',
+#           },
+#           'includes': ['path/to/this/gypi/file'],
+#         },
+#       ],
+#     }],
+#   ],
+# }
+#
+
+{
+  'inputs': [
+    '<!@pymod_do_main(copy_test_data_ios --inputs <(test_data_files))',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(copy_test_data_ios -o <(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix) --outputs <(test_data_files))',
+  ],
+  'action': [
+    'python',
+    '<(DEPTH)/build/copy_test_data_ios.py',
+    '-o', '<(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix)',
+    '<@(_inputs)',
+  ],
+}
diff --git a/src/build/copy_test_data_ios.py b/src/build/copy_test_data_ios.py
new file mode 100755
index 0000000..6f0302f
--- /dev/null
+++ b/src/build/copy_test_data_ios.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
class WrongNumberOfArgumentsException(Exception):
  """Raised when the command line lacks a required argument."""
+
def EscapePath(path):
  """Returns |path| with every space backslash-escaped."""
  # Splitting on single spaces and re-joining with an escaped space is
  # equivalent to replace(" ", "\\ "), including for runs of spaces.
  return "\\ ".join(path.split(" "))
+
def ListFilesForPath(path):
  """Returns a list of all the files under a given path.

  Revision-control metadata directories (.git*, .svn*) are skipped; a plain
  file is returned as a single-element list; directories are expanded
  recursively.
  """
  basename = os.path.basename(path)
  # Ignore revision control metadata directories.
  if basename.startswith('.git') or basename.startswith('.svn'):
    return []

  # Files get returned without modification.
  if not os.path.isdir(path):
    return [path]

  # Directories get recursively expanded.
  found = []
  for entry in os.listdir(path):
    found += ListFilesForPath(os.path.join(path, entry))
  return found
+
def CalcInputs(inputs):
  """Computes the full list of input files for a set of command-line arguments.

  |inputs| is a list of paths, which may be directories; each directory is
  expanded (via ListFilesForPath) to the files beneath it.
  """
  # Flattening comprehension; the original loop variable was named 'input',
  # which shadowed the builtin.
  return [found for path in inputs for found in ListFilesForPath(path)]
+
def CopyFiles(relative_filenames, output_basedir):
  """Copies files to the given output directory.

  Each name in |relative_filenames| is copied to the same relative location
  beneath |output_basedir|, creating intermediate directories as needed.
  """
  # Loop variable renamed from 'file', which shadowed the builtin.
  for filename in relative_filenames:
    relative_dirname = os.path.dirname(filename)
    output_dir = os.path.join(output_basedir, relative_dirname)
    output_filename = os.path.join(output_basedir, filename)

    # In cases where a directory has turned into a file or vice versa, delete it
    # before copying it below.
    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
      os.remove(output_dir)
    if os.path.exists(output_filename) and os.path.isdir(output_filename):
      shutil.rmtree(output_filename)

    if not os.path.exists(output_dir):
      os.makedirs(output_dir)
    # shutil.copy (not copyfile) preserves the permission bits.
    shutil.copy(filename, output_filename)
+
def DoMain(argv):
  """Implements the script's copy/list behavior; see |usage| below.

  Returns a string to print for --inputs/--outputs, or None after copying.
  Raises WrongNumberOfArgumentsException when required arguments are
  missing.
  """
  parser = optparse.OptionParser()
  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
  parser.set_usage(usage)
  parser.add_option('-o', dest='output_dir')
  parser.add_option('--inputs', action='store_true', dest='list_inputs')
  parser.add_option('--outputs', action='store_true', dest='list_outputs')
  options, arglist = parser.parse_args(argv)

  if len(arglist) == 0:
    raise WrongNumberOfArgumentsException('<input_files> required.')

  # Expand the input list once; the original called CalcInputs(arglist)
  # twice and therefore walked the filesystem twice.
  files_to_copy = CalcInputs(arglist)
  escaped_files = [EscapePath(x) for x in files_to_copy]
  if options.list_inputs:
    return '\n'.join(escaped_files)

  if not options.output_dir:
    raise WrongNumberOfArgumentsException('-o required.')

  if options.list_outputs:
    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
    return '\n'.join(outputs)

  CopyFiles(files_to_copy, options.output_dir)
  return
+
def main(argv):
  """Command-line entry point; returns a process exit status (0/1)."""
  try:
    result = DoMain(argv[1:])
  except WrongNumberOfArgumentsException as e:
    # 'as' form and an explicit stderr write keep this file valid under
    # both Python 2 and Python 3 (was: 'except ..., e' / 'print >>').
    sys.stderr.write('%s\n' % e)
    return 1
  if result:
    print(result)
  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/cp.py b/src/build/cp.py
new file mode 100755
index 0000000..dd98e1d
--- /dev/null
+++ b/src/build/cp.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import shutil
+import sys
+
+
def Main(src, dst):
  """Copies |src| to |dst| and returns 0.

  Uses shutil.copy instead of copyfile to ensure the executable bit is
  copied.  The original returned shutil.copy's result directly; under
  Python 3 that is the destination path, and sys.exit(<non-empty string>)
  prints the string and reports failure.  Returning 0 keeps the exit
  status a reliable success indicator.
  """
  shutil.copy(src, dst)
  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/src/build/dir_exists.py b/src/build/dir_exists.py
new file mode 100755
index 0000000..0a89bc8
--- /dev/null
+++ b/src/build/dir_exists.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
def main():
  """Writes "True"/"False" to stdout per whether sys.argv[1] is a directory."""
  target = sys.argv[1]
  is_dir = os.path.isdir(target)
  # No trailing newline: callers consume the raw token.
  sys.stdout.write(str(is_dir))
  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/download_nacl_toolchains.py b/src/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..c2007d0
--- /dev/null
+++ b/src/build/download_nacl_toolchains.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import sys
+
+
def Main(args):
  """Runs NaCl's download_toolchains.py if a native_client checkout exists.

  |args| are forwarded to download_toolchains.main() after rewriting the
  --optional-pnacl flag.  Returns 0; also calls sys.exit(0) early when the
  NaCl download script is absent.  Disabled entirely when GYP_DEFINES
  contains disable_nacl=1.
  """
  # Exit early if disable_nacl=1.
  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
    return 0
  script_dir = os.path.dirname(os.path.abspath(__file__))
  src_dir = os.path.dirname(script_dir)
  nacl_dir = os.path.join(src_dir, 'native_client')
  nacl_build_dir = os.path.join(nacl_dir, 'build')
  download_script = os.path.join(nacl_build_dir, 'download_toolchains.py')
  if not os.path.exists(download_script):
    print "Can't find '%s'" % download_script
    print 'Presumably you are intentionally building without NativeClient.'
    print 'Skipping NativeClient toolchain download.'
    sys.exit(0)
  # Deferred import: the module only exists inside the NaCl checkout,
  # which the existence check above just confirmed.
  sys.path.insert(0, nacl_build_dir)
  import download_toolchains

  # TODO (robertm): Finish getting PNaCl ready for prime time.
  # BUG:
  # We remove this --optional-pnacl argument, and instead replace it with
  # --no-pnacl for most cases.  However, if the bot name is the pnacl_sdk
  # bot then we will go ahead and download it.  This prevents increasing the
  # gclient sync time for developers, or standard Chrome bots.
  if '--optional-pnacl' in args:
    args.remove('--optional-pnacl')
    # By default we don't use PNaCl toolchain yet, unless on ARM, where
    # there is no other toolchain to build untrusted code at the moment.
    # So analyze if we're building for ARM, or on SDK buildbot.
    # TODO(olonho): we need to invent more reliable way to get build
    # configuration info, to know if we're building for ARM.
    use_pnacl = False
    if 'target_arch=arm' in os.environ.get('GYP_DEFINES', ''):
      use_pnacl = True
    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
    if buildbot_name.find('pnacl') >= 0 and  buildbot_name.find('sdk') >= 0:
      use_pnacl = True
    if use_pnacl:
      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
    else:
      args.append('--no-pnacl')

  # Append the name of the file to use as a version and hash source.
  # NOTE:  While not recommended, it is possible to redirect this file to
  # a chrome location to avoid branching NaCl if just a toolchain needs
  # to be bumped.
  args.append(os.path.join(nacl_dir,'TOOL_REVISIONS'))

  download_toolchains.main(args)
  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/src/build/escape_unicode.py b/src/build/escape_unicode.py
new file mode 100755
index 0000000..859ba5d
--- /dev/null
+++ b/src/build/escape_unicode.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert any unicode characters found in the input file to C literals."""
+
+import codecs
+import optparse
+import os
+import sys
+
+
def main(argv):
  """Parses -o <output_dir> and <input_file> from |argv| and escapes the file.

  The input must end in '.utf8'; the output keeps the input's basename
  minus that extension.  Returns 0 on success, 1 on a usage error.
  """
  parser = optparse.OptionParser()
  usage = 'Usage: %prog -o <output_dir> <input_file>'
  parser.set_usage(usage)
  parser.add_option('-o', dest='output_dir')

  options, arglist = parser.parse_args(argv)

  # Parenthesized single-arg print calls are valid under both Python 2
  # and Python 3 (the originals were Python-2-only print statements).
  if not options.output_dir:
    print("output_dir required")
    return 1

  if len(arglist) != 2:
    print("input_file required")
    return 1

  in_filename = arglist[1]

  if not in_filename.endswith('.utf8'):
    print("input_file should end in .utf8")
    return 1

  # Output path: <output_dir>/<basename of input without the .utf8 suffix>.
  out_filename = os.path.join(options.output_dir, os.path.basename(
      os.path.splitext(in_filename)[0]))

  WriteEscapedFile(in_filename, out_filename)
  return 0
+
+
def WriteEscapedFile(in_filename, out_filename):
  """Writes an ASCII copy of |in_filename| with non-ASCII characters escaped.

  Each character above 0x7f is written as the repr of its UTF-8 byte
  sequence with the surrounding quotes stripped; ASCII characters pass
  through unchanged.  NOTE(review): this relies on Python-2 str semantics
  (repr of a byte string, char.encode on a 1-char str) -- under Python 3
  repr(bytes) would include a b'' prefix; confirm before migrating.
  """
  input_data = codecs.open(in_filename, 'r', 'utf8').read()
  with codecs.open(out_filename, 'w', 'ascii') as out_file:
    for i, char in enumerate(input_data):
      if ord(char) > 127:
        # [1:-1] strips repr's surrounding quotes, leaving only \xNN escapes.
        out_file.write(repr(char.encode('utf8'))[1:-1])
        # If the next character is a hex digit it would be absorbed into the
        # trailing \x escape in the generated C literal; emit "" to split it.
        if input_data[i + 1:i + 2] in '0123456789abcdefABCDEF':
          out_file.write('""')
      else:
        out_file.write(char.encode('ascii'))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/extract_from_cab.py b/src/build/extract_from_cab.py
new file mode 100755
index 0000000..1c928af
--- /dev/null
+++ b/src/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
def run_quiet(*args):
  """Run 'expand' suppressing noisy output. Returns returncode from process.

  |args| is the argv list for subprocess.Popen; the child's stdout is
  captured and only printed when the process fails.
  """
  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
  out, _ = popen.communicate()
  if popen.returncode:
    # expand emits errors to stdout, so if we fail, then print that out.
    # (Function-style print keeps this valid under Python 3 too; the
    # docstring typo "supressing" is also fixed.)
    print(out)
  return popen.returncode
+
def main():
  """Extracts one file from a CAB archive using the Windows 'expand' tool.

  Usage: extract_from_cab.py cab_path archived_file output_dir
  Returns a process exit status (0 on success, expand's status on failure).
  Windows-only: requires expand.exe on PATH.
  """
  if len(sys.argv) != 4:
    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
    return 1

  [cab_path, archived_file, output_dir] = sys.argv[1:]

  # Expand.exe does its work in a fixed-named temporary directory created within
  # the given output directory. This is a problem for concurrent extractions, so
  # create a unique temp dir within the desired output directory to work around
  # this limitation.
  temp_dir = tempfile.mkdtemp(dir=output_dir)

  try:
    # Invoke the Windows expand utility to extract the file.
    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
    if level == 0:
      # Move the output file into place, preserving expand.exe's behavior of
      # paving over any preexisting file.
      output_file = os.path.join(output_dir, archived_file)
      try:
        os.remove(output_file)
      except OSError:
        # Destination didn't exist; nothing to pave over.
        pass
      os.rename(os.path.join(temp_dir, archived_file), output_file)
  finally:
    # Best-effort cleanup of the scratch dir (second arg = ignore_errors).
    shutil.rmtree(temp_dir, True)

  if level != 0:
    return level

  # The expand utility preserves the modification date and time of the archived
  # file. Touch the extracted file. This helps build systems that compare the
  # modification times of input and output files to determine whether to do an
  # action.
  os.utime(os.path.join(output_dir, archived_file), None)
  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/filename_rules.gypi b/src/build/filename_rules.gypi
new file mode 100644
index 0000000..9fd40db
--- /dev/null
+++ b/src/build/filename_rules.gypi
@@ -0,0 +1,107 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform.  It is
+# included by common.gypi for chromium_code.
+
+{
+  'target_conditions': [
+    ['OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_win(_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)win/'],
+                    ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+    }],
+    ['OS!="mac" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)(cocoa|mac)/'] ],
+    }],
+    ['OS!="ios" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)ios/'] ],
+    }],
+    ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '\\.mm?$' ] ],
+    }],
+    # Do not exclude the linux files on *BSD since most of them can be
+    # shared at this point.
+    # In case a file is not needed, it is going to be excluded later on.
+    # TODO(evan): the above is not correct; we shouldn't build _linux
+    # files on non-linux.
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)linux/'],
+      ],
+    }],
+    ['OS!="android" and (OS!="lb_shell" or "<(target_arch)"!="android")', {
+      'sources/': [
+        ['exclude', '_android(_unittest)?\\.cc$'],
+        ['exclude', '(^|/)android/'],
+      ],
+    }],
+    ['OS=="lb_shell"', {
+      'sources/': [
+        # Re-include things in media/audio and media/filters
+        ['include', 'audio/shell_.*<(target_arch)'],
+        ['include', 'filters/shell_.*<(target_arch)'],
+        # Re-include things in lbshell
+        ['include', 'lbshell/src/platform/<(target_arch)'],
+        ['include', 'lbshell/src/platform/<(actual_target_arch)'],
+      ],
+    }],
+    ['(OS=="win" and >(nacl_untrusted_build)==0) or OS=="lb_shell" or OS=="starboard"', {
+      'sources/': [
+        ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)posix/'],
+      ],
+    }],
+    ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['>(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_(x|x11)(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+      ],
+    }],
+    ['<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)gtk/'],
+        ['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'],
+      ],
+    }],
+    ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_views\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aura(_unittest)?\\.(h|cc)$'],
+                    ['exclude', '_aura(_browsertest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)aura/'],
+      ]
+    }],
+    ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurax11\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+    }],
+    ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)ash/'],
+      ]
+    }],
+  ]
+}
diff --git a/src/build/gdb-add-index b/src/build/gdb-add-index
new file mode 100755
index 0000000..4975532
--- /dev/null
+++ b/src/build/gdb-add-index
@@ -0,0 +1,47 @@
#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Saves the gdb index for a given binary and its shared library dependencies.
#
# Usage: gdb-add-index path-to-binary
# Requires gdb (with "save gdb-index" support), readelf and objcopy.

set -e

if [[ ! $# == 1 ]]; then
  echo "Usage: $0 path-to-binary"
  exit 1
fi

FILENAME="$1"
if [[ ! -f "$FILENAME" ]]; then
  echo "Path $FILENAME does not exist."
  exit 1
fi

# We're good to go! Create temp directory for index files.
DIRECTORY=$(mktemp -d)
echo "Made temp directory $DIRECTORY."

# Always remove directory on exit.
trap "{ echo -n Removing temp directory $DIRECTORY...;
  rm -rf $DIRECTORY; echo done; }" EXIT

# Grab all the chromium shared library files.
# Only libraries resolved from the binary's own directory are considered.
so_files=$(ldd "$FILENAME" 2>/dev/null \
  | grep $(dirname "$FILENAME") \
  | sed "s/.*[ \t]\(.*\) (.*/\1/")

# Add index to binary and the shared library dependencies.
for file in "$FILENAME" $so_files; do
  basename=$(basename "$file")
  echo -n "Adding index to $basename..."
  readelf_out=$(readelf -S "$file")
  # Skip files that already carry a .gdb_index section.
  if [[ $readelf_out =~ "gdb_index" ]]; then
    echo "already contains index. Skipped."
  else
    gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
    objcopy --add-section .gdb_index="$DIRECTORY"/$basename.gdb-index \
      --set-section-flags .gdb_index=readonly "$file" "$file"
    echo "done."
  fi
done
diff --git a/src/build/grit_action.gypi b/src/build/grit_action.gypi
new file mode 100644
index 0000000..ef67ab5
--- /dev/null
+++ b/src/build/grit_action.gypi
@@ -0,0 +1,36 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+    'grit_resource_ids%': 'GRIT_DIR/../gritsettings/resource_ids',
+  },
+  'inputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) --inputs <(grit_grd_file) '
+        '-f "<(grit_resource_ids)")',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) --outputs \'<(grit_out_dir)\' '
+        '<(grit_grd_file) -f "<(grit_resource_ids)")',
+  ],
+  'action': ['<@(grit_cmd)',
+             '-i', '<(grit_grd_file)', 'build',
+             '-f<(grit_resource_ids)',
+             '-o', '<(grit_out_dir)',
+             '<@(grit_defines)' ],
+  'msvs_cygwin_shell': 0,
+  'message': 'Generating resources from <(grit_grd_file)',
+}
diff --git a/src/build/grit_target.gypi b/src/build/grit_target.gypi
new file mode 100644
index 0000000..fe9900b
--- /dev/null
+++ b/src/build/grit_target.gypi
@@ -0,0 +1,30 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this the following variables need to be
+# defined:
+#   grit_out_dir: string: the output directory path
+
+# NOTE: This file is optional, not all targets that use grit include it, some
+# do their own custom directives instead.
+{
+  'conditions': [
+    # If the target is a direct binary, it needs to be able to find the header,
+    # otherwise it is probably a supporting target just for grit so the include
+    # dir needs to be set on anything that depends on this action.
+    ['_type=="executable" or _type=="shared_library" or \
+      _type=="loadable_module" or _type=="static_library"', {
+      'include_dirs': [
+        '<(grit_out_dir)',
+      ],
+    }, {
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(grit_out_dir)',
+        ],
+      },
+    }],
+  ],
+}
diff --git a/src/build/gyp_chromium b/src/build/gyp_chromium
new file mode 100755
index 0000000..ee5b3f3
--- /dev/null
+++ b/src/build/gyp_chromium
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import glob
+import gyp_helper
+import os
+import shlex
+import subprocess
+import sys
+
# Locate this script and the src/ root relative to it.
script_dir = os.path.dirname(os.path.realpath(__file__))
chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))

# Put the bundled gyp first on sys.path so it wins over any system copy.
sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
import gyp

# Add paths so that pymod_do_main(...) can import files.
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))


# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
# seconds.  Conversely, memory usage of build/gyp_chromium with Psyco
# maxes out at about 158 MB vs. 132 MB without it.
#
# Psyco uses native libraries, so we need to load a different
# installation depending on which OS we are running under. It has not
# been tested whether using Psyco on our Mac and Linux builds is worth
# it (the GYP running time is a lot shorter, so the JIT startup cost
# may not be worth it).
if sys.platform == 'win32':
  try:
    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
    import psyco
  except:
    # Deliberate bare except: Psyco is purely a best-effort speedup, so any
    # import failure just disables it.  NOTE(review): narrowing this to
    # ImportError would be safer -- confirm no other failure mode matters.
    psyco = None
else:
  psyco = None
+
def additional_include_files(args=None):
  """
  Returns a list of additional (.gypi) files to include, without
  duplicating ones that are already specified on the command line.

  |args| is the gyp command line (default: no arguments).  The default was
  changed from the mutable [] literal, which is shared across calls, to
  None; behavior for all existing callers is unchanged.
  """
  if args is None:
    args = []
  # Determine the include files specified on the command line.
  # This doesn't cover all the different option formats you can use,
  # but it's mainly intended to avoid duplicating flags on the automatic
  # makefile regeneration which only uses this format.
  specified_includes = set()
  for arg in args:
    if arg.startswith('-I') and len(arg) > 2:
      specified_includes.add(os.path.realpath(arg[2:]))

  result = []
  def AddInclude(path):
    # Only add paths not already requested via -I on the command line.
    if os.path.realpath(path) not in specified_includes:
      result.append(path)

  # Always include common.gypi.
  AddInclude(os.path.join(script_dir, 'common.gypi'))

  # Optionally add supplemental .gypi files if present.
  supplements = glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
  for supplement in supplements:
    AddInclude(supplement)

  return result
+
# Script entry point: assemble the gyp argument list (gyp file, includes,
# environment-driven flags) and hand off to gyp.main().
if __name__ == '__main__':
  args = sys.argv[1:]

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  #    http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    python_dir = os.path.join(chrome_src, 'third_party', 'python_26')
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    # Re-run this same script under the bundled python, then mirror its
    # exit status.
    p = subprocess.Popen(
       [os.path.join(python_dir, 'python.exe')] + sys.argv,
       env=env, shell=False)
    p.communicate()
    sys.exit(p.returncode)

  gyp_helper.apply_chromium_gyp_env()

  # This could give false positives since it doesn't actually do real option
  # parsing.  Oh well.
  gyp_file_specified = False
  for arg in args:
    if arg.endswith('.gyp'):
      gyp_file_specified = True
      break

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  args.extend(['-I' + i for i in additional_include_files(args)])

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist.  The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on the Mac, where a
  # violation of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option.  http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if sys.platform not in ('darwin',):
    args.append('--no-circular-check')

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  print 'Updating projects from gyp files...'
  sys.stdout.flush()

  # Off we go...
  sys.exit(gyp.main(args))
diff --git a/src/build/gyp_chromium.py b/src/build/gyp_chromium.py
new file mode 100644
index 0000000..29bac98
--- /dev/null
+++ b/src/build/gyp_chromium.py
@@ -0,0 +1,9 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.

+# Use of this source code is governed by a BSD-style license that can be

+# found in the LICENSE file.

+

+# This is a workaround for multiprocessing on Windows. Importing in Python on

+# Windows doesn't search for imports that don't end in .py (and aren't

+# directories with an __init__.py). So, add this wrapper to avoid having

+# people change their command line to add a .py when running gyp_chromium.

+__import__('gyp_chromium')

diff --git a/src/build/gyp_helper.py b/src/build/gyp_helper.py
new file mode 100644
index 0000000..63debcd
--- /dev/null
+++ b/src/build/gyp_helper.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        print 'INFO: Environment value for "%s" overrides value in %s.' % (
+            var, os.path.abspath(file_path)
+        )
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
diff --git a/src/build/install-build-deps-android.sh b/src/build/install-build-deps-android.sh
new file mode 100755
index 0000000..0a90d3b
--- /dev/null
+++ b/src/build/install-build-deps-android.sh
@@ -0,0 +1,101 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android that
+# requires sudo privileges.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
+# a license agreement, so upon installation it will prompt the user. To get
+# past the curses-based dialog press TAB <ret> TAB <ret> to agree.
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+if [ "x$(id -u)" != x0 ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# The temporary directory used to store output of update-java-alternatives
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+sudo apt-get update
+
+# Fix deps
+sudo apt-get -f install
+
+# Install deps
+# This step differs depending on what Ubuntu release we are running
+# on since the package names are different, and Sun's Java must
+# be installed manually on late-model versions.
+
+# common
+sudo apt-get -y install python-pexpect xvfb x11-utils
+
+if /usr/bin/lsb_release -r -s | grep -q "12."; then
+  # Ubuntu 12.x
+  sudo apt-get -y install ant
+
+  # Java cannot be installed via ppa on Ubuntu 12.04+ so we'll
+  # simply check to see if it has been setup properly -- if not
+  # let the user know.
+
+  if ! java -version 2>&1 | grep -q "Java(TM)"; then
+    echo "****************************************************************"
+    echo "You need to install the Oracle Java SDK from http://goo.gl/uPRSq"
+    echo "and configure it as the default command-line Java environment."
+    echo "****************************************************************"
+    exit
+  fi
+
+else
+  # Ubuntu 10.x
+
+  sudo apt-get -y install ant1.8
+
+  # Install sun-java6 stuff
+  sudo apt-get -y install sun-java6-bin sun-java6-jre sun-java6-jdk
+
+  # Switch version of Java to java-6-sun
+  # Sun's java is missing certain Java plugins (e.g. for firefox, mozilla).
+  # These are not required to build, and thus are treated only as warnings.
+  # Any errors in updating java alternatives which are not '*-javaplugin.so'
+  # will cause errors and stop the script from completing successfully.
+  if ! sudo update-java-alternatives -s java-6-sun \
+            >& "${TEMPDIR}"/update-java-alternatives.out
+  then
+    # Check that there are the expected javaplugin.so errors for the update
+    if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
+           /dev/null
+    then
+      # Print as warnings all the javaplugin.so errors
+      echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
+      grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+    fi
+    # Check if there are any errors that are not javaplugin.so
+    if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
+           >& /dev/null
+    then
+      # If there are non-javaplugin.so errors, treat as errors and exit
+      echo 'ERRORS: Failed to update alternatives for java-6-sun:'
+      grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+      exit 1
+    fi
+  fi
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/src/build/install-build-deps.sh b/src/build/install-build-deps.sh
new file mode 100755
index 0000000..a7cd0e3
--- /dev/null
+++ b/src/build/install-build-deps.sh
@@ -0,0 +1,451 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--[no-]lib32: enable or disable installation of 32 bit libraries"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  --lib32)                  do_inst_lib32=1;;
+  --no-lib32)               do_inst_lib32=0;;
+  --arm)                    do_inst_arm=1;;
+  --no-arm)                 do_inst_arm=0;;
+  --no-prompt)              do_default=1
+                            do_quietly="-qq --assume-yes"
+    ;;
+  *) usage;;
+  esac
+  shift
+done
+
+ubuntu_versions="10\.04|10\.10|11\.04|11\.10|12\.04"
+ubuntu_codenames="lucid|maverick|natty|oneiric|precise"
+
+if ! egrep -q "Ubuntu ($ubuntu_versions|$ubuntu_codenames)" /etc/issue; then
+  echo "ERROR: Only Ubuntu 10.04 (lucid) through 12.04 (precise) are currently"\
+      "supported" >&2
+  exit 1
+fi
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+if [ "x$(id -u)" != x0 ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libpulse-dev"
+
+# Packages need for development
+dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
+          language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
+          libcairo2-dev libcups2-dev libcurl4-gnutls-dev libelf-dev
+          libgconf2-dev libgl1-mesa-dev libglib2.0-dev libglu1-mesa-dev
+          libgnome-keyring-dev libgtk2.0-dev libkrb5-dev libnspr4-dev
+          libnss3-dev libpam0g-dev libpci-dev libsctp-dev libspeechd-dev
+          libsqlite3-dev libssl-dev libudev-dev libwww-perl libxslt1-dev
+          libxss-dev libxt-dev libxtst-dev mesa-common-dev patch perl php5-cgi
+          pkg-config python python-cherrypy3 python-dev python-psutil rpm ruby
+          subversion ttf-dejavu-core ttf-indic-fonts ttf-kochi-gothic
+          ttf-kochi-mincho ttf-thai-tlwg wdiff git-core
+          $chromeos_dev_list"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries. These are always needed, regardless of whether or not we want
+# the full 32-bit "cross-compile" support (--lib32).
+if [ "$(uname -m)" = "x86_64" ]; then
+  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0 libcurl4-gnutls-dev"
+
+# Full list of required run-time libraries
+lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcups2 libexpat1
+          libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
+          libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0
+          libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libudev0 libx11-6
+          libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6
+          libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1
+          libxtst6 zlib1g $chromeos_lib_list"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg
+          libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg
+          libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg
+          libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg
+          libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg
+          libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg"
+
+# arm cross toolchain packages needed to build chrome on arm
+arm_list="libc6-armel-cross libc6-dev-armel-cross libgcc1-armel-cross
+          libgomp1-armel-cross linux-libc-dev-armel-cross
+          libgcc1-dbg-armel-cross libgomp1-dbg-armel-cross
+          binutils-arm-linux-gnueabi cpp-arm-linux-gnueabi
+          gcc-arm-linux-gnueabi g++-arm-linux-gnueabi
+          libmudflap0-dbg-armel-cross"
+
+# Plugin lists needed for tests.
+plugin_list="flashplugin-installer"
+
+# Some package names have changed over time
+if apt-cache show ttf-mscorefonts-installer >/dev/null 2>&1; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+else
+  dev_list="${dev_list} msttcorefonts"
+fi
+if apt-cache show libnspr4-dbg >/dev/null 2>&1; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if apt-cache show libjpeg-dev >/dev/null 2>&1; then
+ dev_list="${dev_list} libjpeg-dev"
+else
+ dev_list="${dev_list} libjpeg62-dev"
+fi
+
+# Some packages are only needed, if the distribution actually supports
+# installing them.
+if apt-cache show appmenu-gtk >/dev/null 2>&1; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  if [ 0 -ne "${do_default-0}" ] ; then
+    return $1
+  fi
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
+    case "$c" in
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
+
+if test "$do_inst_syms" = ""
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Installing debugging symbols."
+else
+  echo "Skipping installation of debugging symbols."
+  dbg_list=
+fi
+
+# When cross building for arm on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if [ "$(uname -m)" = "x86_64" ]; then
+  arm_list="$arm_list g++-multilib"
+fi
+
+if test "$do_inst_arm" = "1"; then
+  . /etc/lsb-release
+  if test "$DISTRIB_CODENAME" != "precise"; then
+    echo "ERROR: Installing the ARM cross toolchain is only available on" \
+         "Ubuntu precise." >&2
+    exit 1
+  fi
+  echo "Installing ARM cross toolchain."
+else
+  echo "Skipping installation of ARM cross toolchain."
+  arm_list=
+fi
+
+sudo apt-get update
+
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
+echo "Finding missing packages..."
+packages="${dev_list} ${lib_list} ${dbg_list} ${plugin_list} ${arm_list}"
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANG=C $new_list_cmd)"; then
+  # We probably never hit this following line.
+  echo "No missing packages, and the packages are up-to-date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have exit status of 1.
+  # This indicates that we cancelled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up-to-date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${do_quietly-} ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install 32bit backwards compatibility support for 64bit systems
+if [ "$(uname -m)" = "x86_64" ]; then
+  if test "$do_inst_lib32" != "1"
+  then
+    echo "NOTE: If you were expecting the option to install 32bit libs,"
+    echo "please run with the --lib32 flag."
+    echo
+    echo "Installation complete."
+    exit 0
+  fi
+
+  echo "WARNING"
+  echo
+  echo "We no longer recommend that you use this script to install"
+  echo "32bit libraries on a 64bit system. Instead, consider using the"
+  echo "install-chroot.sh script to help you set up a 32bit environment"
+  echo "for building and testing 32bit versions of Chrome."
+  echo
+  echo "The code for installing 32bit libraries on a 64bit system is"
+  echo "unmaintained and might not work with modern versions of Ubuntu"
+  echo "or Debian."
+  echo
+  echo -n "Are you sure you want to proceed (y/N) "
+  if yes_no 1; then
+    do_inst_lib32=1
+  fi
+  if test "$do_inst_lib32" != "1"
+  then
+    exit 0
+  fi
+
+  # Standard 32bit compatibility libraries
+  echo "First, installing the limited existing 32-bit support..."
+  cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
+            lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
+  if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
+    cmp_list="${cmp_list} lib32readline-gplv2-dev"
+  else
+    cmp_list="${cmp_list} lib32readline5-dev"
+  fi
+  sudo apt-get install ${do_quietly-} $cmp_list
+
+  tmp=/tmp/install-32bit.$$
+  trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
+  mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
+  touch "${tmp}/status"
+
+  [ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
+  cat >>"${tmp}/apt/apt.conf" <<EOF
+        Apt::Architecture "i386";
+        Dir::Cache "${tmp}/cache";
+        Dir::Cache::Archives "${tmp}/";
+        Dir::State::Lists "${tmp}/apt/lists/";
+        Dir::State::status "${tmp}/status";
+EOF
+
+  # Download 32bit packages
+  echo "Computing list of available 32bit packages..."
+  sudo apt-get -c="${tmp}/apt/apt.conf" update
+
+  echo "Downloading available 32bit packages..."
+  sudo apt-get -c="${tmp}/apt/apt.conf" \
+          --yes --download-only --force-yes --reinstall install \
+          ${lib_list} ${dbg_list}
+
+  # Open packages, remove everything that is not a library, move the
+  # library to a lib32 directory and package everything as a *.deb file.
+  echo "Repackaging and installing 32bit packages for use on 64bit systems..."
+  for i in ${lib_list} ${dbg_list}; do
+    orig="$(echo "${tmp}/${i}"_*_i386.deb)"
+    compat="$(echo "${orig}" |
+              sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
+    rm -rf "${tmp}/staging"
+    msg="$(fakeroot -u sh -exc '
+      # Unpack 32bit Debian archive
+      umask 022
+      mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
+      cd "'"${tmp}"'/staging"
+      ar x "'${orig}'"
+      tar zCfx dpkg data.tar.gz
+      tar zCfx dpkg/DEBIAN control.tar.gz
+
+      # Create a posix extended regular expression fragment that will
+      # recognize the includes which have changed. Should be rare,
+      # will almost always be empty.
+      includes=`sed -n -e "s/^[0-9a-z]*  //g" \
+                       -e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
+                  xargs -n 1 -I FILE /bin/sh -c \
+                    "cmp -s dpkg/FILE /FILE || echo FILE" |
+                  tr "\n" "|" |
+                  sed -e "s,|$,,"`
+
+      # If empty, set it to not match anything.
+      test -z "$includes" && includes="^//"
+
+      # Turn the conflicts into an extended RE for removal from the
+      # Provides line.
+      conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
+                   dpkg/DEBIAN/control`
+
+      # Rename package, change architecture, remove conflicts and dependencies
+      sed -r -i                              \
+          -e "/Package/s/$/-ia32/"           \
+          -e "/Architecture/s/:.*$/: amd64/" \
+          -e "/Depends/s/:.*/: ia32-libs/"   \
+          -e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1"   \
+          -e "/Recommends/d"                 \
+          -e "/Conflicts/d"                  \
+        dpkg/DEBIAN/control
+
+      # Only keep files that live in "lib" directories or the includes
+      # that have changed.
+      sed -r -i                                                               \
+          -e "/\/lib64\//d" -e "/\/.?bin\//d"                                 \
+          -e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g"   \
+          -e "s, lib/, lib32/,g"                                              \
+          -e "s,/lib/,/lib32/,g"                                              \
+          -e "t;d"                                                            \
+          -e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
+        dpkg/DEBIAN/md5sums
+
+      # Re-run ldconfig after installation/removal
+      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
+        >dpkg/DEBIAN/postinst
+      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
+        >dpkg/DEBIAN/postrm
+      chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm
+
+      # Remove any other control files
+      find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
+                       -name postinst -o -name postrm ")" -o -print |
+        xargs -r rm -rf
+
+      # Remove any files/dirs that live outside of "lib" directories,
+      # or are not in our list of changed includes.
+      find dpkg -mindepth 1 -regextype posix-extended \
+          "(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
+          -prune -o -print | tac |
+        xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
+      find dpkg -name lib64 -o -name bin -o -name "?bin" |
+        tac | xargs -r rm -rf
+
+      # Remove any symbolic links that were broken by the above steps.
+      find -L dpkg -type l -print | tac | xargs -r rm -rf
+
+      # Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
+      # That is where gdb looks for them.
+      find dpkg -type d -o -path "*/lib/*" -print |
+        xargs -r -n 1 sh -c "
+          i=\$(echo \"\${0}\" |
+               sed -e s,/lib/,/lib32/,g \
+               -e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
+          mkdir -p \"\${i%/*}\";
+          mv \"\${0}\" \"\${i}\""
+
+      # Rename include to include32.
+      [ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"
+
+      # Prune any empty directories
+      find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :
+
+      # Create our own Debian package
+      cd ..
+      dpkg --build staging/dpkg .' 2>&1)"
+    compat="$(eval echo $(echo "${compat}" |
+                          sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
+    [ -r "${compat}" ] || {
+      echo "${msg}" >&2
+      echo "Failed to build new Debian archive!" >&2
+      exit 1
+    }
+
+    msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
+        echo "Installed ${compat##*/}"
+      } || {
+        # echo "${msg}" >&2
+        echo "Skipped ${compat##*/}"
+      }
+  done
+
+  # Add symbolic links for developing 32bit code
+  echo "Adding missing symbolic links, enabling 32bit code development..."
+  for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
+             sed -e 's/[.]so[.][0-9].*/.so/' |
+             sort -u); do
+    [ "x${i##*/}" = "xld-linux.so" ] && continue
+    [ -r "$i" ] && continue
+    j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
+         sort -n | tail -n 1)"
+    [ -r "$i.$j" ] || continue
+    sudo ln -s "${i##*/}.$j" "$i"
+  done
+fi
diff --git a/src/build/install-chroot.sh b/src/build/install-chroot.sh
new file mode 100755
index 0000000..af85fb1
--- /dev/null
+++ b/src/build/install-chroot.sh
@@ -0,0 +1,809 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+  echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  while :; do
+    echo "This chroot already exists on your machine."
+    if schroot -l --all-sessions 2>&1 |
+       sed 's/^session://' |
+       grep -qs "^${target%bit}-"; then
+      echo "And it appears to be in active use. Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
+                       "/usr/local/bin/${target%bit}"   \
+                       "/etc/schroot/mount-${target}"   \
+                       "/etc/schroot/script-${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                       "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+# Older versions of schroot wanted a "priority=" line, whereas recent
+# versions deprecate "priority=" and warn if they see it. We don't have
+# a good feature test, but scanning for the string "priority=" in the
+# existing "schroot.conf" file is a good indication of what to do.
+priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+           echo 'priority=3' || :)
+sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+# Set up a list of mount points that is specific to this
+# chroot environment.
+sed '/^FSTAB=/s,"[^"]*","/etc/schroot/mount-'"${target}"'",' \
+         /etc/schroot/script-defaults |
+  sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+  /etc/schroot/mount-defaults |
+  sudo sh -c 'cat > /etc/schroot/mount-'"${target}"
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' /etc/schroot/mount-"${target}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Share /dev/shm and possibly /run/shm
+grep -qs '^/dev/shm' /etc/schroot/mount-"${target}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+if [ -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run/shm' /etc/schroot/mount-"${target}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "             "
+  echo "  list:      list all known chroot environments"                                | wrap "             "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    if [ -n "$1" ]; then
+      t="${s#session:}"
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+session="$(schroot -c "${chroot}" -b)"
+
+if [ $# -eq 0 ]; then
+  schroot -c "${session}" -r -p
+else
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+while [ -n "$i" ]; do
+  pids=$(ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d') >/dev/null 2>&1
+  [ -z "$pids" ] && break
+  kill -9 $pids
+done
+schroot -c "${session}" -e
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+   >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+              "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+  echo foreign-architecture \
+       $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+    sudo sh -c "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+  /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that does this for you (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible, if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon or of the logging daemon, when
+        # installing the Chrome build dependencies. This prevents the chroot
+        # session from being closed.  So, we always try to shut down any running
+        # instance of dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script} --no-lib32;
+              rc=$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit $rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mount}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enters the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/src/build/internal/README.chromium b/src/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/src/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting which is "Maximize Speed". Results in relatively fast build with reasonable optimization level but without whole program optimization to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/src/build/internal/release_defaults.gypi b/src/build/internal/release_defaults.gypi
new file mode 100644
index 0000000..1bf674a
--- /dev/null
+++ b/src/build/internal/release_defaults.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'StringPooling': 'true',
+    },
+    'VCLinkerTool': {
+      # No incremental linking.
+      'LinkIncremental': '1',
+      # Eliminate Unreferenced Data (/OPT:REF).
+      'OptimizeReferences': '2',
+      # Folding on (/OPT:ICF).
+      'EnableCOMDATFolding': '2',
+    },
+  },
+}
diff --git a/src/build/internal/release_impl.gypi b/src/build/internal/release_impl.gypi
new file mode 100644
index 0000000..5ac0e09
--- /dev/null
+++ b/src/build/internal/release_impl.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+  },
+}
diff --git a/src/build/internal/release_impl_official.gypi b/src/build/internal/release_impl_official.gypi
new file mode 100644
index 0000000..d084ae3
--- /dev/null
+++ b/src/build/internal/release_impl_official.gypi
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'defines': ['OFFICIAL_BUILD'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'InlineFunctionExpansion': '2',
+      'EnableIntrinsicFunctions': 'true',
+      'EnableFiberSafeOptimizations': 'true',
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+    'VCLibrarianTool': {
+      'AdditionalOptions': [
+        '/ltcg',
+        '/expectedoutputsize:120000000'
+      ],
+    },
+    'VCLinkerTool': {
+      'AdditionalOptions': [
+        '/time',
+        # This may reduce memory fragmentation during linking.
+        # The expected size is 40*1024*1024, which gives us about 10M of
+        # headroom as of Dec 16, 2011.
+        '/expectedoutputsize:41943040',
+      ],
+      'LinkTimeCodeGeneration': '1',
+      # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+      # the generated PDB. According to MSDN documentation, this flag is only
+      # available (or perhaps supported) in the Enterprise (team development)
+      # version of Visual Studio. If this blocks your official build, simply
+      # comment out this line, then re-run "gclient runhooks".
+      'Profile': 'true',
+    },
+  },
+}
diff --git a/src/build/ios/chrome_ios.croc b/src/build/ios/chrome_ios.croc
new file mode 100644
index 0000000..938a2e9
--- /dev/null
+++ b/src/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Exclude everything to negate whatever is in src/build/common.croc
+    {
+      'regexp' : '.*',
+      'include' : 0,
+    },
+
+    # Include all directories (but not the files in the directories).
+    # This is a workaround for how croc.py walks the directory tree. See the
+    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+    {
+      'regexp' : '.*/$',
+      'include' : 1,
+    },
+
+    # Include any file with an 'ios' directory in the path.
+    {
+      'regexp' : '.*/ios/.*',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+    
+    # Include any file that ends with _ios.
+    {
+      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios_unittest (and label it a test).
+    {
+      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+      'group' : 'test',
+    },
+
+    # Don't scan for executable lines in uninstrumented header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Don't measure coverage of perftests.
+    {
+      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+      'include' : 0,
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/src/build/ios/clean_env.py b/src/build/ios/clean_env.py
new file mode 100755
index 0000000..548e2b9
--- /dev/null
+++ b/src/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+  """This is like 'env -i', but it uses a whitelist of env variables to allow
+  through to the command being run.  It attempts to strip off Xcode-added
+  values from PATH.
+  """
+  # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+  # but that fails to set the things set by login (USER, etc.), so instead
+  # the only approach that seems to work is to have a whitelist.
+  env_key_whitelist = (
+    'HOME',
+    'LOGNAME',
+    # 'PATH' added below (but filtered).
+    'PWD',
+    'SHELL',
+    'TEMP',
+    'TMPDIR',
+    'USER'
+  )
+
+  # Need something to run.
+  # TODO(lliabraa): Make this output a usage string and exit (here and below).
+  assert(len(argv) > 0)
+
+  add_to_path = [];
+  first_entry = argv[0];
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:];
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+  # Still need something to run.
+  assert(len(argv) > 0)
+
+  clean_env = {}
+
+  # Pull over the whitelisted keys.
+  for key in env_key_whitelist:
+    val = os.environ.get(key, None)
+    if not val is None:
+      clean_env[key] = val
+
+  # Collect the developer dir as set via Xcode, defaulting it.
+  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+  if dev_prefix[-1:] != '/':
+    dev_prefix += '/'
+
+  # Now pull in PATH, but remove anything Xcode might have added.
+  initial_path = os.environ.get('PATH', '')
+  filtered_chunks = \
+      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+  if filtered_chunks:
+    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+  # Add any KEY=VALUE args before the command to the cleaned environment.
+  args = argv[:]
+  while '=' in args[0]:
+    (key, val) = args[0].split('=', 1)
+    clean_env[key] = val
+    args = args[1:]
+
+  # Still need something to run.
+  assert(len(args) > 0)
+
+  # Off it goes...
+  os.execvpe(args[0], args, clean_env)
+  # Should never get here, so return a distinctive, non-zero status code.
+  return 66
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/src/build/ios/grit_whitelist.txt b/src/build/ios/grit_whitelist.txt
new file mode 100644
index 0000000..e352eea
--- /dev/null
+++ b/src/build/ios/grit_whitelist.txt
@@ -0,0 +1,573 @@
+IDR_ABOUT_VERSION_HTML
+IDR_ABOUT_VERSION_JS
+IDR_BOOKMARK_BAR_FOLDER
+IDR_CRASHES_HTML
+IDR_CRASHES_JS
+IDR_CREDITS_HTML
+IDR_DEFAULT_FAVICON
+IDR_DIR_HEADER_HTML
+IDR_I18N_PROCESS_JS
+IDR_I18N_TEMPLATE2_JS
+IDR_I18N_TEMPLATE_JS
+IDR_INCOGNITO_TAB_HTML
+IDR_INFOBAR_AUTOLOGIN
+IDR_INFOBAR_RESTORE_SESSION
+IDR_INFOBAR_SAVE_PASSWORD
+IDR_INPUT_ALERT
+IDR_JSTEMPLATE_JS
+IDR_NET_ERROR_HTML
+IDR_NET_INTERNALS_HELP_HTML
+IDR_NET_INTERNALS_HELP_JS
+IDR_NET_INTERNALS_INDEX_HTML
+IDR_NET_INTERNALS_INDEX_JS
+IDR_OMAHA_HTML
+IDR_OMAHA_JS
+IDR_OMNIBOX_EXTENSION_APP
+IDR_OMNIBOX_HTTP
+IDR_OMNIBOX_HTTPS_INVALID
+IDR_OMNIBOX_HTTPS_VALID
+IDR_OMNIBOX_HTTPS_WARNING
+IDR_OMNIBOX_SEARCH
+IDR_OMNIBOX_STAR
+IDR_PAGEINFO_BAD
+IDR_PAGEINFO_GOOD
+IDR_PAGEINFO_INFO
+IDR_PAGEINFO_WARNING_MAJOR
+IDR_PAGEINFO_WARNING_MINOR
+IDR_PRERENDER
+IDR_PRODUCT_LOGO_26
+IDR_SAD_TAB
+IDR_SHARED_IMAGES_SELECT
+IDR_SHARED_JS_LOAD_TIME_DATA
+IDR_SHARED_JS_UTIL
+IDR_SSL_ERROR_HTML
+IDR_SSL_ROAD_BLOCK_HTML
+IDR_SYNC_INTERNALS_ABOUT_JS
+IDR_SYNC_INTERNALS_CHROME_SYNC_JS
+IDR_SYNC_INTERNALS_DATA_JS
+IDR_SYNC_INTERNALS_EVENTS_JS
+IDR_SYNC_INTERNALS_INDEX_HTML
+IDR_SYNC_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_NODE_BROWSER_JS
+IDR_SYNC_INTERNALS_NOTIFICATIONS_JS
+IDR_SYNC_INTERNALS_SEARCH_JS
+IDR_SYNC_INTERNALS_SYNC_LOG_JS
+IDR_SYNC_INTERNALS_SYNC_NODE_BROWSER_JS
+IDR_SYNC_INTERNALS_SYNC_SEARCH_JS
+IDR_SYNC_INTERNALS_TRAFFIC_JS
+IDR_TAB_CLOSE_MASK
+IDR_TAB_CLOSE_P
+IDR_THROBBER
+IDR_UPDATE_BADGE4
+IDR_UPDATE_MENU4
+IDS_ABOUT_MAC
+IDS_ABOUT_VERSION_COMMAND_LINE
+IDS_ABOUT_VERSION_COMPANY_NAME
+IDS_ABOUT_VERSION_COPYRIGHT
+IDS_ABOUT_VERSION_EXECUTABLE_PATH
+IDS_ABOUT_VERSION_OFFICIAL
+IDS_ABOUT_VERSION_OS
+IDS_ABOUT_VERSION_PATH_NOTFOUND
+IDS_ABOUT_VERSION_PROFILE_PATH
+IDS_ABOUT_VERSION_TITLE
+IDS_ABOUT_VERSION_UNOFFICIAL
+IDS_ABOUT_VERSION_USER_AGENT
+IDS_ABOUT_VERSION_VARIATIONS
+IDS_ACCEPT_LANGUAGES
+IDS_ACCNAME_BACK
+IDS_ACCNAME_CLEAR_TEXT
+IDS_ACCNAME_FORWARD
+IDS_ACCNAME_LOCATION
+IDS_ACCNAME_STAR
+IDS_ACCNAME_VOICE_SEARCH
+IDS_ALLOW_INSECURE_CONTENT_BUTTON
+IDS_APP_UNTITLED_SHORTCUT_FILE_NAME
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION
+IDS_AUTOLOGIN_INFOBAR_CANCEL_BUTTON
+IDS_AUTOLOGIN_INFOBAR_MESSAGE
+IDS_AUTOLOGIN_INFOBAR_OK_BUTTON
+IDS_BLOCKED_DISPLAYING_INSECURE_CONTENT
+IDS_BLOCK_INSECURE_CONTENT_BUTTON
+IDS_BOOKMARK_ADD_EDITOR_TITLE
+IDS_BOOKMARK_BAR_FOLDER_NAME
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME
+IDS_BOOKMARK_BUBBLE_CHOOSER_ANOTHER_FOLDER
+IDS_BOOKMARK_BUBBLE_REMOVE_BOOKMARK
+IDS_BOOKMARK_EDITOR_CONFIRM_DELETE
+IDS_BOOKMARK_EDITOR_NEW_FOLDER_NAME
+IDS_BOOKMARK_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_CHOOSER_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE_NEW
+IDS_BOOKMARK_MANAGER_BOOKMARK_ALL_TABS
+IDS_BOOKMARK_MANAGER_FOLDER_SECTION
+IDS_BOOKMARK_MANAGER_FOLDER_TITLE
+IDS_BOOKMARK_MANAGER_NAME_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_MANAGER_REMOVE_TITLE
+IDS_BOOKMARK_MANAGER_URL_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_NEW_FOLDER_BUTTON_TITLE
+IDS_CANCEL
+IDS_CERT_ERROR_AUTHORITY_INVALID_DESCRIPTION
+IDS_CERT_ERROR_AUTHORITY_INVALID_DETAILS
+IDS_CERT_ERROR_AUTHORITY_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_AUTHORITY_INVALID_EXTRA_INFO_3
+IDS_CERT_ERROR_AUTHORITY_INVALID_TITLE
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DESCRIPTION
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_COMMON_NAME_INVALID_TITLE
+IDS_CERT_ERROR_CONTAINS_ERRORS_DESCRIPTION
+IDS_CERT_ERROR_CONTAINS_ERRORS_DETAILS
+IDS_CERT_ERROR_CONTAINS_ERRORS_EXTRA_INFO_2
+IDS_CERT_ERROR_CONTAINS_ERRORS_TITLE
+IDS_CERT_ERROR_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_EXPIRED_DETAILS
+IDS_CERT_ERROR_EXPIRED_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_EXPIRED_TITLE
+IDS_CERT_ERROR_EXTRA_INFO_1
+IDS_CERT_ERROR_EXTRA_INFO_TITLE
+IDS_CERT_ERROR_INVALID_CERT_DESCRIPTION
+IDS_CERT_ERROR_INVALID_CERT_DETAILS
+IDS_CERT_ERROR_INVALID_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_INVALID_CERT_TITLE
+IDS_CERT_ERROR_NOT_IN_DNS_DESCRIPTION
+IDS_CERT_ERROR_NOT_IN_DNS_DETAILS
+IDS_CERT_ERROR_NOT_IN_DNS_EXTRA_INFO
+IDS_CERT_ERROR_NOT_IN_DNS_TITLE
+IDS_CERT_ERROR_NOT_YET_VALID_DESCRIPTION
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_NOT_YET_VALID_TITLE
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DESCRIPTION
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DETAILS
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_TITLE
+IDS_CERT_ERROR_REVOKED_CERT_DESCRIPTION
+IDS_CERT_ERROR_REVOKED_CERT_DETAILS
+IDS_CERT_ERROR_REVOKED_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_REVOKED_CERT_TITLE
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DESCRIPTION
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DETAILS
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_TITLE
+IDS_CERT_ERROR_UNKNOWN_ERROR_DESCRIPTION
+IDS_CERT_ERROR_UNKNOWN_ERROR_DETAILS
+IDS_CERT_ERROR_UNKNOWN_ERROR_TITLE
+IDS_CERT_ERROR_WEAK_KEY_DESCRIPTION
+IDS_CERT_ERROR_WEAK_KEY_DETAILS
+IDS_CERT_ERROR_WEAK_KEY_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_KEY_TITLE
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DESCRIPTION
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DETAILS
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_TITLE
+IDS_CHROME_TO_DEVICE_PRINT_TO_PHONE
+IDS_CHROME_TO_DEVICE_SNAPSHOTS
+IDS_COPY_URL_MAC
+IDS_COULDNT_OPEN_PROFILE_ERROR
+IDS_CRASHES_BUG_LINK_LABEL
+IDS_CRASHES_CRASH_COUNT_BANNER_FORMAT
+IDS_CRASHES_CRASH_HEADER_FORMAT
+IDS_CRASHES_CRASH_TIME_FORMAT
+IDS_CRASHES_DISABLED_HEADER
+IDS_CRASHES_DISABLED_MESSAGE
+IDS_CRASHES_NO_CRASHES_MESSAGE
+IDS_CRASHES_TITLE
+IDS_DEFAULT_AVATAR_NAME_10
+IDS_DEFAULT_AVATAR_NAME_11
+IDS_DEFAULT_AVATAR_NAME_12
+IDS_DEFAULT_AVATAR_NAME_13
+IDS_DEFAULT_AVATAR_NAME_14
+IDS_DEFAULT_AVATAR_NAME_15
+IDS_DEFAULT_AVATAR_NAME_16
+IDS_DEFAULT_AVATAR_NAME_17
+IDS_DEFAULT_AVATAR_NAME_18
+IDS_DEFAULT_AVATAR_NAME_19
+IDS_DEFAULT_AVATAR_NAME_20
+IDS_DEFAULT_AVATAR_NAME_21
+IDS_DEFAULT_AVATAR_NAME_22
+IDS_DEFAULT_AVATAR_NAME_23
+IDS_DEFAULT_AVATAR_NAME_24
+IDS_DEFAULT_AVATAR_NAME_25
+IDS_DEFAULT_AVATAR_NAME_8
+IDS_DEFAULT_AVATAR_NAME_9
+IDS_DEFAULT_ENCODING
+IDS_DEFAULT_PROFILE_NAME
+IDS_DEFAULT_TAB_TITLE
+IDS_DELETE
+IDS_DONE
+IDS_EDIT_FIND_MAC
+IDS_EMPTY_KEYWORD_VALUE
+IDS_ERRORPAGES_DETAILS_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_DETAILS_BAD_GATEWAY
+IDS_ERRORPAGES_DETAILS_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_DETAILS_BLOCKED
+IDS_ERRORPAGES_DETAILS_CACHE_MISS
+IDS_ERRORPAGES_DETAILS_CACHE_READ_FAILURE
+IDS_ERRORPAGES_DETAILS_CONNECTION_CLOSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_CONNECTION_REFUSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_RESET
+IDS_ERRORPAGES_DETAILS_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_DETAILS_EMPTY_RESPONSE
+IDS_ERRORPAGES_DETAILS_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_FILE_NOT_FOUND
+IDS_ERRORPAGES_DETAILS_FORBIDDEN
+IDS_ERRORPAGES_DETAILS_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_DETAILS_GONE
+IDS_ERRORPAGES_DETAILS_HTTP_VERSION_NOT_SUPPORTED
+IDS_ERRORPAGES_DETAILS_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_DETAILS_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_DETAILS_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_DETAILS_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_DETAILS_NOT_IMPLEMENTED
+IDS_ERRORPAGES_DETAILS_PINNING_FAILURE
+IDS_ERRORPAGES_DETAILS_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_DISPOSITION
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_LENGTH
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_LOCATION
+IDS_ERRORPAGES_DETAILS_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_DETAILS_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_DETAILS_SSL_UNSAFE_NEGOTIATION
+IDS_ERRORPAGES_DETAILS_TEMPLATE
+IDS_ERRORPAGES_DETAILS_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_DETAILS_TIMED_OUT
+IDS_ERRORPAGES_DETAILS_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_DETAILS_UNKNOWN
+IDS_ERRORPAGES_DETAILS_UNSUPPORTED_SCHEME_ERROR
+IDS_ERRORPAGES_HEADING_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_HEADING_BLOCKED
+IDS_ERRORPAGES_HEADING_CACHE_MISS
+IDS_ERRORPAGES_HEADING_CACHE_READ_FAILURE
+IDS_ERRORPAGES_HEADING_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_HEADING_DUPLICATE_HEADERS
+IDS_ERRORPAGES_HEADING_EMPTY_RESPONSE
+IDS_ERRORPAGES_HEADING_ESET_ANTI_VIRUS_SSL_INTERCEPTION
+IDS_ERRORPAGES_HEADING_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_HTTP_SERVER_ERROR
+IDS_ERRORPAGES_HEADING_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_HEADING_KASPERSKY_ANTI_VIRUS_SSL_INTERCEPTION
+IDS_ERRORPAGES_HEADING_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_HEADING_NOT_AVAILABLE
+IDS_ERRORPAGES_HEADING_NOT_FOUND
+IDS_ERRORPAGES_HEADING_PINNING_FAILURE
+IDS_ERRORPAGES_HEADING_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_HEADING_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_HEADING_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_HEADING_UNSUPPORTED_SCHEME_ERROR
+IDS_ERRORPAGES_HEADING_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_HTTP_DETAILS_TEMPLATE
+IDS_ERRORPAGES_HTTP_POST_WARNING
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION
+IDS_ERRORPAGES_SUGGESTION_DISABLE_PLATFORM
+IDS_ERRORPAGES_SUGGESTION_DNS_CONFIG
+IDS_ERRORPAGES_SUGGESTION_FIREWALL_CONFIG
+IDS_ERRORPAGES_SUGGESTION_HEADING
+IDS_ERRORPAGES_SUGGESTION_HOMEPAGE
+IDS_ERRORPAGES_SUGGESTION_LEARNMORE
+IDS_ERRORPAGES_SUGGESTION_NETWORK_PREDICTION
+IDS_ERRORPAGES_SUGGESTION_PROXY_CONFIG
+IDS_ERRORPAGES_SUGGESTION_PROXY_DISABLE_PLATFORM
+IDS_ERRORPAGES_SUGGESTION_RELOAD
+IDS_ERRORPAGES_SUMMARY_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_SUMMARY_BAD_GATEWAY
+IDS_ERRORPAGES_SUMMARY_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_SUMMARY_BLOCKED
+IDS_ERRORPAGES_SUMMARY_CACHE_MISS
+IDS_ERRORPAGES_SUMMARY_CACHE_READ_FAILURE
+IDS_ERRORPAGES_SUMMARY_CONNECTION_REFUSED
+IDS_ERRORPAGES_SUMMARY_CONNECTION_RESET
+IDS_ERRORPAGES_SUMMARY_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_SUMMARY_DUPLICATE_HEADERS
+IDS_ERRORPAGES_SUMMARY_EMPTY_RESPONSE
+IDS_ERRORPAGES_SUMMARY_ESET_ANTI_VIRUS_SSL_INTERCEPTION
+IDS_ERRORPAGES_SUMMARY_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_FORBIDDEN
+IDS_ERRORPAGES_SUMMARY_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_SUMMARY_GONE
+IDS_ERRORPAGES_SUMMARY_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_INSTRUCTIONS_TEMPLATE
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_PLATFORM
+IDS_ERRORPAGES_SUMMARY_KASPERSKY_ANTI_VIRUS_SSL_INTERCEPTION
+IDS_ERRORPAGES_SUMMARY_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_SUMMARY_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_SUMMARY_NOT_AVAILABLE
+IDS_ERRORPAGES_SUMMARY_NOT_FOUND
+IDS_ERRORPAGES_SUMMARY_PINNING_FAILURE
+IDS_ERRORPAGES_SUMMARY_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_SUMMARY_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_SUMMARY_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_SUMMARY_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_SUMMARY_TIMED_OUT
+IDS_ERRORPAGES_SUMMARY_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_SUMMARY_UNSUPPORTED_SCHEME_ERROR
+IDS_ERRORPAGES_SUMMARY_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_SUMMARY_WEBSITE_CANNOT_HANDLE
+IDS_ERRORPAGES_TITLE_ACCESS_DENIED
+IDS_ERRORPAGES_TITLE_BLOCKED
+IDS_ERRORPAGES_TITLE_LOAD_FAILED
+IDS_ERRORPAGES_TITLE_NOT_AVAILABLE
+IDS_ERRORPAGES_TITLE_NOT_FOUND
+IDS_EXTENSIONS_LOCKED_MANAGED_MODE
+IDS_EXTENSION_KEYWORD_COMMAND
+IDS_FEEDBACK_DESCRIPTION_LABEL
+IDS_FEEDBACK_REPORT_PAGE_TITLE
+IDS_FEEDBACK_REPORT_URL_LABEL
+IDS_FEEDBACK_SEND_REPORT
+IDS_FEEDBACK_SYSTEM_INFORMATION_URL_TEXT
+IDS_FEEDBACK_USER_EMAIL_LABEL
+IDS_FIND_IN_PAGE_CLOSE_TOOLTIP
+IDS_FIND_IN_PAGE_COUNT
+IDS_FIND_IN_PAGE_NEXT_TOOLTIP
+IDS_FIND_IN_PAGE_PREVIOUS_TOOLTIP
+IDS_FULLSCREEN
+IDS_GOOGLE_URL_TRACKER_INFOBAR_DONT_SWITCH
+IDS_GOOGLE_URL_TRACKER_INFOBAR_MESSAGE
+IDS_GOOGLE_URL_TRACKER_INFOBAR_SWITCH
+IDS_HTTP_POST_WARNING
+IDS_HTTP_POST_WARNING_RESEND
+IDS_HTTP_POST_WARNING_TITLE
+IDS_KEYWORD_SEARCH
+IDS_LEARN_MORE
+IDS_LOGIN_DIALOG_OK_BUTTON_LABEL
+IDS_LOGIN_DIALOG_PASSWORD_FIELD
+IDS_LOGIN_DIALOG_TITLE
+IDS_LOGIN_DIALOG_USERNAME_FIELD
+IDS_MOBILE_WELCOME_URL
+IDS_NETWORK_PREDICTION_ENABLED_DESCRIPTION
+IDS_NEW_INCOGNITO_WINDOW_MAC
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE
+IDS_NEW_TAB_MOST_VISITED
+IDS_NEW_TAB_RECENTLY_CLOSED
+IDS_NEW_TAB_RESTORE_THUMBNAILS_SHORT_LINK
+IDS_NEW_TAB_THUMBNAIL_REMOVED_NOTIFICATION
+IDS_NEW_TAB_TITLE
+IDS_NEW_TAB_UNDO_THUMBNAIL_REMOVE
+IDS_NUMBERED_PROFILE_NAME
+IDS_OK
+IDS_OMNIBOX_EMPTY_HINT
+IDS_OPEN_TABS_NOTYETSYNCED
+IDS_OPEN_TABS_PROMOCOMPUTER
+IDS_OPTIONS_ADVANCED_SECTION_TITLE_PRIVACY
+IDS_OPTIONS_DISABLE_WEB_SERVICES
+IDS_OPTIONS_ENABLE_LOGGING
+IDS_OPTIONS_GENERAL_TAB_LABEL
+IDS_OPTIONS_IMPROVE_BROWSING_EXPERIENCE
+IDS_OPTIONS_INTERNET_TAB_LABEL
+IDS_OPTIONS_PROXIES_CONFIGURE_BUTTON
+IDS_OPTIONS_SETTINGS_OPTIONS
+IDS_PAGEINFO_ADDRESS
+IDS_PAGEINFO_CERT_INFO_BUTTON
+IDS_PAGEINFO_PARTIAL_ADDRESS
+IDS_PAGE_INFO_EV_IDENTITY_TITLE
+IDS_PAGE_INFO_HELP_CENTER_LINK
+IDS_PAGE_INFO_INTERNAL_PAGE
+IDS_PAGE_INFO_SECURITY_BUTTON_ACCESSIBILITY_LABEL
+IDS_PAGE_INFO_SECURITY_TAB_COMPRESSION_DETAILS
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_ERROR
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_WARNING
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_SENTENCE_LINK
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS
+IDS_PAGE_INFO_SECURITY_TAB_FALLBACK_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_FIRST_VISITED_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_INSECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_NON_UNIQUE_NAME
+IDS_PAGE_INFO_SECURITY_TAB_NOT_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_NO_COMPRESSION
+IDS_PAGE_INFO_SECURITY_TAB_NO_REVOCATION_MECHANISM
+IDS_PAGE_INFO_SECURITY_TAB_RENEGOTIATION_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_EV
+IDS_PAGE_INFO_SECURITY_TAB_SSL_VERSION
+IDS_PAGE_INFO_SECURITY_TAB_UNABLE_TO_CHECK_REVOCATION
+IDS_PAGE_INFO_SECURITY_TAB_UNKNOWN_PARTY
+IDS_PAGE_INFO_SECURITY_TAB_VISITED_BEFORE_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_WEAK_ENCRYPTION_CONNECTION_TEXT
+IDS_PAGE_INFO_SITE_INFO_TITLE
+IDS_PASSWORDS_EXCEPTIONS_TAB_TITLE
+IDS_PASSWORDS_SHOW_PASSWORDS_TAB_TITLE
+IDS_PASSWORD_MANAGER_BLACKLIST_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_PASSWORD_PROMPT
+IDS_PAST_TIME_TODAY
+IDS_PAST_TIME_YESTERDAY
+IDS_PLATFORM_LABEL
+IDS_PREFERENCES_CORRUPT_ERROR
+IDS_PREFERENCES_UNREADABLE_ERROR
+IDS_PRINT
+IDS_PRIVACY_POLICY_URL
+IDS_PRODUCT_NAME
+IDS_PROFILES_LOCAL_PROFILE_STATE
+IDS_PROFILE_TOO_NEW_ERROR
+IDS_SAD_TAB_MESSAGE
+IDS_SAD_TAB_TITLE
+IDS_SECURE_CONNECTION_EV
+IDS_SESSION_CRASHED_VIEW_MESSAGE
+IDS_SESSION_CRASHED_VIEW_RESTORE_BUTTON
+IDS_SETTINGS_SHOW_ADVANCED_SETTINGS
+IDS_SKIP
+IDS_SPEECH_INPUT_ABORTED
+IDS_SPEECH_INPUT_MIC_ERROR
+IDS_SPEECH_INPUT_MIC_IN_USE
+IDS_SPEECH_INPUT_NET_ERROR
+IDS_SPEECH_INPUT_NO_MIC
+IDS_SPEECH_INPUT_NO_RESULTS
+IDS_SPEECH_INPUT_NO_SPEECH
+IDS_SSL_BLOCKING_PAGE_EXIT
+IDS_SSL_BLOCKING_PAGE_PROCEED
+IDS_SSL_BLOCKING_PAGE_TITLE
+IDS_SSL_ERROR_PAGE_BACK
+IDS_SSL_ERROR_PAGE_TITLE
+IDS_SYNC_ACCOUNT_DETAILS_NOT_ENTERED
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER_WITH_MANAGE_LINK
+IDS_SYNC_AUTHENTICATING_LABEL
+IDS_SYNC_CANNOT_ACCESS_ACCOUNT
+IDS_SYNC_CLEAR_USER_DATA
+IDS_SYNC_CONFIGURE_ENCRYPTION
+IDS_SYNC_CREATE_ACCOUNT
+IDS_SYNC_DATATYPE_BOOKMARKS
+IDS_SYNC_DATATYPE_PASSWORDS
+IDS_SYNC_DATATYPE_PREFERENCES
+IDS_SYNC_DATATYPE_TYPED_URLS
+IDS_SYNC_ENABLE_SYNC_ON_ACCOUNT
+IDS_SYNC_ENCRYPTION_DATA_TYPES_TITLE
+IDS_SYNC_ENCRYPT_ALL_DATA
+IDS_SYNC_ENCRYPT_SENSITIVE_DATA
+IDS_SYNC_ENTER_GOOGLE_PASSPHRASE_BODY
+IDS_SYNC_ENTER_OTHER_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_TITLE
+IDS_SYNC_ERROR_BUBBLE_VIEW_TITLE
+IDS_SYNC_ERROR_SIGNING_IN
+IDS_SYNC_GAIA_CAPTCHA_CASE_INSENSITIVE_TIP
+IDS_SYNC_GET_ACCESS_CODE_URL
+IDS_SYNC_INVALID_ACCESS_CODE_LABEL
+IDS_SYNC_INVALID_USER_CREDENTIALS
+IDS_SYNC_LOGIN_COULD_NOT_CONNECT
+IDS_SYNC_LOGIN_INFO_OUT_OF_DATE
+IDS_SYNC_LOGIN_SETTING_UP
+IDS_SYNC_MENU_SYNCED_LABEL
+IDS_SYNC_MENU_SYNC_ERROR_LABEL
+IDS_SYNC_NTP_PASSWORD_ENABLE
+IDS_SYNC_NTP_PASSWORD_PROMO
+IDS_SYNC_NTP_PASSWORD_PROMO
+IDS_SYNC_NTP_SETUP_IN_PROGRESS
+IDS_SYNC_OPTIONS_GROUP_NAME
+IDS_SYNC_OTHER_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_PASSPHRASE_LABEL
+IDS_SYNC_PASSPHRASE_MISMATCH_ERROR
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT
+IDS_SYNC_PASSPHRASE_SECTION_TITLE
+IDS_SYNC_PASSWORD_SYNC_ATTENTION
+IDS_SYNC_PROMO_TAB_TITLE
+IDS_SYNC_RELOGIN_LINK_LABEL
+IDS_SYNC_SERVER_IS_UNREACHABLE
+IDS_SYNC_SERVICE_UNAVAILABLE
+IDS_SYNC_SETUP_ERROR
+IDS_SYNC_SIGNIN
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_SIGN_IN_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_START_SYNC_BUTTON_LABEL
+IDS_SYNC_STATUS_UNRECOVERABLE_ERROR
+IDS_SYNC_STOP_AND_RESTART_SYNC
+IDS_SYNC_TIME_JUST_NOW
+IDS_SYNC_TIME_NEVER
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_UNRECOVERABLE_ERROR_HELP_URL
+IDS_SYNC_UPGRADE_CLIENT
+IDS_SYNC_ZERO_DATA_TYPES_ERROR
+IDS_TIME_DAYS_DEFAULT
+IDS_TIME_DAYS_FEW
+IDS_TIME_DAYS_MANY
+IDS_TIME_DAYS_TWO
+IDS_TIME_DAYS_ZERO
+IDS_TIME_DAY_SINGULAR
+IDS_TIME_ELAPSED_DAYS_DEFAULT
+IDS_TIME_ELAPSED_DAYS_FEW
+IDS_TIME_ELAPSED_DAYS_MANY
+IDS_TIME_ELAPSED_DAYS_TWO
+IDS_TIME_ELAPSED_DAYS_ZERO
+IDS_TIME_ELAPSED_DAY_SINGULAR
+IDS_TIME_ELAPSED_HOURS_DEFAULT
+IDS_TIME_ELAPSED_HOURS_FEW
+IDS_TIME_ELAPSED_HOURS_MANY
+IDS_TIME_ELAPSED_HOURS_TWO
+IDS_TIME_ELAPSED_HOURS_ZERO
+IDS_TIME_ELAPSED_HOUR_SINGULAR
+IDS_TIME_ELAPSED_MINS_DEFAULT
+IDS_TIME_ELAPSED_MINS_FEW
+IDS_TIME_ELAPSED_MINS_MANY
+IDS_TIME_ELAPSED_MINS_TWO
+IDS_TIME_ELAPSED_MINS_ZERO
+IDS_TIME_ELAPSED_MIN_SINGULAR
+IDS_TIME_ELAPSED_SECS_DEFAULT
+IDS_TIME_ELAPSED_SECS_FEW
+IDS_TIME_ELAPSED_SECS_MANY
+IDS_TIME_ELAPSED_SECS_TWO
+IDS_TIME_ELAPSED_SECS_ZERO
+IDS_TIME_ELAPSED_SEC_SINGULAR
+IDS_TIME_HOURS_DEFAULT
+IDS_TIME_HOURS_FEW
+IDS_TIME_HOURS_MANY
+IDS_TIME_HOURS_TWO
+IDS_TIME_HOURS_ZERO
+IDS_TIME_HOUR_SINGULAR
+IDS_TIME_MINS_DEFAULT
+IDS_TIME_MINS_FEW
+IDS_TIME_MINS_MANY
+IDS_TIME_MINS_TWO
+IDS_TIME_MINS_ZERO
+IDS_TIME_MIN_SINGULAR
+IDS_TIME_REMAINING_DAYS_DEFAULT
+IDS_TIME_REMAINING_DAYS_FEW
+IDS_TIME_REMAINING_DAYS_MANY
+IDS_TIME_REMAINING_DAYS_TWO
+IDS_TIME_REMAINING_DAYS_ZERO
+IDS_TIME_REMAINING_DAY_SINGULAR
+IDS_TIME_REMAINING_HOURS_DEFAULT
+IDS_TIME_REMAINING_HOURS_FEW
+IDS_TIME_REMAINING_HOURS_MANY
+IDS_TIME_REMAINING_HOURS_TWO
+IDS_TIME_REMAINING_HOURS_ZERO
+IDS_TIME_REMAINING_HOUR_SINGULAR
+IDS_TIME_REMAINING_LONG_MINS_DEFAULT
+IDS_TIME_REMAINING_LONG_MINS_FEW
+IDS_TIME_REMAINING_LONG_MINS_MANY
+IDS_TIME_REMAINING_LONG_MINS_TWO
+IDS_TIME_REMAINING_LONG_MINS_ZERO
+IDS_TIME_REMAINING_LONG_MIN_SINGULAR
+IDS_TIME_REMAINING_MINS_DEFAULT
+IDS_TIME_REMAINING_MINS_FEW
+IDS_TIME_REMAINING_MINS_MANY
+IDS_TIME_REMAINING_MINS_TWO
+IDS_TIME_REMAINING_MINS_ZERO
+IDS_TIME_REMAINING_MIN_SINGULAR
+IDS_TIME_REMAINING_SECS_DEFAULT
+IDS_TIME_REMAINING_SECS_FEW
+IDS_TIME_REMAINING_SECS_MANY
+IDS_TIME_REMAINING_SECS_TWO
+IDS_TIME_REMAINING_SECS_ZERO
+IDS_TIME_REMAINING_SEC_SINGULAR
+IDS_TIME_SECS_DEFAULT
+IDS_TIME_SECS_FEW
+IDS_TIME_SECS_MANY
+IDS_TIME_SECS_TWO
+IDS_TIME_SECS_ZERO
+IDS_TIME_SEC_SINGULAR
+IDS_UNKNOWN_SEARCH_ENGINE_NAME
+IDS_UPGRADE_AVAILABLE
+IDS_UPGRADE_AVAILABLE_BUTTON
+IDS_WEB_FONT_FAMILY
+IDS_WEB_FONT_SIZE
diff --git a/src/build/ios/mac_build.gypi b/src/build/ios/mac_build.gypi
new file mode 100644
index 0000000..775ff26
--- /dev/null
+++ b/src/build/ios/mac_build.gypi
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Xcode throws an error if an iOS target depends on a Mac OS X target. So
+# any place a utility program needs to be built and run, an action is
+# used to run ninja as a script to work around this.
+# Example:
+# {
+#   'target_name': 'foo',
+#   'type': 'none',
+#   'variables': {
+#     # The name of a directory used for ninja. This cannot be shared with
+#     # another mac build.
+#     'ninja_output_dir': 'ninja-foo',
+#     # The full path to the location in which the ninja executable should be
+#     # placed. This cannot be shared with another mac build.
+#     'ninja_product_dir':
+#       '<(DEPTH)/xcodebuild/<(ninja_output_dir)/<(CONFIGURATION_NAME)',
+#     # The list of all the gyp files that contain the targets to run.
+#     're_run_targets': [
+#       'foo.gyp',
+#     ],
+#   },
+#   'includes': ['path_to/mac_build.gypi'],
+#   'actions': [
+#     {
+#       'action_name': 'compile foo',
+#       'inputs': [],
+#       'outputs': [],
+#       'action': [
+#         '<@(ninja_cmd)',
+#         # All the targets to build.
+#         'foo1',
+#         'foo2',
+#       ],
+#     },
+#   ],
+# }
+{
+  'variables': {
+    # Common ninja command line flags.
+    'ninja_cmd': [
+      # Bounce through clean_env to clean up the environment so things
+      # set by the iOS build don't pollute the Mac build.
+      '<(DEPTH)/build/ios/clean_env.py',
+      # ninja must be found in the PATH.
+      'ADD_TO_PATH=<!(echo $PATH)',
+      'ninja',
+      '-C',
+      '<(ninja_product_dir)',
+    ],
+
+    # Common syntax to rerun gyp to generate the Mac projects.
+    're_run_gyp': [
+      'build/gyp_chromium',
+      # Don't use anything set for the iOS side of things.
+      '--ignore-environment',
+      # Generate for ninja
+      '--format=ninja',
+      # Generate files into xcodebuild/ninja
+      '-Goutput_dir=xcodebuild/<(ninja_output_dir)',
+      # nacl isn't in the iOS checkout, make sure it's turned off
+      '-Ddisable_nacl=1',
+      # Add a variable to handle specific cases for mac_build.
+      '-Dios_mac_build=1',
+      # Pass through the Mac SDK version.
+      '-Dmac_sdk=<(mac_sdk)',
+    ],
+
+    # Rerun gyp for each of the projects needed. This is what actually
+    # generates the projects on disk.
+    're_run_gyp_execution':
+      '<!(cd <(DEPTH) && <@(re_run_gyp) <@(re_run_targets))',
+  },
+  # Since these are used to generate things needed by other targets, make
+  # them hard dependencies so they are always built first.
+  'hard_dependency': 1,
+}
diff --git a/src/build/isolate.gypi b/src/build/isolate.gypi
new file mode 100644
index 0000000..8f5d71a
--- /dev/null
+++ b/src/build/isolate.gypi
@@ -0,0 +1,72 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to "build" .isolate files into a .isolated file.
+#
+# To use this, create a gyp target with the following form:
+# 'conditions': [
+#   ['test_isolation_mode != "noop"', {
+#     'targets': [
+#       {
+#         'target_name': 'foo_test_run',
+#         'type': 'none',
+#         'dependencies': [
+#           'foo_test',
+#         ],
+#         'includes': [
+#           '../build/isolate.gypi',
+#           'foo_test.isolate',
+#         ],
+#         'sources': [
+#           'foo_test.isolate',
+#         ],
+#       },
+#     ],
+#   }],
+# ],
+#
+# Note: foo_test.isolate is both included and listed as a source file. This is
+# an inherent property of the .isolate format. It permits defining GYP
+# variables but is a stricter format than GYP so isolate.py can read it.
+#
+# The generated .isolated file will be:
+#   <(PRODUCT_DIR)/foo_test.isolated
+
+{
+  'rules': [
+    {
+      'rule_name': 'isolate',
+      'extension': 'isolate',
+      'inputs': [
+        '<(RULE_INPUT_PATH)',
+        # Disable file tracking by the build driver for now. This means the
+        # project must have the proper build-time dependency for their runtime
+        # dependency. This improves the runtime of the build driver since it
+        # doesn't have to stat() all these files.
+        #
+        # More importantly, it means that even if a isolate_dependency_tracked
+        # file is missing, for example if a file was deleted and the .isolate
+        # file was not updated, that won't break the build, especially in the
+        # case where foo_tests_run is not built! This should be reenabled once
+        # the switch-over to running tests on Swarm is completed.
+        #'<@(isolate_dependency_tracked)',
+      ],
+      'outputs': [
+        '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+      ],
+      'action': [
+        'python',
+        '<(DEPTH)/tools/swarm_client/isolate.py',
+        '<(test_isolation_mode)',
+        '--outdir', '<(test_isolation_outdir)',
+        '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
+        '--variable', 'OS', '<(OS)',
+        '--result', '<@(_outputs)',
+        '--isolate', '<(RULE_INPUT_PATH)',
+      ],
+      'msvs_cygwin_shell': 0,
+    },
+  ],
+}
diff --git a/src/build/jar_file_jni_generator.gypi b/src/build/jar_file_jni_generator.gypi
new file mode 100644
index 0000000..3c14cf8
--- /dev/null
+++ b/src/build/jar_file_jni_generator.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for system Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'android_jar_jni_headers',
+#   'type': 'none',
+#   'variables': {
+#     'jni_gen_dir': 'chrome',
+#     'input_java_class': 'java/io/InputStream.class',
+#     'input_jar_file': '<(android_sdk)/android.jar',
+#   },
+#   'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+  },
+  'actions': [
+    {
+      'action_name': 'generate_jni_headers_from_jar_file',
+      'inputs': [
+        '<(jni_generator)',
+        '<(input_jar_file)',
+      ],
+      'variables': {
+        'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")'
+      },
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni/<(java_class_name)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '-j',
+        '<(input_jar_file)',
+        '--input_file',
+        '<(input_java_class)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni',
+      ],
+      'message': 'Generating JNI bindings from  <(input_jar_file)/<(input_java_class)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/java.gypi b/src/build/java.gypi
new file mode 100644
index 0000000..0a1df8a
--- /dev/null
+++ b/src/build/java.gypi
@@ -0,0 +1,153 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'package_name': 'my-package',
+#     'java_in_dir': 'path/to/package/root',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The generated jar-file will be:
+#   <(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar
+# Required variables:
+#  package_name - Used to name the intermediate output directory and in the
+#    names of some output files.
+#  java_in_dir - The top-level java directory. The src should be in
+#    <java_in_dir>/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  javac_includes - A list of specific files to include. This is by default
+#    empty, which leads to inclusion of all files specified. May include
+#    wildcards, and supports '**/' for recursive path wildcards, i.e.:
+#    '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+#  has_java_resources - Set to 1 if the java target contains an
+#    Android-compatible resources folder named res.  If 1, R_package and
+#    R_package_relpath must also be set.
+#  R_package - The java package in which the R class (which maps resources to
+#    integer IDs) should be generated, e.g. org.chromium.content.
+#  R_package_relpath - Same as R_package, but replace each '.' with '/'.
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/build_output_dirs_android.gyp:build_output_dirs'
+  ],
+  # This all_dependent_settings is used for java targets only. This will add the
+  # chromium_<(package_name) jar to the classpath of dependent java targets.
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar'],
+    },
+  },
+  'variables': {
+    'input_jars_paths': [],
+    'additional_src_dirs': [],
+    'javac_includes': [],
+    'additional_input_paths': ['>@(additional_R_files)'],
+    'generated_src_dirs': ['>@(generated_R_dirs)'],
+    'generated_R_dirs': [],
+    'additional_R_files': [],
+    'has_java_resources%': 0,
+  },
+  'conditions': [
+    ['has_java_resources == 1', {
+      'variables': {
+        'res_dir': '<(java_in_dir)/res',
+        'crunched_res_dir': '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/res',
+        'R_dir': '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/java_R',
+        'R_file': '<(R_dir)/<(R_package_relpath)/R.java',
+        'generated_src_dirs': ['<(R_dir)'],
+        'additional_input_paths': ['<(R_file)'],
+      },
+      'all_dependent_settings': {
+        'variables': {
+          # Dependent jars include this target's R.java file via
+          # generated_R_dirs and additional_R_files.
+          'generated_R_dirs': ['<(R_dir)'],
+          'additional_R_files': ['<(R_file)'],
+
+          # Dependent APKs include this target's resources via
+          # additional_res_dirs and additional_res_packages.
+          'additional_res_dirs': ['<(crunched_res_dir)', '<(res_dir)'],
+          'additional_res_packages': ['<(R_package)'],
+        },
+      },
+      'actions': [
+        # Generate R.java and crunch image resources.
+        {
+          'action_name': 'process_resources',
+          'message': 'processing resources for <(package_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/process_resources.py',
+            '<!@(find <(res_dir) -type f)',
+          ],
+          'outputs': [
+            '<(R_file)',
+          ],
+          'action': [
+            '<(DEPTH)/build/android/process_resources.py',
+            '--android-sdk', '<(android_sdk)',
+            '--android-sdk-tools', '<(android_sdk_tools)',
+            '--R-package', '<(R_package)',
+            '--R-dir', '<(R_dir)',
+            '--res-dir', '<(res_dir)',
+            '--crunched-res-dir', '<(crunched_res_dir)',
+          ],
+        },
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'ant_<(package_name)',
+      'message': 'Building <(package_name) java sources.',
+      'inputs': [
+        'android/ant/common.xml',
+        'android/ant/chromium-jars.xml',
+        '>!@(find >(java_in_dir) >(additional_src_dirs) -name "*.java")',
+        '>@(input_jars_paths)',
+        '>@(additional_input_paths)',
+      ],
+      'outputs': [
+        '<(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar',
+      ],
+      'action': [
+        'ant',
+        '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '-DANDROID_SDK=<(android_sdk)',
+        '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+        '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+        '-DANDROID_SDK_VERSION=<(android_sdk_version)',
+        '-DANDROID_GDBSERVER=<(android_gdbserver)',
+        '-DPRODUCT_DIR=<(ant_build_out)',
+
+        '-DADDITIONAL_SRC_DIRS=>(additional_src_dirs)',
+        '-DGENERATED_SRC_DIRS=>(generated_src_dirs)',
+        '-DINPUT_JARS_PATHS=>(input_jars_paths)',
+        '-DPACKAGE_NAME=<(package_name)',
+        '-DJAVAC_INCLUDES=>(javac_includes)',
+
+        '-Dbasedir=<(java_in_dir)',
+        '-buildfile',
+        '<(DEPTH)/build/android/ant/chromium-jars.xml'
+      ]
+    },
+  ],
+}
diff --git a/src/build/java_aidl.gypi b/src/build/java_aidl.gypi
new file mode 100644
index 0000000..bb8f2b8
--- /dev/null
+++ b/src/build/java_aidl.gypi
@@ -0,0 +1,78 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'aidl_aidl-file-name',
+#   'type': 'none',
+#   'variables': {
+#     'package_name': <name-of-package>
+#     'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+#     'aidl_import_include': '<(DEPTH)/<path-to-src-dir>',
+#   },
+#   'sources': [
+#     '<input-path1>/<input-file1>.aidl',
+#     '<input-path2>/<input-file2>.aidl',
+#     ...
+#   ],
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+#
+# The generated java files will be:
+#   <(PRODUCT_DIR)/lib.java/<input-file1>.java
+#   <(PRODUCT_DIR)/lib.java/<input-file2>.java
+#   ...
+#
+# Optional variables:
+#  aidl_import_include - This should be an absolute path to your java src folder
+#    that contains the classes that are imported by your aidl files.
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
+
+{
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': ['<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/'],
+    },
+  },
+  'variables': {
+    'aidl_import_include%': '',
+    'additional_aidl_arguments': [],
+    'additional_aidl_input_paths': [],
+  },
+  'conditions': [
+    ['"<(aidl_import_include)"!=""', {
+      'variables': {
+        'additional_aidl_arguments': [ '-I<(aidl_import_include)' ],
+        'additional_aidl_input_paths': [ '<!@(find <(aidl_import_include) -name "*.java")', ]
+      }
+    }],
+  ],
+  'rules': [
+    {
+      'rule_name': 'compile_aidl',
+      'extension': 'aidl',
+      'inputs': [
+        '<(android_sdk)/framework.aidl',
+        '<(aidl_interface_file)',
+        '<@(additional_aidl_input_paths)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/<(RULE_INPUT_ROOT).java',
+      ],
+      'action': [
+        '<(android_sdk_tools)/aidl',
+        '-p<(android_sdk)/framework.aidl',
+        '-p<(aidl_interface_file)',
+        '<@(additional_aidl_arguments)',
+        '<(RULE_INPUT_PATH)',
+        '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/<(RULE_INPUT_ROOT).java',
+      ],
+    },
+  ],
+}
diff --git a/src/build/java_apk.gypi b/src/build/java_apk.gypi
new file mode 100644
index 0000000..6cacee1
--- /dev/null
+++ b/src/build/java_apk.gypi
@@ -0,0 +1,220 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Android APKs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'variables': {
+#     'package_name': 'my_package',
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'res',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Note that this assumes that there's an ant buildfile <package_name>_apk.xml in
+# java_in_dir. So, if you have package_name="content_shell" and
+# java_in_dir="content/shell/android/java" you should have a directory structure
+# like:
+#
+# content/shell/android/java/content_shell_apk.xml
+# content/shell/android/java/src/org/chromium/base/Foo.java
+# content/shell/android/java/src/org/chromium/base/Bar.java
+#
+# Required variables:
+#  package_name - Used to name the intermediate output directory and in the
+#    names of some output files.
+#  apk_name - The final apk will be named <apk_name>.apk
+#  java_in_dir - The top-level java directory. The src should be in
+#    <java_in_dir>/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_res_dirs - Additional directories containing Android resources.
+#  additional_res_packages - Package names of the R.java files corresponding to
+#    each directory in additional_res_dirs.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  asset_location - The directory where assets are located (default:
+#    <PRODUCT_DIR>/<package_name>/assets).
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  is_test_apk - Set to 1 if building a test apk.  This prevents resources from
+#    dependencies from being re-included.
+#  native_libs_paths - The path to any native library to be included in this
+#    target. This should be a path in <(SHARED_LIB_DIR). A stripped copy of
+#    the library will be included in the apk and symbolic links to the
+#    unstripped copy will be added to <(android_product_out) to enable native
+#    debugging.
+#  resource_dir - The directory for resources.
+#  R_package - A custom Java package to generate the resource file R.java in.
+#    By default, the package given in AndroidManifest.xml will be used.
+
+{
+  'variables': {
+    'asset_location%': '',
+    'additional_input_paths': [],
+    'input_jars_paths': [],
+    'additional_src_dirs': [],
+    'generated_src_dirs': [],
+    'app_manifest_version_name%': '<(android_app_version_name)',
+    'app_manifest_version_code%': '<(android_app_version_code)',
+    'proguard_enabled%': 'false',
+    'proguard_flags%': '',
+    'native_libs_paths': [],
+    'jar_name%': 'chromium_apk_<(package_name).jar',
+    'resource_dir%':'',
+    'R_package%':'',
+    'additional_res_dirs': [],
+    'additional_res_packages': [],
+    'is_test_apk%': 0,
+    'conditions' : [
+      ['OS == "lb_shell" and target_arch == "android"', {
+        'chromium_src_dir' : '<(ant_build_out)/../../../external/chromium',
+      },{
+        'chromium_src_dir' : '<(ant_build_out)/../..',
+      }],
+    ],
+  },
+  'sources': [
+      '<@(native_libs_paths)'
+  ],
+  # Pass the jar path to the apk's "fake" jar target.  This would be better as
+  # direct_dependent_settings, but a variable set by a direct_dependent_settings
+  # cannot be lifted in a dependent to all_dependent_settings.
+  'all_dependent_settings': {
+    'variables': {
+      'apk_output_jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
+    },
+  },
+  'rules': [
+    {
+      'rule_name': 'copy_and_strip_native_libraries',
+      'extension': 'so',
+      'variables': {
+        'stripped_library_path': '<(PRODUCT_DIR)/<(package_name)/libs/<(android_app_abi)/<(RULE_INPUT_ROOT).so',
+      },
+      'outputs': [
+        '<(stripped_library_path)',
+      ],
+      # There is no way to do 2 actions for each source library in gyp. So to
+      # both strip the library and create the link in <(link_dir) a separate
+      # script is required.
+      'action': [
+        '<(DEPTH)/build/android/prepare_library_for_apk',
+        '<(android_strip)',
+        '<(RULE_INPUT_PATH)',
+        '<(stripped_library_path)',
+      ],
+    },
+  ],
+  'actions': [
+    {
+      # This action updates the timestamp of AndroidManifest.xml
+      # The aapt package step doesn't know to repackage APK resources
+      # if only the version code changes,
+      # while a change to a resource file will cause a repackage.
+      # Note that ninja only runs this action
+      # when <(android_app_version_code) changes.
+      'action_name': 'touch_manifest_for_apk_repackage',
+      'message': 'Touching AndroidManifest.xml',
+      'inputs': [],
+      'outputs': ['<(java_in_dir)/AndroidManifest.xml'],
+      'action': [
+        # -c to 'touch' means don't create any files.
+        # <(android_app_version_code) is not a file, this is just a way to
+        # have ninja consider it a dependency.
+        'touch', '-c', '<(java_in_dir)/AndroidManifest.xml', '<(android_app_version_code)'
+      ],
+    },
+    {
+      'action_name': 'ant_<(package_name)_apk',
+      'message': 'Building <(package_name) apk.',
+      'inputs': [
+        '<(java_in_dir)/AndroidManifest.xml',
+        '<(DEPTH)/build/android/ant/chromium-apk.xml',
+        '<(DEPTH)/build/android/ant/common.xml',
+        '<(DEPTH)/build/android/ant/sdk-targets.xml',
+        # If there is a separate find for additional_src_dirs, it will find the
+        # wrong .java files when additional_src_dirs is empty.
+        '>!@(find >(java_in_dir) >(additional_src_dirs) -name "*.java")',
+        '>@(input_jars_paths)',
+        '>@(native_libs_paths)',
+        '>@(additional_input_paths)',
+      ],
+      'conditions': [
+        ['resource_dir!=""', {
+          'inputs': ['<!@(find <(java_in_dir)/<(resource_dir) -name "*")']
+        }],
+        ['is_test_apk == 1', {
+          'variables': {
+            'additional_res_dirs=': [],
+            'additional_res_packages=': [],
+          }
+        }],
+        ['proguard_enabled == "true" and proguard_flags != ""', {
+          'inputs': ['<(java_in_dir)/<(proguard_flags)']
+        }]
+      ],
+      'outputs': [
+        '<(PRODUCT_DIR)/apks/<(apk_name).apk',
+      ],
+      'action': [
+        'ant',
+        '-DAPP_ABI=<(android_app_abi)',
+        '-DANDROID_GDBSERVER=<(android_gdbserver)',
+        '-DANDROID_SDK=<(android_sdk)',
+        '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+        '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+        '-DANDROID_SDK_VERSION=<(android_sdk_version)',
+        '-DANDROID_TOOLCHAIN=<(android_toolchain)',
+        '-DCHROMIUM_SRC=<(chromium_src_dir)',
+        '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '-DPRODUCT_DIR=<(ant_build_out)',
+
+        '-DAPK_NAME=<(apk_name)',
+        '-DASSET_DIR=<(asset_location)',
+        '-DADDITIONAL_SRC_DIRS=>(additional_src_dirs)',
+        '-DGENERATED_SRC_DIRS=>(generated_src_dirs)',
+        '-DINPUT_JARS_PATHS=>(input_jars_paths)',
+        '-DJAR_NAME=<(jar_name)',
+        '-DPACKAGE_NAME=<(package_name)',
+        '-DRESOURCE_DIR=<(resource_dir)',
+        '-DADDITIONAL_RES_DIRS=>(additional_res_dirs)',
+        '-DADDITIONAL_RES_PACKAGES=>(additional_res_packages)',
+        '-DAPP_MANIFEST_VERSION_NAME=<(app_manifest_version_name)',
+        '-DAPP_MANIFEST_VERSION_CODE=<(app_manifest_version_code)',
+        '-DPROGUARD_FLAGS=>(proguard_flags)',
+        '-DPROGUARD_ENABLED=>(proguard_enabled)',
+
+        '-Dbasedir=<(java_in_dir)',
+        '-buildfile',
+        '<(DEPTH)/build/android/ant/chromium-apk.xml',
+
+        # Specify CONFIGURATION_NAME as the target for ant to build. The
+        # buildfile will then build the appropriate SDK tools target.
+        '<(CONFIGURATION_NAME)',
+      ]
+    },
+  ],
+  'conditions': [
+    ['R_package != ""', {
+      'variables': {
+        # We generate R.java in package R_package (in addition to the package
+        # listed in the AndroidManifest.xml, which is unavoidable).
+        'additional_res_dirs': ['<(DEPTH)/build/android/ant/empty/res'],
+        'additional_res_packages': ['<(R_package)'],
+      },
+    }],
+  ],
+}
diff --git a/src/build/jni_generator.gypi b/src/build/jni_generator.gypi
new file mode 100644
index 0000000..837d9ab
--- /dev/null
+++ b/src/build/jni_generator.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'base_jni_headers',
+#    'type': 'none',
+#    'sources': [
+#      'android/java/src/org/chromium/base/BuildInfo.java',
+#      ...
+#      ...
+#      'android/java/src/org/chromium/base/SystemMessageHandler.java',
+#    ],
+#    'variables': {
+#      'jni_gen_dir': 'base',
+#    },
+#    'includes': [ '../build/jni_generator.gypi' ],
+#  },
+#
+# The generated file name pattern can be seen on the "outputs" section below.
+# (note that RULE_INPUT_ROOT is the basename for the java file).
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+  },
+  'rules': [
+    {
+      'rule_name': 'generate_jni_headers',
+      'extension': 'java',
+      'inputs': [
+        '<(jni_generator)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni/<(RULE_INPUT_ROOT)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '--input_file',
+        '<(RULE_INPUT_PATH)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni',
+      ],
+      'message': 'Generating JNI bindings from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/json_schema_bundle_compile.gypi b/src/build/json_schema_bundle_compile.gypi
new file mode 100644
index 0000000..ecefe41
--- /dev/null
+++ b/src/build/json_schema_bundle_compile.gypi
@@ -0,0 +1,62 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   idl_schema_files: an array of idl files that comprise the api model.
+    #   cc_dir: path to generated files
+    #   root_namespace: the C++ namespace that all generated files go under
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+  },
+  'actions': [
+    {
+      'action_name': 'genapi_bundle',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/idl_schema.py',
+        '<(api_gen_dir)/json_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/schema_bundle_generator.py',
+        '<(api_gen_dir)/util_cc_helper.py',
+        '<@(idl_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_api.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--bundle',
+        '<@(idl_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code',
+      'process_outputs_as_sources': 1,
+    }
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/json_schema_compile.gypi b/src/build/json_schema_compile.gypi
new file mode 100644
index 0000000..6c8f69c
--- /dev/null
+++ b/src/build/json_schema_compile.gypi
@@ -0,0 +1,110 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   json_schema_files: a list of json files that comprise the api model.
+    #   idl_schema_files: a list of IDL files that comprise the api model.
+    #   cc_dir: path to generated files
+    #   root_namespace: the C++ namespace that all generated files go under
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+  },
+  'rules': [
+    {
+      'rule_name': 'genapi',
+      'extension': 'json',
+      'inputs': [
+        '<(api_gen_dir)/any.cc',
+        '<(api_gen_dir)/any.h',
+        '<(api_gen_dir)/any_helper.py',
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/json_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(json_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
+      'process_outputs_as_sources': 1,
+    },
+    {
+      'rule_name': 'genapi_idl',
+      'msvs_external_rule': 1,
+      'extension': 'idl',
+      'inputs': [
+        '<(api_gen_dir)/any.cc',
+        '<(api_gen_dir)/any.h',
+        '<(api_gen_dir)/any_helper.py',
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/idl_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(idl_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'dependencies':[
+    '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/json_to_struct.gypi b/src/build/json_to_struct.gypi
new file mode 100644
index 0000000..130f6d1
--- /dev/null
+++ b/src/build/json_to_struct.gypi
@@ -0,0 +1,49 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_file: a json file that comprise the structure model.
+    #   namespace: the C++ namespace that all generated files go under
+    #   cc_dir: path to generated files
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'struct_gen_dir': '<(DEPTH)/tools/json_to_struct',
+    'struct_gen': '<(struct_gen_dir)/json_to_struct.py',
+  },
+  'rules': [
+    {
+      'rule_name': 'genstaticinit',
+      'extension': 'json',
+      'inputs': [
+        '<(struct_gen_dir)/element_generator.py',
+        '<(struct_gen_dir)/json_to_struct.py',
+        '<(struct_gen_dir)/struct_generator.py',
+        '<(schema_file)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(struct_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--destbase=<(SHARED_INTERMEDIATE_DIR)',
+        '--destdir=<(cc_dir)',
+        '--namespace=<(namespace)',
+        '--schema=<(schema_file)',
+      ],
+      'message': 'Generating C++ static initializers from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/landmines.py b/src/build/landmines.py
new file mode 100755
index 0000000..4cd5722
--- /dev/null
+++ b/src/build/landmines.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file holds a list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+
+This script runs every build as a hook. If it detects that the build should
+be clobbered, it will touch the file <build_dir>/.landmine_triggered. The
+various build scripts will then check for the presence of this file and clobber
+accordingly. The script will also emit the reasons for the clobber to stdout.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import functools
+import gyp_helper
+import logging
+import optparse
+import os
+import shlex
+import sys
+import time
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless pure function"""
+  def memoizer(func):
+    # One-element list used as a mutable cell (Python 2 has no 'nonlocal');
+    # an empty list means "not computed yet".
+    val = []
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        # Substitute 'default' for a None return so a cached None is not
+        # mistaken for "not computed yet" on subsequent calls.
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
+
+
+@memoize()
+def IsWindows():
+  # cygwin reports its own sys.platform value, so it is checked explicitly.
+  return sys.platform.startswith('win') or sys.platform == 'cygwin'
+
+
+@memoize()
+def IsLinux():
+  # startswith covers both 'linux2' (Python 2) and plain 'linux'.
+  return sys.platform.startswith('linux')
+
+
+@memoize()
+def IsMac():
+  # True on OS X hosts, where sys.platform is 'darwin'.
+  return sys.platform.startswith('darwin')
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  # Each whitespace-separated token is expected to be key=value; splitting on
+  # the first '=' only lets values themselves contain '='.
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', 'ib', ''
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+  elif IsWindows():
+    if 'CHROME_HEADLESS' in os.environ:
+      return 'ib' # use (win and !goma and headless) as approximation of ib
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targeted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  # An explicit OS= in GYP_DEFINES wins over host-OS detection.
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    # Any host that is neither Windows nor Linux is assumed to be a Mac.
+    return 'mac'
+
+
+@memoize()
+def builder():
+  """
+  Returns a string representing the build engine (not compiler) to use.
+  Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+  """
+  if 'GYP_GENERATORS' in os.environ:
+    # for simplicity, only support the first explicit generator
+    generator = os.environ['GYP_GENERATORS'].split(',')[0]
+    if generator.endswith('-android'):
+      # e.g. 'make-android' -> 'make'.
+      return generator.split('-')[0]
+    else:
+      return generator
+  else:
+    # No explicit generator: infer from the target platform / host OS.
+    if platform() == 'android':
+      # Good enough for now? Do any android bots use make?
+      return 'ninja'
+    elif platform() == 'ios':
+      return 'xcode'
+    elif IsWindows():
+      return 'msvs'
+    elif IsLinux():
+      return 'make'
+    elif IsMac():
+      return 'xcode'
+    else:
+      assert False, 'Don\'t know what builder we\'re using!'
+
+
+def get_landmines(target):
+  """
+  ALL LANDMINES ARE DEFINED HERE.
+  target is 'Release' or 'Debug'
+  """
+  landmines = []
+  add = lambda item: landmines.append(item + '\n')
+
+  if (distributor() == 'goma' and platform() == 'win32' and
+      builder() == 'ninja'):
+    add('Need to clobber winja goma due to backend cwd cache fix.')
+  if platform() == 'android':
+    add('Clean android out directories to reduce zip size.')
+
+  return landmines
+
+
+def get_target_build_dir(build_tool, target, is_iphone=False):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out\Release'
+    '/mnt/data/b/build/slave/linux/build/src/out/Debug'
+    '/b/build/slave/ios_rel_device/build/src/xcodebuild/Release-iphoneos'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+
+  Args:
+    build_tool: one of 'xcode', 'make', 'ninja', 'msvs', 'vs', 'ib', 'scons';
+        anything else raises NotImplementedError.
+    target: configuration name, e.g. 'Release' or 'Debug'.
+    is_iphone: when True and build_tool is 'xcode', '-iphoneos' is appended.
+  """
+  ret = None
+  if build_tool == 'xcode':
+    ret = os.path.join(SRC_DIR, 'xcodebuild',
+        target + ('-iphoneos' if is_iphone else ''))
+  elif build_tool == 'make':
+    ret = os.path.join(SRC_DIR, 'out', target)
+  elif build_tool == 'ninja':
+    ret = os.path.join(SRC_DIR, 'out', target)
+  elif build_tool in ['msvs', 'vs', 'ib']:
+    # Visual Studio variants and IncrediBuild share the same output layout.
+    ret = os.path.join(SRC_DIR, 'build', target)
+  elif build_tool == 'scons':
+    ret = os.path.join(SRC_DIR, 'sconsbuild', target)
+  else:
+    raise NotImplementedError()
+  return os.path.abspath(ret)
+
+
+def set_up_landmines(target):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_target_build_dir(builder(), target, platform() == 'ios')
+
+  landmines_path = os.path.join(out_dir, '.landmines')
+  if not os.path.exists(out_dir):
+    os.makedirs(out_dir)
+
+  new_landmines = get_landmines(target)
+
+  if not os.path.exists(landmines_path):
+    # First run in this output directory: just record the current landmines.
+    with open(landmines_path, 'w') as f:
+      f.writelines(new_landmines)
+  else:
+    triggered = os.path.join(out_dir, '.landmines_triggered')
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      # The unified diff of the two landmine lists doubles as the
+      # human-readable clobber reason written into the trigger file.
+      # NOTE(review): .landmines itself is not rewritten here, so the trigger
+      # file is recreated on every run until the list file is updated
+      # externally (presumably by the clobbering build step) — confirm.
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+
+      with open(triggered, 'w') as f:
+        f.writelines(diff)
+    elif os.path.exists(triggered):
+      # Remove false triggered landmines.
+      os.remove(triggered)
+
+
+def main():
+  """Plants landmines for Debug and Release; returns the process exit code."""
+  parser = optparse.OptionParser()
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+  options, args = parser.parse_args()
+
+  # This script takes options only; any positional argument is a mistake.
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  # Apply the same GYP environment the build itself will use, so gyp_defines()
+  # below sees the real values.
+  gyp_helper.apply_chromium_gyp_env()
+
+  for target in ('Debug', 'Release'):
+    set_up_landmines(target)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/linux/chrome_linux.croc b/src/build/linux/chrome_linux.croc
new file mode 100644
index 0000000..f400306
--- /dev/null
+++ b/src/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/src/build/linux/dump_app_syms b/src/build/linux/dump_app_syms
new file mode 100755
index 0000000..632bcc7
--- /dev/null
+++ b/src/build/linux/dump_app_syms
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+
+set -e
+
+usage() {
+  # Print the four-argument usage line to stderr; '-n' on the first echo
+  # keeps both halves on a single line.
+  echo -n "$0 <dump_syms_exe> <strip_binary> " >&2
+  echo "<binary_with_symbols> <symbols_output>" >&2
+}
+
+
+if [ $# -ne 4 ]; then
+  usage
+  exit 1
+fi
+
+# NOTE(review): SCRIPTDIR is computed but never referenced below — confirm
+# whether it can be removed.
+SCRIPTDIR="$(readlink -f "$(dirname "$0")")"
+DUMPSYMS="$1"
+STRIP_BINARY="$2"
+INFILE="$3"
+OUTFILE="$4"
+
+# Dump the symbols from the given binary.
+# Skipped when OUTFILE already exists and INFILE is not newer than it
+# (make-style freshness check via 'test -nt').
+if [ ! -e "$OUTFILE" -o "$INFILE" -nt "$OUTFILE" ]; then
+  "$DUMPSYMS" "$INFILE" > "$OUTFILE"
+fi
+
+# Strip the binary in place unless explicitly disabled by passing "0".
+if [ "$STRIP_BINARY" != "0" ]; then
+  strip "$INFILE"
+fi
diff --git a/src/build/linux/gsettings.h b/src/build/linux/gsettings.h
new file mode 100644
index 0000000..1ea6598
--- /dev/null
+++ b/src/build/linux/gsettings.h
@@ -0,0 +1,30 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_LINUX_GSETTINGS_H_
+#define BUILD_LINUX_GSETTINGS_H_
+
+#include <gio/gio.h>
+
+// The GSettings API was not part of GIO until GIO version 2.26,
+// while Ubuntu 10.04 Lucid ships with version 2.24.
+//
+// To allow compiling on Lucid those forward declarations are provided.
+//
+// If compiling with GIO version 2.26, these won't conflict,
+// because they're identical to the types defined.
+//
+// TODO(phajdan.jr): This will no longer be needed after switch to Precise,
+// see http://crbug.com/158577 .
+struct _GSettings;
+typedef struct _GSettings GSettings;
+GSettings* g_settings_new(const gchar* schema);
+GSettings* g_settings_get_child(GSettings* settings, const gchar* name);
+gboolean g_settings_get_boolean(GSettings* settings, const gchar* key);
+gchar* g_settings_get_string(GSettings* settings, const gchar* key);
+gint g_settings_get_int(GSettings* settings, const gchar* key);
+gchar** g_settings_get_strv(GSettings* settings, const gchar* key);
+const gchar* const* g_settings_list_schemas();
+
+#endif  // BUILD_LINUX_GSETTINGS_H_
diff --git a/src/build/linux/install-arm-sysroot.py b/src/build/linux/install-arm-sysroot.py
new file mode 100755
index 0000000..88e1fd6
--- /dev/null
+++ b/src/build/linux/install-arm-sysroot.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install arm chroot image for cross building of arm chrome on linux.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks this script should be a no-op on non-linux
+# platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of precise/arm but for consistency we currently use a pre-built root
+# image which was originally designed for building trusted NaCl code. The image
+# will normally need to be rebuilt every time chrome's build dependencies are
+# changed.
+
+import os
+import shutil
+import subprocess
+import sys
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+URL_PREFIX = 'https://commondatastorage.googleapis.com/nativeclient-archive2/toolchain'
+REVISION = 8003
+
+
+def main(args):
+  if '--linux-only' in args:
+    # This argument is passed when run from the gclient hooks.
+    # In this case we return early on non-linux platforms
+    # or if GYP_DEFINES doesn't include target_arch=arm
+    if not sys.platform.startswith('linux'):
+      return 0
+
+    if "target_arch=arm" not in os.environ.get('GYP_DEFINES', ''):
+      return 0
+
+  src_root = os.path.dirname(os.path.dirname(SCRIPT_DIR))
+  sysroot = os.path.join(src_root, 'arm-sysroot')
+  url = "%s/%s/naclsdk_linux_arm-trusted.tgz" % (URL_PREFIX, REVISION)
+
+  stamp = os.path.join(sysroot, ".stamp")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print "ARM root image already up-to-date: %s" % sysroot
+        return 0
+
+  print "Installing ARM root image: %s" % sysroot
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, 'naclsdk_linux_arm-trusted.tgz')
+  subprocess.check_call(['curl', '-L', url, '-o', tarball])
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/src/build/linux/pkg-config-wrapper b/src/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000..c39e5cd
--- /dev/null
+++ b/src/build/linux/pkg-config-wrapper
@@ -0,0 +1,47 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: a
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+
+root="$1"
+shift
+target_arch="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+if [ "$target_arch" = "x64" ]
+then
+  libpath="lib64"
+else
+  libpath="lib"
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/src/build/linux/python_arch.sh b/src/build/linux/python_arch.sh
new file mode 100755
index 0000000..01e41d0
--- /dev/null
+++ b/src/build/linux/python_arch.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This figures out the architecture of the version of Python we are building
+# pyautolib against.
+#
+#  python_arch.sh /usr/lib/libpython2.5.so.1.0
+#  python_arch.sh /path/to/sysroot/usr/lib/libpython2.4.so.1.0
+#
+
+python=$(readlink -f "$1")
+if [ ! -r "$python" ]; then
+  echo unknown
+  exit 0
+fi
+file_out=$(file "$python")
+if [ $? -ne 0 ]; then
+  echo unknown
+  exit 0
+fi
+
+echo $file_out | grep -qs "ARM"
+if [ $? -eq 0 ]; then
+  echo arm
+  exit 0
+fi
+
+echo $file_out | grep -qs "x86-64"
+if [ $? -eq 0 ]; then
+  echo x64
+  exit 0
+fi
+
+echo $file_out | grep -qs "Intel 80386"
+if [ $? -eq 0 ]; then
+  echo ia32
+  exit 0
+fi
+
+exit 1
diff --git a/src/build/linux/rewrite_dirs.py b/src/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..30f22f0
--- /dev/null
+++ b/src/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other options to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/linux/system.gyp b/src/build/linux/system.gyp
new file mode 100644
index 0000000..041afd5
--- /dev/null
+++ b/src/build/linux/system.gyp
@@ -0,0 +1,724 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'conditions': [
+      ['sysroot!=""', {
+        'pkg-config': './pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+      }, {
+        'pkg-config': 'pkg-config'
+      }]
+    ],
+
+    'linux_link_libpci%': 0,
+  },
+  'conditions': [
+    [ 'os_posix==1 and OS!="mac"', {
+      'variables': {
+        # We use our own copy of libssl3, although we still need to link against
+        # the rest of NSS.
+        'use_system_ssl%': 0,
+      },
+    }, {
+      'variables': {
+        'use_system_ssl%': 1,
+      },
+    }],
+    [ 'chromeos==0', {
+      # Hide GTK and related dependencies for Chrome OS, so they won't get
+      # added back to Chrome OS. Don't try to use GTK on Chrome OS.
+      'targets': [
+        {
+          'target_name': 'gtk',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'variables': {
+            # gtk requires gmodule, but it does not list it as a dependency
+            # in some misconfigured systems.
+            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
+          },
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtkprint',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gdk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gdk-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+      ],  # targets
+    }],
+  ],  # conditions
+  'targets': [
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_openssl==1', {
+              'dependencies': [
+                '../../third_party/openssl/openssl.gyp:openssl',
+              ],
+            }],
+            ['use_openssl==0 and use_system_ssl==0', {
+              'dependencies': [
+                '../../net/third_party/nss/ssl.gyp:libssl',
+                '../../third_party/zlib/zlib.gyp:zlib',
+              ],
+              'direct_dependent_settings': {
+                'include_dirs+': [
+                  # We need for our local copies of the libssl3 headers to come
+                  # before other includes, as we are shadowing system headers.
+                  '<(DEPTH)/net/third_party/nss/ssl',
+                ],
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+                ],
+              },
+            }],
+            ['use_openssl==0 and use_system_ssl==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+                'defines': [
+                  'USE_SYSTEM_SSL',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l nss)',
+                ],
+              },
+            }],
+          ]
+        }],
+      ],
+    },
+    {
+      'target_name': 'freetype2',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags freetype2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l freetype2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'fontconfig',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags fontconfig)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l fontconfig)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gconf',
+      'type': 'none',
+      'conditions': [
+        ['use_gconf==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gconf-2.0)',
+            ],
+            'defines': [
+              'USE_GCONF',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gio',
+      'type': 'static_library',
+      'conditions': [
+        ['use_gio==1 and _toolset=="target"', {
+          'dependencies': [
+            '../../base/base.gyp:base',
+          ],
+          'cflags': [
+            '<!@(<(pkg-config) --cflags gio-2.0)',
+          ],
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gio-2.0)',
+            ],
+            'defines': [
+              'USE_GIO',
+            ],
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0 and OS=="linux"', {
+                'libraries': [
+                  '-ldl',
+                ],
+              }],
+            ],
+          },
+          'hard_dependency': 1,
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libgio.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libgio_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libgio_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibGioLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<gio/gio.h>',
+                         # TODO(phajdan.jr): This will no longer be needed
+                         # after switch to Precise, http://crbug.com/158577 .
+                         '--bundled-header', '"build/linux/gsettings.h"',
+                         '--link-directly=<(linux_link_gsettings)',
+                         'g_settings_new',
+                         'g_settings_get_child',
+                         'g_settings_get_string',
+                         'g_settings_get_boolean',
+                         'g_settings_get_int',
+                         'g_settings_get_strv',
+                         'g_settings_list_schemas',
+              ],
+              'message': 'Generating libgio library loader.',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'libpci',
+      'type': 'static_library',
+      'cflags': [
+        '<!@(<(pkg-config) --cflags libpci)',
+      ],
+      'dependencies': [
+        '../../base/base.gyp:base',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'conditions': [
+          ['linux_link_libpci==1', {
+            'link_settings': {
+              'ldflags': [
+                '<!@(<(pkg-config) --libs-only-L --libs-only-other libpci)',
+              ],
+              'libraries': [
+                '<!@(<(pkg-config) --libs-only-l libpci)',
+              ],
+            }
+          }],
+        ],
+      },
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libpci.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libpci_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+          },
+          'action_name': 'generate_libpci_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibPciLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<pci/pci.h>',
+                     # TODO(phajdan.jr): Report problem to pciutils project
+                     # and get it fixed so that we don't need --use-extern-c.
+                     '--use-extern-c',
+                     '--link-directly=<(linux_link_libpci)',
+                     'pci_alloc',
+                     'pci_init',
+                     'pci_cleanup',
+                     'pci_scan_bus',
+                     'pci_fill_info',
+                     'pci_lookup_name',
+          ],
+          'message': 'Generating libpci library loader.',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'x11',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags x11)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l x11 xi)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags x11)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l x11 xi)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'xext',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xext)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xext)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'xfixes',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xfixes)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xfixes)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libgcrypt',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target" and use_cups==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(libgcrypt-config --cflags)',
+            ],
+          },
+          'link_settings': {
+            'libraries': [
+              '<!@(libgcrypt-config --libs)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'selinux',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'link_settings': {
+            'libraries': [
+              '-lselinux',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gnome_keyring',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'conditions': [
+            ['linux_link_gnome_keyring!=0', {
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+                ],
+              },
+            }, {
+              'conditions': [
+                ['OS=="linux"', {
+                 'link_settings': {
+                   'libraries': [
+                     '-ldl',
+                   ],
+                 },
+                }],
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      # The unit tests use a few convenience functions from the GNOME
+      # Keyring library directly. We ignore linux_link_gnome_keyring and
+      # link directly in this version of the target to allow this.
+      # *** Do not use this target in the main binary! ***
+      'target_name': 'gnome_keyring_direct',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'dbus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-1)',
+        ],
+      },
+    },
+    {
+      'target_name': 'glib',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
+      },
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l <(glib_packages))',
+            ],
+          },
+        }],
+        ['chromeos==1', {
+          'link_settings': {
+            'libraries': [ '-lXtst' ]
+          }
+        }],
+      ],
+    },
+    {
+      'target_name': 'pangocairo',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags pangocairo)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l pangocairo)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags pangocairo)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other pangocairo)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l pangocairo)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libresolv',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lresolv',
+        ],
+      },
+    },
+    {
+      'target_name': 'ibus',
+      'type': 'none',
+      'conditions': [
+        ['use_ibus==1', {
+          'variables': {
+            'ibus_min_version': '1.3.99.20110425',
+          },
+          'direct_dependent_settings': {
+            'defines': ['HAVE_IBUS=1'],
+            'cflags': [
+              '<!@(<(pkg-config) --cflags "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'udev',
+      'type': 'none',
+      'conditions': [
+        # libudev is not available on *BSD
+        ['_toolset=="target" and os_bsd!=1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libudev)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libudev)',
+            ],
+          },
+        }],
+      ],
+    },
+  ],
+}
diff --git a/src/build/mac/change_mach_o_flags.py b/src/build/mac/change_mach_o_flags.py
new file mode 100755
index 0000000..c2aeaec
--- /dev/null
+++ b/src/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
+
+
+class MachOError(Exception):
+  """A class for exceptions thrown by this module."""
+
+  pass
+
+
+def CheckedSeek(file, offset):
+  """Seeks the file-like object at |file| to offset |offset| and raises a
+  MachOError if anything funny happens."""
+
+  file.seek(offset, os.SEEK_SET)
+  new_offset = file.tell()
+  if new_offset != offset:
+    raise MachOError, \
+          'seek: expected offset %d, observed %d' % (offset, new_offset)
+
+
+def CheckedRead(file, count):
+  """Reads |count| bytes from the file-like |file| object, raising a
+  MachOError if any other number of bytes is read."""
+
+  bytes = file.read(count)
+  if len(bytes) != count:
+    raise MachOError, \
+          'read: expected length %d, observed %d' % (count, len(bytes))
+
+  return bytes
+
+
+def ReadUInt32(file, endian):
+  """Reads an unsigned 32-bit integer from the file-like |file| object,
+  treating it as having endianness specified by |endian| (per the |struct|
+  module), and returns it as a number. Raises a MachOError if the proper
+  length of data can't be read from |file|."""
+
+  bytes = CheckedRead(file, 4)
+
+  (uint32,) = struct.unpack(endian + 'I', bytes)
+  return uint32
+
+
+def ReadMachHeader(file, endian):
+  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from the
+  file-like |file| object, treating it as having endianness specified by
+  |endian| (per the |struct| module), and returns a 7-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 28)
+
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      struct.unpack(endian + '7I', bytes)
+  return magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags
+
+
+def ReadFatArch(file):
+  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from the file-like
+  |file| object, treating it as big-endian (fat file headers are always
+  stored big-endian regardless of host), and returns a 5-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 20)
+
+  cputype, cpusubtype, offset, size, align = struct.unpack('>5I', bytes)
+  return cputype, cpusubtype, offset, size, align
+
+
+def WriteUInt32(file, uint32, endian):
+  """Writes |uint32| as an unsigned 32-bit integer to the file-like |file|
+  object, treating it as having endianness specified by |endian| (per the
+  |struct| module)."""
+
+  bytes = struct.pack(endian + 'I', uint32)
+  assert len(bytes) == 4
+
+  file.write(bytes)
+
+
+def HandleMachOFile(file, options, offset=0):
+  """Seeks the file-like |file| object to |offset|, reads its |mach_header|,
+  and rewrites the header's |flags| field if appropriate. The header's
+  endianness is detected. Both 32-bit and 64-bit Mach-O headers are supported
+  (mach_header and mach_header_64). Raises MachOError if used on a header that
+  does not have a known magic number or is not of type MH_EXECUTE. The
+  MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared in the |flags| field
+  according to |options| and written to |file| if any changes need to be made.
+  If already set or clear as specified by |options|, nothing is written."""
+
+  CheckedSeek(file, offset)
+  magic = ReadUInt32(file, '<')
+  if magic == MH_MAGIC or magic == MH_MAGIC_64:
+    endian = '<'
+  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
+    endian = '>'
+  else:
+    raise MachOError, \
+          'Mach-O file at offset %d has illusion of magic' % offset
+
+  CheckedSeek(file, offset)
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      ReadMachHeader(file, endian)
+  assert magic == MH_MAGIC or magic == MH_MAGIC_64
+  if filetype != MH_EXECUTE:
+    raise MachOError, \
+          'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' % \
+              (offset, filetype)
+
+  original_flags = flags
+
+  if options.no_heap_execution:
+    flags |= MH_NO_HEAP_EXECUTION
+  else:
+    flags &= ~MH_NO_HEAP_EXECUTION
+
+  if options.pie:
+    flags |= MH_PIE
+  else:
+    flags &= ~MH_PIE
+
+  if flags != original_flags:
+    CheckedSeek(file, offset + 24)
+    WriteUInt32(file, flags, endian)
+
+
+def HandleFatFile(file, options, fat_offset=0):
+  """Seeks the file-like |file| object to |fat_offset| and loops over its
+  |fat_header| entries, calling HandleMachOFile for each."""
+
+  CheckedSeek(file, fat_offset)
+  magic = ReadUInt32(file, '>')
+  assert magic == FAT_MAGIC
+
+  nfat_arch = ReadUInt32(file, '>')
+
+  for index in xrange(0, nfat_arch):
+    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
+    assert size >= 28
+
+    # HandleMachOFile will seek around. Come back here after calling it, in
+    # case it sought.
+    fat_arch_offset = file.tell()
+    HandleMachOFile(file, options, offset)
+    CheckedSeek(file, fat_arch_offset)
+
+
+def main(me, args):
+  parser = optparse.OptionParser('%prog [options] <executable_path>')
+  parser.add_option('--executable-heap', action='store_false',
+                    dest='no_heap_execution', default=True,
+                    help='Clear the MH_NO_HEAP_EXECUTION bit')
+  parser.add_option('--no-pie', action='store_false',
+                    dest='pie', default=True,
+                    help='Clear the MH_PIE bit')
+  (options, loose_args) = parser.parse_args(args)
+  if len(loose_args) != 1:
+    parser.print_usage()
+    return 1
+
+  executable_path = loose_args[0]
+  executable_file = open(executable_path, 'rb+')
+
+  magic = ReadUInt32(executable_file, '<')
+  if magic == FAT_CIGAM:
+    # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
+    HandleFatFile(executable_file, options)
+  elif magic == MH_MAGIC or magic == MH_CIGAM or \
+      magic == MH_MAGIC_64 or magic == MH_CIGAM_64:
+    HandleMachOFile(executable_file, options)
+  else:
+    raise MachOError, '%s is not a Mach-O or fat file' % executable_file
+
+  executable_file.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/src/build/mac/change_mach_o_flags_from_xcode.sh b/src/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 0000000..1824f8d
--- /dev/null
+++ b/src/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small wrapper script around change_mach_o_flags.py allowing it to
+# be invoked easily from Xcode. change_mach_o_flags.py expects its arguments
+# on the command line, but Xcode puts its parameters in the environment.
+
+set -e
+
+exec "$(dirname "${0}")/change_mach_o_flags.py" \
+     "${@}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/src/build/mac/chrome_mac.croc b/src/build/mac/chrome_mac.croc
new file mode 100644
index 0000000..8cde00c
--- /dev/null
+++ b/src/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, linux, or windows specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_mac\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/src/build/mac/copy_framework_unversioned.sh b/src/build/mac/copy_framework_unversioned.sh
new file mode 100755
index 0000000..380cc90
--- /dev/null
+++ b/src/build/mac/copy_framework_unversioned.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copies a framework to its new home, "unversioning" it.
+#
+# Normally, frameworks are versioned bundles.  The contents of a framework are
+# stored in a versioned directory within the bundle, and symbolic links
+# provide access to the actual code and resources.  See
+# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
+#
+# The symbolic links usually found in frameworks create problems.  Symbolic
+# links are excluded from code signatures.  That means that it's possible to
+# remove or retarget a symbolic link within a framework without affecting the
+# seal.  In Chrome's case, the outer .app bundle contains a framework where
+# all application code and resources live.  In order for the signature on the
+# .app to be meaningful, it encompasses the framework.  Because framework
+# resources are accessed through the framework's symbolic links, this
+# arrangement results in a case where the resources can be altered without
+# affecting the .app signature's validity.
+#
+# Indirection through symbolic links also carries a runtime performance
+# penalty on open() operations, although open() typically completes so quickly
+# that this is not considered a major performance problem.
+#
+# To resolve these problems, the frameworks that ship within Chrome's .app
+# bundle are unversioned.  Unversioning is simple: instead of using the
+# original outer .framework directory as the framework that ships within the
+# .app, the inner versioned directory is used.  Instead of accessing bundled
+# resources through symbolic links, they are accessed directly.  In normal
+# situations, the only hard-coded use of the versioned directory is by dyld,
+# when loading the framework's code, but this is handled through a normal
+# Mach-O load command, and it is easy to adjust the load command to point to
+# the unversioned framework code rather than the versioned counterpart.
+#
+# The resulting framework bundles aren't strictly conforming, but they work
+# as well as normal versioned framework bundles.
+#
+# An option to skip running install_name_tool is available. By passing -I as
+# the first argument to this script, install_name_tool will be skipped. This
+# is only suitable for copied frameworks that will not be linked against, or
+# when install_name_tool will be run on any linker output when something is
+# linked against the copied framework. This option exists to allow signed
+# frameworks to pass through without subjecting them to any modifications that
+# would break their signatures.
+
+set -e
+
+RUN_INSTALL_NAME_TOOL=1
+if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
+  shift
+  RUN_INSTALL_NAME_TOOL=
+fi
+
+if [ $# -ne 2 ] ; then
+  echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
+  exit 1
+fi
+
+# FRAMEWORK should be a path to a versioned framework bundle, ending in
+# .framework.  DESTINATION_DIR is the directory that the unversioned framework
+# bundle will be copied to.
+
+FRAMEWORK="${1}"
+DESTINATION_DIR="${2}"
+
+FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
+if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
+  exit 1
+fi
+FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"
+
+# Find the current version.
+VERSIONS="${FRAMEWORK}/Versions"
+CURRENT_VERSION_LINK="${VERSIONS}/Current"
+CURRENT_VERSION_ID="$(readlink "${VERSIONS}/Current")"
+CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"
+
+# Make sure that the framework's structure makes sense as a versioned bundle.
+if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
+  exit 1
+fi
+
+DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"
+
+# Copy the versioned directory within the versioned framework to its
+# destination location.
+mkdir -p "${DESTINATION_DIR}"
+rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
+    --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"
+
+if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
+  # Adjust the Mach-O LC_ID_DYLIB load command in the framework.  This does not
+  # change the LC_LOAD_DYLIB load commands in anything that may have already
+  # linked against the framework.  Not all frameworks will actually need this
+  # to be changed.  Some frameworks may already be built with the proper
+  # LC_ID_DYLIB for use as an unversioned framework.  Xcode users can do this
+  # by setting LD_DYLIB_INSTALL_NAME to
+  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
+  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
+  # at link time.
+  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
+                         grep -A10 "^ *cmd LC_ID_DYLIB$" |
+                         grep -m1 "^ *name" |
+                         sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
+  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
+                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
+
+  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
+    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
+  fi
+fi
diff --git a/src/build/mac/edit_xibs.sh b/src/build/mac/edit_xibs.sh
new file mode 100755
index 0000000..a305455
--- /dev/null
+++ b/src/build/mac/edit_xibs.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (as you likely use ninja). Documentation:
+#   http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+GYP_GENERATORS=xcode python "$SRC/tools/gyp/gyp" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/src/build/mac/find_sdk.py b/src/build/mac/find_sdk.py
new file mode 100755
index 0000000..067be63
--- /dev/null
+++ b/src/build/mac/find_sdk.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+import sys
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+Usage:
+  python find_sdk.py 10.6  # Ignores SDKs < 10.6
+"""
+
+from optparse import OptionParser
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return map(int, re.findall(r'(\d+)', version_str))
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--verify",
+                    action="store_true", dest="verify", default=False,
+                    help="return the sdk argument and warn if it doesn't exist")
+  parser.add_option("--sdk_path",
+                    action="store", type="string", dest="sdk_path", default="",
+                    help="user-specified SDK path; bypasses verification")
+  (options, args) = parser.parse_args()
+  min_sdk_version = args[0]
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print >>sys.stderr, out
+    print >>sys.stderr, err
+    raise Exception(('Error %d running xcode-select, you might have to run '
+      '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
+      'if you are using Xcode 4.') % job.returncode)
+  # The Developer folder moved in Xcode 4.3.
+  xcode43_sdk_path = os.path.join(
+      out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
+  if os.path.isdir(xcode43_sdk_path):
+    sdk_dir = xcode43_sdk_path
+  else:
+    sdk_dir = os.path.join(out.rstrip(), 'SDKs')
+  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
+
+  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+    print >>sys.stderr, ''
+    print >>sys.stderr, '                                           vvvvvvv'
+    print >>sys.stderr, ''
+    print >>sys.stderr, \
+        'This build requires the %s SDK, but it was not found on your system.' \
+        % min_sdk_version
+    print >>sys.stderr, \
+        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
+    print >>sys.stderr, ''
+    print >>sys.stderr, '                                           ^^^^^^^'
+    print >>sys.stderr, ''
+    return min_sdk_version
+
+  return best_sdk
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  print main()
diff --git a/src/build/mac/make_more_helpers.sh b/src/build/mac/make_more_helpers.sh
new file mode 100755
index 0000000..6f5c474
--- /dev/null
+++ b/src/build/mac/make_more_helpers.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: make_more_helpers.sh <directory_within_contents> <app_name>
+#
+# This script creates additional helper .app bundles for Chromium, based on
+# the existing helper .app bundle, changing their Mach-O header's flags to
+# enable and disable various features. Based on Chromium Helper.app, it will
+# create Chromium Helper EH.app, which has the MH_NO_HEAP_EXECUTION bit
+# cleared to support Chromium child processes that require an executable heap,
+# and Chromium Helper NP.app, which has the MH_PIE bit cleared to support
+# Chromium child processes that cannot tolerate ASLR.
+#
+# This script expects to be called from the chrome_exe target as a postbuild,
+# and operates directly within the built-up browser app's versioned directory.
+#
+# Each helper is adjusted by giving it the proper bundle name, renaming the
+# executable, adjusting several Info.plist keys, and changing the executable's
+# Mach-O flags.
+
+set -eu
+
+make_helper() {
+  local containing_dir="${1}"
+  local app_name="${2}"
+  local feature="${3}"
+  local flags="${4}"
+
+  local helper_name="${app_name} Helper"
+  local helper_stem="${containing_dir}/${helper_name}"
+  local original_helper="${helper_stem}.app"
+  if [[ ! -d "${original_helper}" ]]; then
+    echo "${0}: error: ${original_helper} is a required directory" >& 2
+    exit 1
+  fi
+  local original_helper_exe="${original_helper}/Contents/MacOS/${helper_name}"
+  if [[ ! -f "${original_helper_exe}" ]]; then
+    echo "${0}: error: ${original_helper_exe} is a required file" >& 2
+    exit 1
+  fi
+
+  local feature_helper="${helper_stem} ${feature}.app"
+
+  rsync -acC --delete --include '*.so' "${original_helper}/" "${feature_helper}"
+
+  local helper_feature="${helper_name} ${feature}"
+  local helper_feature_exe="${feature_helper}/Contents/MacOS/${helper_feature}"
+  mv "${feature_helper}/Contents/MacOS/${helper_name}" "${helper_feature_exe}"
+
+  local change_flags="$(dirname "${0}")/change_mach_o_flags.py"
+  "${change_flags}" ${flags} "${helper_feature_exe}"
+
+  local feature_info="${feature_helper}/Contents/Info"
+  local feature_info_plist="${feature_info}.plist"
+
+  defaults write "${feature_info}" "CFBundleDisplayName" "${helper_feature}"
+  defaults write "${feature_info}" "CFBundleExecutable" "${helper_feature}"
+
+  cfbundleid="$(defaults read "${feature_info}" "CFBundleIdentifier")"
+  feature_cfbundleid="${cfbundleid}.${feature}"
+  defaults write "${feature_info}" "CFBundleIdentifier" "${feature_cfbundleid}"
+
+  cfbundlename="$(defaults read "${feature_info}" "CFBundleName")"
+  feature_cfbundlename="${cfbundlename} ${feature}"
+  defaults write "${feature_info}" "CFBundleName" "${feature_cfbundlename}"
+
+  # As usual, defaults might have put the plist into whatever format excites
+  # it, but Info.plists get converted back to the expected XML format.
+  plutil -convert xml1 "${feature_info_plist}"
+
+  # `defaults` also changes the file permissions, so make the file
+  # world-readable again.
+  chmod a+r "${feature_info_plist}"
+}
+
+if [[ ${#} -ne 2 ]]; then
+  echo "usage: ${0} <directory_within_contents> <app_name>" >& 2
+  exit 1
+fi
+
+DIRECTORY_WITHIN_CONTENTS="${1}"
+APP_NAME="${2}"
+
+CONTENTS_DIR="${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}"
+CONTAINING_DIR="${CONTENTS_DIR}/${DIRECTORY_WITHIN_CONTENTS}"
+
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "EH" "--executable-heap"
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "NP" "--no-pie"
diff --git a/src/build/mac/strip_from_xcode b/src/build/mac/strip_from_xcode
new file mode 100755
index 0000000..c26b9fb
--- /dev/null
+++ b/src/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it.  This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+  # Only strip in release mode.
+  exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+   [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+  # Strip everything (no special flags).  No-op.
+  true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+     [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+  # Strip debugging symbols and local symbols
+  FLAGS[${#FLAGS[@]}]=-S
+  FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+  # Don't strip static libraries.
+  exit 0
+else
+  # Warn, but don't treat this as an error.
+  echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
+  exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+  # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+  # Flags".
+  for stripflag in "${STRIPFLAGS}" ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved in
+  # environment variable by setting it as a build setting.  This isn't a
+  # standard Xcode setting.  It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/src/build/mac/strip_save_dsym b/src/build/mac/strip_save_dsym
new file mode 100755
index 0000000..ef08d83
--- /dev/null
+++ b/src/build/mac/strip_save_dsym
@@ -0,0 +1,341 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility.  Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing.  That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow.  On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast.  Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
# Returns a list of architectures contained in a Mach-O file.  The file can be
# a universal (fat) file, in which case there will be one list element for
# each contained architecture, or it can be a thin single-architecture Mach-O
# file, in which case the list will contain a single element identifying the
# architecture.  On error, returns an empty list.  Determines the architecture
# list by calling file.
def macho_archs(macho):
  """Returns the list of architecture names found in the Mach-O file |macho|.

  Parses the output of "/usr/bin/file -b" for both thin and universal (fat)
  files.  Returns [] on any error (unrecognized output or a nonzero exit
  status from file).
  """
  macho_types = ["executable",
                 "dynamically linked shared library",
                 "bundle"]
  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"

  # universal_newlines makes stdout text (str) on both Python 2 and 3.
  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
                              stdout=subprocess.PIPE,
                              universal_newlines=True)

  archs = []

  type_line = file_cmd.stdout.readline()
  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
  if type_match:
    # Thin file: the single architecture is in the first (only) line.
    archs.append(type_match.group(1))
  else:
    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
                          type_line)
    if type_match:
      # Fat file: one "(for architecture ...)" line per embedded slice.
      for i in range(0, int(type_match.group(1))):
        arch_line = file_cmd.stdout.readline()
        arch_match = re.match(
                     r"^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
                     arch_line)
        if arch_match:
          archs.append(arch_match.group(1))

  # Always collect the child's exit status.  The original code returned early
  # in the thin-file case, which both leaked the child's status and ignored a
  # failing exit code from file.
  if file_cmd.wait() != 0:
    archs = []

  if len(archs) == 0:
    sys.stderr.write("No architectures in %s\n" % macho)

  return archs
+
# Returns a dictionary mapping architectures contained in the file as returned
# by macho_archs to the LC_UUID load command for that architecture.
# Architectures with no LC_UUID load command are omitted from the dictionary.
# Determines the UUID value by calling otool.
def macho_uuids(macho):
  """Returns a dict mapping architecture name -> uppercase UUID string.

  The UUIDs are recovered by running "otool -l" once per architecture and
  parsing its load-command dump with a small line-oriented state machine,
  because the uuid output format differs between cctools/otool releases.
  Architectures whose slice carries no LC_UUID are left out of the result.
  """
  uuids = {}

  archs = macho_archs(macho)
  if len(archs) == 0:
    return uuids

  for arch in archs:
    if arch == "":
      continue

    # "-arch <arch>" selects one slice of a fat file; "-l" prints the load
    # commands.  The lone "-" ends otool's option processing so an unusual
    # filename cannot be mistaken for a switch.
    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
                                  macho],
                                 stdout=subprocess.PIPE)
    # state 0 is when nothing UUID-related has been seen yet.  State 1 is
    # entered after a load command begins, but it may not be an LC_UUID load
    # command.  States 2, 3, and 4 are intermediate states while reading an
    # LC_UUID command.  State 5 is the terminal state for a successful LC_UUID
    # read.  State 6 is the error state.
    state = 0
    uuid = ""
    for otool_line in otool_cmd.stdout:
      if state == 0:
        if re.match("^Load command .*$", otool_line):
          state = 1
      elif state == 1:
        if re.match("^     cmd LC_UUID$", otool_line):
          state = 2
        else:
          state = 0
      elif state == 2:
        # An LC_UUID load command is always 24 bytes; anything else means the
        # dump is not in the expected shape.
        if re.match("^ cmdsize 24$", otool_line):
          state = 3
        else:
          state = 6
      elif state == 3:
        # The UUID display format changed in the version of otool shipping
        # with the Xcode 3.2.2 prerelease.  The new format is traditional:
        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
        # and with Xcode 3.2.6, then line is indented one more space:
        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
        # The old format, from cctools-750 and older's otool, breaks the UUID
        # up into a sequence of bytes:
        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
                                  otool_line)
        if new_uuid_match:
          uuid = new_uuid_match.group(1)

          # Skip state 4, there is no second line to read.
          state = 5
        else:
          old_uuid_match = re.match("^   uuid 0x(..) 0x(..) 0x(..) 0x(..) "
                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
                                    otool_line)
          if old_uuid_match:
            # First half of the old byte-sequence format; reassemble into the
            # dashed UUID layout as we go.
            state = 4
            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
          else:
            state = 6
      elif state == 4:
        # Second line of the old byte-sequence format.
        old_uuid_match = re.match("^        0x(..) 0x(..) 0x(..) 0x(..) "
                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
                                  otool_line)
        if old_uuid_match:
          state = 5
          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
                  old_uuid_match.group(7) + old_uuid_match.group(8)
        else:
          state = 6

    if otool_cmd.wait() != 0:
      state = 6

    # Only a run that ended in the terminal success state contributes a UUID;
    # state 6 (error) and truncated reads are silently dropped for this arch.
    if state == 5:
      uuids[arch] = uuid.upper()

  if len(uuids) == 0:
    print >> sys.stderr, "No UUIDs in %s" % macho

  return uuids
+
# Given a path to a Mach-O file and possible information from the environment,
# determines the desired path to the .dSYM.
def dsym_path(macho):
  """Returns the path at which the .dSYM bundle for |macho| should live."""
  # When xcodebuild drives the build it exports WRAPPER_NAME (the bundle
  # name); in that case the .dSYM belongs next to the bundle rather than next
  # to the bare Mach-O file.
  wrapper = os.environ.get("WRAPPER_NAME")
  if wrapper is None:
    base = macho
  elif "BUILT_PRODUCTS_DIR" in os.environ:
    base = os.path.join(os.environ["BUILT_PRODUCTS_DIR"], wrapper)
  else:
    base = wrapper

  return base + ".dSYM"
+
# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
# architectures and UUIDs specified by the uuids map.
def make_fake_dsym(macho, dsym):
  """Builds a fake .dSYM bundle directory at |dsym| for |macho|.

  Copies the unstripped Mach-O file into Contents/Resources/DWARF and writes
  an Info.plist carrying the per-architecture UUIDs.  Returns True on
  success, False if no UUIDs could be determined for |macho|.
  """
  uuids = macho_uuids(macho)
  if len(uuids) == 0:
    return False

  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
  try:
    os.makedirs(dwarf_dir)
  except OSError as e:
    # A pre-existing bundle directory is fine (e.g. a re-run); anything else
    # is a real error.  "except OSError as e" replaces the Python 2-only
    # "except OSError, (err, error_string)" tuple-unpacking form.
    if e.errno != errno.EEXIST:
      raise
  shutil.copyfile(macho, dwarf_file)

  # info_template is the same as what dsymutil would have written, with the
  # addition of the fake_dsym key.
  info_template = \
'''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
	<dict>
		<key>CFBundleDevelopmentRegion</key>
		<string>English</string>
		<key>CFBundleIdentifier</key>
		<string>com.apple.xcode.dsym.%(root_name)s</string>
		<key>CFBundleInfoDictionaryVersion</key>
		<string>6.0</string>
		<key>CFBundlePackageType</key>
		<string>dSYM</string>
		<key>CFBundleSignature</key>
		<string>????</string>
		<key>CFBundleShortVersionString</key>
		<string>1.0</string>
		<key>CFBundleVersion</key>
		<string>1</string>
		<key>dSYM_UUID</key>
		<dict>
%(uuid_dict)s		</dict>
		<key>fake_dsym</key>
		<true/>
	</dict>
</plist>
'''

  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
  uuid_dict = ""
  for arch in sorted(uuids):
    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
  info_dict = {
    "root_name": root_name,
    "uuid_dict": uuid_dict,
  }
  info_contents = info_template % info_dict
  info_file = os.path.join(dsym, "Contents", "Info.plist")
  # Use a context manager so the plist is flushed and closed even if the
  # write raises.
  with open(info_file, "w") as info_fd:
    info_fd.write(info_contents)

  return True
+
# For a Mach-O file, determines where the .dSYM bundle should be located.  If
# the bundle does not exist or has a modification time older than the Mach-O
# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
# file to be identical.  Returns False if the .dSYM could not be made or the
# strip failed.
def strip_and_make_fake_dsym(macho):
  """Creates/refreshes the fake .dSYM for |macho| and strips |macho|.

  Returns True on success (including the up-to-date no-op case), False when
  the fake .dSYM could not be produced or strip exited nonzero.
  """
  dsym = dsym_path(macho)
  macho_stat = os.stat(macho)
  dsym_stat = None
  try:
    dsym_stat = os.stat(dsym)
  except OSError as e:
    # A missing .dSYM just means we have to create one; re-raise anything
    # else.  ("as e" replaces the Python 2-only tuple-unpacking except form.)
    if e.errno != errno.ENOENT:
      raise

  if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
    # Make a .dSYM bundle
    if not make_fake_dsym(macho, dsym):
      return False

    # Strip the Mach-O file.  On any failure the freshly made .dSYM is
    # removed so a stale bundle never outlives a failed strip.
    remove_dsym = True
    try:
      if "SYSTEM_DEVELOPER_BIN_DIR" in os.environ:
        strip_dir = os.environ["SYSTEM_DEVELOPER_BIN_DIR"]
      else:
        strip_dir = "/usr/bin"
      strip_path = os.path.join(strip_dir, "strip")
      strip_cmdline = [strip_path] + sys.argv[1:]
      strip_cmd = subprocess.Popen(strip_cmdline)
      if strip_cmd.wait() == 0:
        remove_dsym = False
    finally:
      if remove_dsym:
        shutil.rmtree(dsym)

    # BUG FIX: the original fell through to os.utime(dsym, ...) even when the
    # strip failed and the .dSYM had just been rmtree'd, raising ENOENT.
    # Report the failure to the caller instead.
    if remove_dsym:
      return False

    # Update modification time on the Mach-O file and .dSYM bundle
    now = time.time()
    os.utime(macho, (now, now))
    os.utime(dsym, (now, now))

  return True
+
def main(argv=None):
  """Entry point: finds the one input file among strip's arguments, then
  strips it while saving a fake .dSYM (via strip_and_make_fake_dsym).

  Returns 0 on success, 1 on any error (which is also reported on stderr).
  """
  if argv is None:
    argv = sys.argv

  # This only supports operating on one file at a time.  Look at the arguments
  # to strip to figure out what the source to be stripped is.  Arguments are
  # processed in the same way that strip does, although to reduce complexity,
  # this doesn't do all of the same checking as strip.  For example, strip
  # has no -Z switch and would treat -Z on the command line as an error.  For
  # the purposes this is needed for, that's fine.
  macho = None
  process_switches = True
  ignore_argument = False
  for arg in argv[1:]:
    if ignore_argument:
      # The previous switch consumed this argument.
      ignore_argument = False
      continue
    if process_switches:
      if arg == "-":
        # A lone "-" ends switch processing, as it does for strip itself.
        process_switches = False
      # strip has these switches accept an argument:
      if arg in ["-s", "-R", "-d", "-o", "-arch"]:
        ignore_argument = True
      # startswith avoids the IndexError arg[0] raised on an empty-string
      # argument; it also skips the lone "-" itself.
      if arg.startswith("-"):
        continue
    if macho is None:
      macho = arg
    else:
      sys.stderr.write("Too many things to strip\n")
      return 1

  if macho is None:
    sys.stderr.write("Nothing to strip\n")
    return 1

  if not strip_and_make_fake_dsym(macho):
    return 1

  return 0

if __name__ == "__main__":
  sys.exit(main(sys.argv))
diff --git a/src/build/mac/tweak_info_plist.py b/src/build/mac/tweak_info_plist.py
new file mode 100755
index 0000000..761352e
--- /dev/null
+++ b/src/build/mac/tweak_info_plist.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases,
+#    this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+#    we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+#    the Info.plist has changed.  So even if we updated it, it's only looking
+#    at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place.  This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+from os import environ as env
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.join(env['SRCROOT'], '..')
+
+
+def _GetOutput(args):
+  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+  of the process. stderr is attached to the parent."""
+  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+  """Similar to _GetOutput() but ignores stderr. If there's an error launching
+  the child (like file not found), the exception will be caught and (None, 1)
+  will be returned to mimic quiet failure."""
+  try:
+    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+  except OSError:
+    return (None, 1)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+  """Removes a varargs of keys from the plist."""
+  for key in keys:
+    try:
+      del plist[key]
+    except KeyError:
+      pass
+
+
+def _AddVersionKeys(plist, version=None):
+  """Adds the product version number into the plist. Returns True on success and
+  False on error. The error will be printed to stderr."""
+  if version:
+    match = re.match('\d+\.\d+\.(\d+\.\d+)$', version)
+    if not match:
+      print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+      return False
+
+    full_version = match.group(0)
+    bundle_version = match.group(1)
+
+  else:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'chrome/tools/build/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+
+    (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+    full_version = stdout.rstrip()
+
+    (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@BUILD@.@PATCH@'])
+    bundle_version = stdout.rstrip()
+
+    # If either of the two version commands finished with non-zero returncode,
+    # report the error up.
+    if retval1 or retval2:
+      return False
+
+  # Add public version info so "Get Info" works.
+  plist['CFBundleShortVersionString'] = full_version
+
+  # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
+  # into  6, 2, 2 digits.  The limitation was present in Tiger, but it could
+  # have been fixed in later OS release, but hasn't been tested (it's easy
+  # enough to find out with "lsregister -dump).
+  # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+  # BUILD will always be an increasing value, so BUILD_PATH gives us something
+  # unique that meetings what LS wants.
+  plist['CFBundleVersion'] = bundle_version
+
+  # Return with no error.
+  return True
+
+
def _DoSCMKeys(plist, add_keys):
  """Adds the SCM information, visible in about:version, to property list. If
  |add_keys| is True, it will insert the keys, otherwise it will remove them.
  Returns False when the revision lookup fails, True otherwise."""
  scm_revision = None
  if add_keys:
    # Pull in the Chrome revision number.
    VERSION_TOOL = os.path.join(TOP, 'chrome/tools/build/version.py')
    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
                                  '@LASTCHANGE@'])
    if retval:
      return False
    scm_revision = stdout.rstrip()

  # The key is always removed first so a stale value never survives; it is
  # then re-added only when a revision was fetched.
  _RemoveKeys(plist, 'SCMRevision')
  if scm_revision != None:
    plist['SCMRevision'] = scm_revision
  elif add_keys:
    sys.stderr.write('Could not determine SCM revision.  This may be OK.\n')

  # TODO(thakis): Remove this once m25 has reached stable.
  _RemoveKeys(plist, 'SCMPath')
  return True
+
+
+def _DoPDFKeys(plist, add_keys):
+  """Adds PDF support to the document types list. If add_keys is True, it will
+  add the type information dictionary. If it is False, it will remove it if
+  present."""
+
+  PDF_FILE_EXTENSION = 'pdf'
+
+  def __AddPDFKeys(sub_plist):
+    """Writes the keys into a sub-dictionary of the plist."""
+    sub_plist['CFBundleTypeExtensions'] = [PDF_FILE_EXTENSION]
+    sub_plist['CFBundleTypeIconFile'] = 'document.icns'
+    sub_plist['CFBundleTypeMIMETypes'] = 'application/pdf'
+    sub_plist['CFBundleTypeName'] = 'PDF Document'
+    sub_plist['CFBundleTypeRole'] = 'Viewer'
+
+  DOCUMENT_TYPES_KEY = 'CFBundleDocumentTypes'
+
+  # First get the list of document types, creating it if necessary.
+  try:
+    extensions = plist[DOCUMENT_TYPES_KEY]
+  except KeyError:
+    # If this plist doesn't have a type dictionary, create one if set to add the
+    # keys. If not, bail.
+    if not add_keys:
+      return
+    extensions = plist[DOCUMENT_TYPES_KEY] = []
+
+  # Loop over each entry in the list, looking for one that handles PDF types.
+  for i, ext in enumerate(extensions):
+    # If an entry for .pdf files is found...
+    if 'CFBundleTypeExtensions' not in ext:
+      continue
+    if PDF_FILE_EXTENSION in ext['CFBundleTypeExtensions']:
+      if add_keys:
+        # Overwrite the existing keys with new ones.
+        __AddPDFKeys(ext)
+      else:
+        # Otherwise, delete the entry entirely.
+        del extensions[i]
+      return
+
+  # No PDF entry exists. If one needs to be added, do so now.
+  if add_keys:
+    pdf_entry = {}
+    __AddPDFKeys(pdf_entry)
+    extensions.append(pdf_entry)
+
+
+def _AddBreakpadKeys(plist, branding):
+  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |branding| argument."""
+  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
+  plist['BreakpadProduct'] = '%s_Mac' % branding
+  plist['BreakpadProductDisplay'] = branding
+  plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+  # These are both deliberately strings and not boolean.
+  plist['BreakpadSendAndExit'] = 'YES'
+  plist['BreakpadSkipConfirm'] = 'YES'
+
+
def _RemoveBreakpadKeys(plist):
  """Strips all Breakpad-related keys from |plist|, if present."""
  for key in ('BreakpadURL',
              'BreakpadReportInterval',
              'BreakpadProduct',
              'BreakpadProductDisplay',
              'BreakpadVersion',
              'BreakpadSendAndExit',
              'BreakpadSkipConfirm'):
    plist.pop(key, None)
+
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |bundle_identifier| argument (com.example.product)."""
+  plist['KSVersion'] = plist['CFBundleShortVersionString']
+  plist['KSProductID'] = bundle_identifier
+  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+
def _RemoveKeystoneKeys(plist):
  """Strips all Keystone-related keys from |plist|, if present."""
  for key in ('KSVersion', 'KSProductID', 'KSUpdateURL'):
    plist.pop(key, None)
+
+
def Main(argv):
  """Rewrites the target's Info.plist in place according to |argv| flags.

  Locates the plist via the TARGET_BUILD_DIR and INFOPLIST_PATH environment
  variables (set by Xcode), mutates the version, Breakpad, Keystone, SCM and
  PDF keys, then rewrites the file as xml1 with plutil.  Returns a process
  exit code: 0 on success, nonzero on any failure.
  """
  parser = optparse.OptionParser('%prog [options]')
  # NOTE(review): the int-typed flags use default=False, which simply acts
  # as 0 when the flag is not passed.
  parser.add_option('--breakpad', dest='use_breakpad', action='store',
      type='int', default=False, help='Enable Breakpad [1 or 0]')
  parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
      action='store', type='int', default=False,
      help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
  parser.add_option('--keystone', dest='use_keystone', action='store',
      type='int', default=False, help='Enable Keystone [1 or 0]')
  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
      default=True, help='Add SCM metadata [1 or 0]')
  parser.add_option('--pdf', dest='add_pdf_support', action='store', type='int',
      default=False, help='Add PDF file handler support [1 or 0]')
  parser.add_option('--branding', dest='branding', action='store',
      type='string', default=None, help='The branding of the binary')
  parser.add_option('--bundle_id', dest='bundle_identifier',
      action='store', type='string', default=None,
      help='The bundle id of the binary')
  parser.add_option('--version', dest='version', action='store', type='string',
      default=None, help='The version string [major.minor.build.patch]')
  (options, args) = parser.parse_args(argv)

  # This tool takes options only; any positional argument is a usage error.
  if len(args) > 0:
    print >>sys.stderr, parser.get_usage()
    return 1

  # Read the plist into its parsed format.
  DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
  plist = plistlib.readPlist(DEST_INFO_PLIST)

  # Insert the product version.
  if not _AddVersionKeys(plist, version=options.version):
    return 2

  # Add Breakpad if configured to do so.
  if options.use_breakpad:
    if options.branding is None:
      print >>sys.stderr, 'Use of Breakpad requires branding.'
      return 1
    _AddBreakpadKeys(plist, options.branding)
    if options.breakpad_uploads:
      plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
    else:
      # This allows crash dumping to a file without uploading the
      # dump, for testing purposes.  Breakpad does not recognise
      # "none" as a special value, but this does stop crash dump
      # uploading from happening.  We need to specify something
      # because if "BreakpadURL" is not present, Breakpad will not
      # register its crash handler and no crash dumping will occur.
      plist['BreakpadURL'] = 'none'
  else:
    _RemoveBreakpadKeys(plist)

  # Only add Keystone in Release builds.
  if options.use_keystone and env['CONFIGURATION'] == 'Release':
    if options.bundle_identifier is None:
      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
      return 1
    _AddKeystoneKeys(plist, options.bundle_identifier)
  else:
    _RemoveKeystoneKeys(plist)

  # Adds or removes any SCM keys.
  if not _DoSCMKeys(plist, options.add_scm_info):
    return 3

  # Adds or removes the PDF file handler entry.
  _DoPDFKeys(plist, options.add_pdf_support)

  # Now that all keys have been mutated, rewrite the file.
  # NOTE: temp_info_plist must stay referenced until plutil finishes below,
  # since NamedTemporaryFile deletes the file when the object is closed.
  temp_info_plist = tempfile.NamedTemporaryFile()
  plistlib.writePlist(plist, temp_info_plist.name)

  # Info.plist will work perfectly well in any plist format, but traditionally
  # applications use xml1 for this, so convert it to ensure that it's valid.
  proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
                           temp_info_plist.name])
  proc.wait()
  return proc.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/src/build/mac/verify_no_objc.sh b/src/build/mac/verify_no_objc.sh
new file mode 100755
index 0000000..955f9be
--- /dev/null
+++ b/src/build/mac/verify_no_objc.sh
@@ -0,0 +1,43 @@
#!/bin/bash

# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This script makes sure that no __OBJC,__image_info section appears in the
# executable file built by the Xcode target that runs the script. If such a
# section appears, the script prints an error message and exits nonzero.
#
# Why is this important?
#
# On 10.5, there's a bug in CFBundlePreflightExecutable that causes it to
# crash when operating in an executable that has not loaded at its default
# address (that is, when it's a position-independent executable with the
# MH_PIE bit set in its mach_header) and the executable has an
# __OBJC,__image_info section. See http://crbug.com/88697.
#
# Chrome's main executables don't use any Objective-C at all, and don't need
# to carry this section around. Not linking them as Objective-C when they
# don't need it anyway saves about 4kB in the linked executable, although most
# of that 4kB is just filled with zeroes.
#
# This script makes sure that nobody goofs and accidentally introduces these
# sections into the main executables.

set -eu

otool="${DEVELOPER_BIN_DIR:-/usr/bin}/otool"
executable="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"

# "otool -o" dumps the contents of the __OBJC segment; any "Contents ...
# section" line in its output means the image_info section is present.
if "${otool}" -arch i386 -o "${executable}" | grep -q '^Contents.*section$'; \
then
  # BUG FIX: diagnostics belong on stderr.  The original used "2>&1", which
  # redirects stderr to stdout -- the opposite of what was intended.
  echo "${0}: ${executable} has an __OBJC,__image_info section" >&2
  exit 1
fi

# ${PIPESTATUS[0]} still holds otool's exit status from the pipeline in the
# "if" above; a failure of otool itself must also fail the script.
if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
  echo "${0}: otool failed" >&2
  exit 1
fi

exit 0
diff --git a/src/build/nocompile.gypi b/src/build/nocompile.gypi
new file mode 100644
index 0000000..f9021ae
--- /dev/null
+++ b/src/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_module_nc_unittests',
+#   'type': 'executable',
+#   'sources': [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions.  This will likely
+# be greater than 80-characters. Giving a solid expected output test is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that is empty on success, or will have a
+# series of #error lines on failure, and a set of trivially passing gunit
+# TEST() functions on success. This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+{
+  # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+  'sources/': [['exclude', '\\.nc$']],
+  'conditions': [
+    [ 'OS=="linux" and clang==0', {
+      'rules': [
+        {
+          'variables': {
+            'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+            'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+                               '<(RULE_INPUT_ROOT)_nc.cc'),
+           },
+          'rule_name': 'run_nocompile',
+          'extension': 'nc',
+          'inputs': [
+            '<(nocompile_driver)',
+          ],
+          'outputs': [
+            '<(nc_result_path)'
+          ],
+          'action': [
+            'python',
+            '<(nocompile_driver)',
+            '4', # number of compilers to invoke in parallel.
+            '<(RULE_INPUT_PATH)',
+            '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+            '<(nc_result_path)',
+            ],
+          'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    }, {
+      'sources/': [['exclude', '\\.nc$']]
+    }],  # 'OS=="linux" and clang==0'
+  ],
+}
+
diff --git a/src/build/output_dll_copy.rules b/src/build/output_dll_copy.rules
new file mode 100644
index 0000000..c6e9051
--- /dev/null
+++ b/src/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+	Name="Output DLL copy"
+	Version="8.00"
+	>
+	<Rules>
+		<CustomBuildRule
+			Name="Output DLL copy"
+			CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+			Outputs="$(OutDir)\$(InputFileName)"
+			FileExtensions="*.dll"
+			>
+			<Properties>
+			</Properties>
+		</CustomBuildRule>
+	</Rules>
+</VisualStudioToolFile>
diff --git a/src/build/precompile.cc b/src/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/src/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/src/build/precompile.h b/src/build/precompile.h
new file mode 100644
index 0000000..c120812
--- /dev/null
+++ b/src/build/precompile.h
@@ -0,0 +1,109 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds the
+// build up significantly, around 1/4th on VS 2010 on an HP Z600 with 12
+// GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. Includes without
+// a numeric comment are generally included at least 5 times. It may
+// be possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#if !defined(__LB_XB360__)
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wincrypt.h>  // 4
+#include <wtypes.h>  // 2
+#endif  // !defined(__LB_XB360__)
+// Defines in atlbase.h cause conflicts; if we could figure out how
+// this family of headers can be included in the PCH, it might speed
+// up the build as several of them are used frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h>  // 2
+#include <atlctrls.h>  // 2
+#include <atlmisc.h>  // 2
+#include <atlsafe.h>  // 1
+#include <atltheme.h>  // 1
+#include <atlwin.h>  // 2
+*/
+
+// Objbase.h and other files that rely on it bring in [ #define
+// interface struct ] which can cause problems in a multi-platform
+// build like Chrome's. #undef-ing it does not work as there are
+// currently 118 targets that break if we do this, so leaving out of
+// the precompiled header for now.
+//#include <commctrl.h>  // 2
+//#include <commdlg.h>  // 3
+//#include <cryptuiapi.h>  // 2
+//#include <Objbase.h>  // 2
+//#include <objidl.h>  // 1
+//#include <ole2.h>  // 1
+//#include <oleacc.h>  // 2
+//#include <oleauto.h>  // 1
+//#include <oleidl.h>  // 1
+//#include <propkey.h>  // 2
+//#include <propvarutil.h>  // 2
+//#include <pstore.h>  // 2
+//#include <shlguid.h>  // 1
+//#include <shlwapi.h>  // 1
+//#include <shobjidl.h>  // 4
+//#include <urlhist.h>  // 2
+
+// Caused other conflicts in addition to the 'interface' issue above.
+// #include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>  // 4
+#include <math.h>
+#include <memory.h>  // 1
+#include <signal.h>
+#include <stdarg.h>  // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>  // 4
+
+#include <algorithm>
+#include <bitset>  // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio>  // 3
+#include <cstdlib>  // 2
+#include <cstring>
+#include <deque>
+#include <fstream>  // 3
+#include <functional>
+#include <iomanip>  // 2
+#include <iosfwd>  // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>  // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/src/build/protoc.gypi b/src/build/protoc.gypi
new file mode 100644
index 0000000..52fb8a2
--- /dev/null
+++ b/src/build/protoc.gypi
@@ -0,0 +1,124 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. For Java-targets, see
+# protoc_java.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies.  The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# If you need to add an EXPORT macro to a protobuf's c++ header, set the
+# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:'
+# e.g. 'dllexport_decl=BASE_EXPORT:'
+#
+# It is likely you also need to #include a file for the above EXPORT macro to
+# work. You can do so with the 'cc_include' variable.
+# e.g. 'base/base_export.h'
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'cc_generator_options%': '',
+    'cc_include%': '',
+    'proto_in_dir%': '.',
+    'conditions': [
+      ['use_system_protobuf==0', {
+        'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+      }, { # use_system_protobuf==1
+        'protoc': '<!(which protoc)',
+      }],
+    ],
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc_wrapper)',
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        'python',
+        '<(protoc_wrapper)',
+        '--include',
+        '<(cc_include)',
+        '--protobuf',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+        # Using the --arg val form (instead of --arg=val) allows gyp's msvs rule
+        # generation to correct 'val' which is a path.
+        '--proto-in-dir','<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # --proto_path is a strict prefix of the path given as an argument.
+        '--proto-in-file','<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--use-system-protobuf=<(use_system_protobuf)',
+        '--',
+        '<(protoc)',
+        '--cpp_out', '<(cc_generator_options)<(cc_dir)',
+        '--python_out', '<(py_dir)',
+      ],
+      'msvs_cygwin_shell': 0,
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+      '<(DEPTH)',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/protoc_java.gypi b/src/build/protoc_java.gypi
new file mode 100644
index 0000000..4465c10
--- /dev/null
+++ b/src/build/protoc_java.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. This is only to be included
+# for Java targets. When including this file, a .jar-file will be generated.
+# For other targets, see protoc.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     'proto_in_dir': '.'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'output_java_files' variable specifies a list of output files that will
+# be generated. It is based on the package and java_outer_classname fields in
+# the proto. All the values must be prefixed with >(java_out_dir), since that
+# is the root directory of all the output.
+#
+# Implementation notes:
+# A target_name of foo and proto-specified 'package' java.package.path produces:
+#   <(PRODUCT_DIR)/java_proto/foo/{java/package/path/}{Foo,Bar}.java
+# where Foo and Bar are taken from 'java_outer_classname' of the protos.
+#
+# How the .jar-file is created is different than how protoc is used for other
+# targets, and as such, this lives in its own file.
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+    'java_out_dir': '<(PRODUCT_DIR)/java_proto/<(_target_name)/src',
+    'proto_in_dir%': '.',
+    'stamp_file': '<(java_out_dir).stamp',
+    'script': '<(DEPTH)/build/protoc_java.py',
+
+    # The rest of the variables here are for the java.gypi include.
+    'package_name': '<(_target_name)',
+    'java_in_dir': '<(DEPTH)/build/android/empty',
+    'generated_src_dirs': ['<(java_out_dir)'],
+    # Adding the |stamp_file| to |additional_input_paths| makes the actions in
+    # the include of java.gypi depend on the genproto_java action.
+    'additional_input_paths': ['<(stamp_file)'],
+  },
+  'actions': [
+    {
+      'action_name': 'genproto_java',
+      'inputs': [
+        '<(script)',
+        '<(protoc)',
+        '<@(_sources)',
+      ],
+      # We do not know the names of the generated files, so we use a stamp.
+      'outputs': [
+        '<(stamp_file)',
+      ],
+      'action': [
+        '<(script)',
+        '<(protoc)',
+        '<(proto_in_dir)',
+        '<(java_out_dir)',
+        '<(stamp_file)',
+        '<@(_sources)',
+      ],
+      'message': 'Generating Java code from <(proto_in_dir)',
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite_javalib',
+  ],
+  'includes': [ 'java.gypi' ],
+}
diff --git a/src/build/protoc_java.py b/src/build/protoc_java.py
new file mode 100755
index 0000000..42e2044
--- /dev/null
+++ b/src/build/protoc_java.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate Java source files from protobufs.
+
+Usage:
+    protoc_java.py {protoc} {proto_path} {java_out} {stamp_file} {proto_files}
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc.
+4. Creates a new stamp file.
+"""
+
+import os
+import shutil
+import subprocess
+import sys
+
def main(argv):
  """Regenerates the Java protobuf sources and touches a stamp file.

  Args:
    argv: [script_name, protoc_path, proto_path, java_out, stamp_file,
           proto_file...] -- mirrors the usage string in the module docstring.

  Returns:
    0 on success, protoc's non-zero exit code on failure, or 1 on bad usage.
  """
  if len(argv) < 5:
    usage()
    return 1

  protoc_path, proto_path, java_out, stamp_file = argv[1:5]
  proto_files = argv[5:]

  # Delete all old sources so classes removed from the .proto files do not
  # linger in the newly built jar.
  if os.path.exists(java_out):
    shutil.rmtree(java_out)

  # Create source directory
  os.makedirs(java_out)

  # Generate Java files using protoc
  ret = subprocess.call(
      [protoc_path, '--proto_path', proto_path, '--java_out', java_out]
      + proto_files)

  if ret == 0:
    # Touch the stamp file. open() replaces the Python-2-only file()
    # builtin so the script also runs under Python 3.
    with open(stamp_file, 'a'):
      os.utime(stamp_file, None)

  return ret
+
def usage():
  """Prints the module usage string to stdout."""
  print(__doc__)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/src/build/release.gypi b/src/build/release.gypi
new file mode 100644
index 0000000..7595ef5
--- /dev/null
+++ b/src/build/release.gypi
@@ -0,0 +1,17 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/src/build/sanitize-mac-build-log.sed b/src/build/sanitize-mac-build-log.sed
new file mode 100755
index 0000000..3312eac
--- /dev/null
+++ b/src/build/sanitize-mac-build-log.sed
@@ -0,0 +1,35 @@
+#!/bin/echo Use sanitize-mac-build-log.sh or sed -f
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
+s|^CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
diff --git a/src/build/sanitize-mac-build-log.sh b/src/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..dc743fa
--- /dev/null
+++ b/src/build/sanitize-mac-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/src/build/sanitize-win-build-log.sed b/src/build/sanitize-win-build-log.sed
new file mode 100755
index 0000000..c8bffde
--- /dev/null
+++ b/src/build/sanitize-win-build-log.sed
@@ -0,0 +1,17 @@
+#!/bin/echo Use sanitize-win-build-log.sh or sed -f
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]\+>//
+
+# Shorten bindings generation lines
+s/^.*"perl".*generate-bindings.pl".*\("[^"]\+\.idl"\).*$/  generate-bindings \1/
diff --git a/src/build/sanitize-win-build-log.sh b/src/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..dc743fa
--- /dev/null
+++ b/src/build/sanitize-win-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/src/build/shim_headers.gypi b/src/build/shim_headers.gypi
new file mode 100644
index 0000000..cf0914d
--- /dev/null
+++ b/src/build/shim_headers.gypi
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to handle shim headers
+# in a consistent manner. To use this the following variables need to be
+# defined:
+#   headers_root_path: string: path to directory containing headers
+#   header_filenames: list: list of header file names
+
+{
+  'variables': {
+    'shim_headers_path': '<(INTERMEDIATE_DIR)/shim_headers',
+  },
+  'direct_dependent_settings': {
+    'include_dirs+': [
+      '<(shim_headers_path)',
+    ],
+  },
+  'actions': [
+    {
+      'variables': {
+        'generator_path': '<(DEPTH)/tools/generate_shim_headers/generate_shim_headers.py',
+        'generator_args': [
+          '--headers-root', '<(headers_root_path)',
+          '--output-directory', '<(shim_headers_path)',
+          '<@(header_filenames)',
+        ],
+      },
+      'action_name': 'generate_<(_target_name)_shim_headers',
+      'inputs': [
+        '<(generator_path)',
+      ],
+      'outputs': [
+        '<!@pymod_do_main(generate_shim_headers <@(generator_args) --outputs)',
+      ],
+      'action': ['python',
+                 '<(generator_path)',
+                 '<@(generator_args)',
+                 '--generate',
+      ],
+      'message': 'Generating <(_target_name) shim headers.',
+    },
+  ],
+}
diff --git a/src/build/some.gyp b/src/build/some.gyp
new file mode 100644
index 0000000..44a1dd5
--- /dev/null
+++ b/src/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'some',
+      'type': 'none',
+      'dependencies': [
+        # This file is intended to be locally modified. List the targets you use
+        # regularly. The generated some.sln will contain projects for only
+        # those targets and the targets they are transitively dependent on. This
+        # can result in a solution that loads and unloads faster in Visual
+        # Studio.
+        #
+        # Tip: Create a dummy CL to hold your local edits to this file, so they
+        # don't accidentally get added to another CL that you are editing.
+        #
+        # Example:
+        # '../chrome/chrome.gyp:chrome',
+      ],
+    },
+  ],
+}
diff --git a/src/build/temp_gyp/README.chromium b/src/build/temp_gyp/README.chromium
new file mode 100644
index 0000000..8045d61
--- /dev/null
+++ b/src/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files.  Contact mark for
+details.
diff --git a/src/build/temp_gyp/googleurl.gyp b/src/build/temp_gyp/googleurl.gyp
new file mode 100644
index 0000000..1c03153
--- /dev/null
+++ b/src/build/temp_gyp/googleurl.gyp
@@ -0,0 +1,110 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(mark): Upstream this file to googleurl.
+{
+  'variables': {
+    'chromium_code': 1,
+  },
+  'targets': [
+    {
+      'target_name': 'googleurl',
+      'type': '<(component)',
+      'dependencies': [
+        '../../base/base.gyp:base',
+        '../../third_party/icu/icu.gyp:icudata',
+        '../../third_party/icu/icu.gyp:icui18n',
+        '../../third_party/icu/icu.gyp:icuuc',
+      ],
+      'sources': [
+        '../../googleurl/src/gurl.cc',
+        '../../googleurl/src/gurl.h',
+        '../../googleurl/src/url_canon.h',
+        '../../googleurl/src/url_canon_etc.cc',
+        '../../googleurl/src/url_canon_fileurl.cc',
+        '../../googleurl/src/url_canon_filesystemurl.cc',
+        '../../googleurl/src/url_canon_host.cc',
+        '../../googleurl/src/url_canon_icu.cc',
+        '../../googleurl/src/url_canon_icu.h',
+        '../../googleurl/src/url_canon_internal.cc',
+        '../../googleurl/src/url_canon_internal.h',
+        '../../googleurl/src/url_canon_internal_file.h',
+        '../../googleurl/src/url_canon_ip.cc',
+        '../../googleurl/src/url_canon_ip.h',
+        '../../googleurl/src/url_canon_mailtourl.cc',
+        '../../googleurl/src/url_canon_path.cc',
+        '../../googleurl/src/url_canon_pathurl.cc',
+        '../../googleurl/src/url_canon_query.cc',
+        '../../googleurl/src/url_canon_relative.cc',
+        '../../googleurl/src/url_canon_stdstring.h',
+        '../../googleurl/src/url_canon_stdurl.cc',
+        '../../googleurl/src/url_file.h',
+        '../../googleurl/src/url_parse.cc',
+        '../../googleurl/src/url_parse.h',
+        '../../googleurl/src/url_parse_file.cc',
+        '../../googleurl/src/url_parse_internal.h',
+        '../../googleurl/src/url_util.cc',
+        '../../googleurl/src/url_util.h',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../..',
+        ],
+      },
+      'defines': [
+        'FULL_FILESYSTEM_URL_SUPPORT=1',
+      ],
+      'conditions': [
+        ['component=="shared_library"', {
+          'defines': [
+            'GURL_DLL',
+            'GURL_IMPLEMENTATION=1',
+          ],
+          'direct_dependent_settings': {
+            'defines': [
+              'GURL_DLL',
+            ],
+          },
+        }],
+        ['OS=="lb_shell"', {
+          'dependencies': [
+            '<(lbshell_root)/build/projects/posix_emulation.gyp:posix_emulation',
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'googleurl_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'googleurl',
+        '../../base/base.gyp:base_i18n',
+        '../../base/base.gyp:run_all_unittests',
+        '../../testing/gtest.gyp:gtest',
+        '../../third_party/icu/icu.gyp:icuuc',
+      ],
+      'sources': [
+        '../../googleurl/src/gurl_unittest.cc',
+        '../../googleurl/src/url_canon_unittest.cc',
+        '../../googleurl/src/url_parse_unittest.cc',
+        '../../googleurl/src/url_test_utils.h',
+        '../../googleurl/src/url_util_unittest.cc',
+      ],
+      'defines': [
+        'FULL_FILESYSTEM_URL_SUPPORT=1',
+      ],
+      'conditions': [
+        ['os_posix==1 and OS!="mac" and OS!="ios"', {
+          'conditions': [
+            ['linux_use_tcmalloc==1', {
+              'dependencies': [
+                '../../base/allocator/allocator.gyp:allocator',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/src/build/temp_gyp/pdfsqueeze.gyp b/src/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 0000000..2b3b1ff
--- /dev/null
+++ b/src/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'pdfsqueeze',
+      'type': 'executable',
+      'sources': [
+        '../../third_party/pdfsqueeze/pdfsqueeze.m',
+      ],
+      'defines': [
+        # Use defines to map the full path names that will be used for
+        # the vars into the short forms expected by pdfsqueeze.m.
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+      ],
+      'include_dirs': [
+        '<(INTERMEDIATE_DIR)',
+      ],
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+      ],
+      'actions': [
+        {
+          'action_name': 'Generate inline filter data',
+          'inputs': [
+            '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+          ],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+          ],
+          'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/src/build/update-linux-sandbox.sh b/src/build/update-linux-sandbox.sh
new file mode 100755
index 0000000..ebf8c10
--- /dev/null
+++ b/src/build/update-linux-sandbox.sh
@@ -0,0 +1,75 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/out/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "If you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
+  echo "please make sure you build the chrome_sandbox target"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/src/build/util/lastchange.py b/src/build/util/lastchange.py
new file mode 100755
index 0000000..dba8e54
--- /dev/null
+++ b/src/build/util/lastchange.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
class VersionInfo(object):
  """Value object pairing a repository URL with a revision identifier."""

  def __init__(self, url, revision):
    """Stores the source url and revision for later formatting."""
    self.url = url
    self.revision = revision
+
+
def FetchSVNRevision(directory, svn_url_regex):
  """
  Fetch the Subversion branch and revision for a given directory.

  Errors are swallowed.

  Args:
    directory: directory in which to run 'svn info' (None means cwd).
    svn_url_regex: compiled regex whose group 2 extracts the branch path
        from the repository URL.

  Returns:
    A VersionInfo object or None on error.
  """
  try:
    # universal_newlines=True yields text (not bytes) output, which is
    # required for the str parsing below under Python 3 and harmless
    # under Python 2.
    proc = subprocess.Popen(['svn', 'info'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=directory,
                            universal_newlines=True,
                            shell=(sys.platform=='win32'))
  except OSError:
    # command is apparently either not installed or not executable.
    return None

  attrs = {}
  for line in proc.stdout:
    line = line.strip()
    # Skip blank lines and lines without a 'key: value' separator; the
    # previous code raised an unhandled ValueError on the latter, which
    # contradicted the "errors are swallowed" contract.
    if not line or ': ' not in line:
      continue
    key, val = line.split(': ', 1)
    attrs[key] = val

  try:
    match = svn_url_regex.search(attrs['URL'])
    url = match.group(2) if match else ''
    revision = attrs['Revision']
  except KeyError:
    # 'svn info' failed or produced unexpected output.
    return None

  return VersionInfo(url, revision)
+
+
def RunGitCommand(directory, command):
  """
  Launches git subcommand.

  Errors are swallowed.

  Returns:
    A process object or None.
  """
  full_command = ['git'] + command
  # Force shell usage under cygwin. This is a workaround for
  # mysterious loss of cwd while invoking cygwin's git.
  # We can't just pass shell=True to Popen, as under win32 this will
  # cause CMD to be used, while we explicitly want a cygwin shell.
  if sys.platform == 'cygwin':
    full_command = ['sh', '-c', ' '.join(full_command)]
  try:
    return subprocess.Popen(full_command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=directory,
                            shell=(sys.platform == 'win32'))
  except OSError:
    return None
+
+
def FetchGitRevision(directory):
  """
  Fetch the Git hash for a given directory.

  Errors are swallowed.

  Returns:
    A VersionInfo object or None on error.
  """
  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
  if not proc:
    return None
  output = proc.communicate()[0].strip()
  if proc.returncode != 0 or not output:
    return None
  # Seven hex digits is git's conventional abbreviated hash length.
  return VersionInfo('git', output[:7])
+
+
def FetchGitSVNURLAndRevision(directory, svn_url_regex):
  """
  Fetch the Subversion URL and revision through Git.

  Errors are swallowed.

  Returns:
    A tuple containing the Subversion URL and revision.
  """
  proc = RunGitCommand(directory, ['log', '-1',
                                   '--grep=git-svn-id', '--format=%b'])
  if not proc:
    return None, None
  output = proc.communicate()[0].strip()
  if proc.returncode != 0 or not output:
    return None, None
  # Extract the latest SVN revision and the SVN URL.
  # The target line is the last "git-svn-id: ..." line like this:
  # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
  match = _GIT_SVN_ID_REGEX.search(output)
  if not match:
    return None, None
  revision = match.group(2)
  url_match = svn_url_regex.search(match.group(1))
  url = url_match.group(2) if url_match else ''
  return url, revision
+
+
def FetchGitSVNRevision(directory, svn_url_regex):
  """
  Fetch the Git-SVN identifier for the local tree.

  Errors are swallowed.

  Returns:
    A VersionInfo object, or None when either field is missing.
  """
  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
  if not (url and revision):
    return None
  return VersionInfo(url, revision)
+
+
def FetchVersionInfo(default_lastchange, directory=None,
                     directory_regex_prior_to_src_url='chrome|svn'):
  """
  Returns the last change (in the form of a branch, revision tuple),
  from some appropriate revision control system.

  Args:
    default_lastchange: path of a file holding a fallback revision, or None.
    directory: directory to inspect; None means the current directory.
    directory_regex_prior_to_src_url: alternation of path components that
        may precede the branch portion of the repository URL.

  Returns:
    A VersionInfo object; its fields are None when nothing could be found.
  """
  svn_url_regex = re.compile(
      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')

  # Try SVN first, then git-svn, then plain git.
  version_info = (FetchSVNRevision(directory, svn_url_regex) or
                  FetchGitSVNRevision(directory, svn_url_regex) or
                  FetchGitRevision(directory))
  if version_info:
    return version_info
  if default_lastchange and os.path.exists(default_lastchange):
    # Use a context manager so the fallback file handle is closed
    # promptly instead of leaking until garbage collection.
    with open(default_lastchange, 'r') as f:
      revision = f.read().strip()
    return VersionInfo(None, revision)
  return VersionInfo(None, None)
+
+
def WriteIfChanged(file_name, contents):
  """
  Writes the specified contents to the specified file_name
  iff the contents are different than the current contents.

  Args:
    file_name: path of the file to (re)write.
    contents: full text the file should contain afterwards.
  """
  try:
    # Context managers close both handles deterministically; the previous
    # bare open() calls leaked them until garbage collection.
    with open(file_name, 'r') as f:
      old_contents = f.read()
  except EnvironmentError:
    # File missing or unreadable: fall through and write it fresh.
    pass
  else:
    if contents == old_contents:
      return
    os.unlink(file_name)
  with open(file_name, 'w') as f:
    f.write(contents)
+
+
def main(argv=None):
  """Parses command-line options, fetches the revision and emits
  'LASTCHANGE=<revision>' to a file or stdout.

  Returns:
    0 on success; calls sys.exit(2) on unexpected arguments.
  """
  if argv is None:
    argv = sys.argv

  parser = optparse.OptionParser(usage="lastchange.py [options]")
  parser.add_option("-d", "--default-lastchange", metavar="FILE",
                    help="default last change input FILE")
  parser.add_option("-o", "--output", metavar="FILE",
                    help="write last change to FILE")
  parser.add_option("--revision-only", action='store_true',
                    help="just print the SVN revision number")
  parser.add_option("-s", "--source-dir", metavar="DIR",
                    help="use repository in the given directory")
  opts, args = parser.parse_args(argv[1:])

  out_file = opts.output

  # A single positional argument may name the output file when -o is
  # absent.  (The original while-loop consumed at most one argument; this
  # is the equivalent straight-line form.)
  if out_file is None and args:
    out_file = args.pop(0)
  if args:
    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
    parser.print_help()
    sys.exit(2)

  if opts.source_dir:
    src_dir = opts.source_dir
  else:
    src_dir = os.path.dirname(os.path.abspath(__file__))

  version_info = FetchVersionInfo(opts.default_lastchange, src_dir)

  if version_info.revision is None:
    version_info.revision = '0'

  if opts.revision_only:
    # print() behaves identically under Python 2 (parenthesized
    # expression) and Python 3; the former "print x" statement form is a
    # syntax error under Python 3.
    print(version_info.revision)
  else:
    contents = "LASTCHANGE=%s\n" % version_info.revision
    if out_file:
      WriteIfChanged(out_file, contents)
    else:
      sys.stdout.write(contents)

  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/whitespace_file.txt b/src/build/whitespace_file.txt
new file mode 100644
index 0000000..b40f8bf
--- /dev/null
+++ b/src/build/whitespace_file.txt
@@ -0,0 +1,75 @@
+Copyright (c) 2012 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+=========================================================================
+
+Let's make a story. Add one sentence for every commit:
+
+CHAPTER 1.0:
+It was a dark and stormy night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the darkness. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt", he snickered.
+
+The pause was filled with the sound of thunder.
+
+
+CHAPTER 2.0:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it.". A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3.0:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed him so far.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred, just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+salmon in a cereal bowl.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from. He tried to recall the cook's face.  Purple?
+
+CHAPTER 5:
+Many years later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun.
+Another day in the life...
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+
+---------
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+
+This commit will change the world as we know it. For the better.
diff --git a/src/build/win/chrome_win.croc b/src/build/win/chrome_win.croc
new file mode 100644
index 0000000..e1e3bb7
--- /dev/null
+++ b/src/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, posix, or linux specific files
+    {
+      # Matches names like foo_linux.cc, posix/bar.h, baz_chromeos_x.cc:
+      # the platform token must be delimited by '_' or '/' before and
+      # by '.' or '_' after.
+      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      # Windows-specific test files are reported under the 'test' group.
+      'regexp' : '.*_test_win\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/src/build/win/install-build-deps.py b/src/build/win/install-build-deps.py
new file mode 100755
index 0000000..d9e50b6
--- /dev/null
+++ b/src/build/win/install-build-deps.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+import os
+
+def patch_msbuild():
+  """VS2010 MSBuild has a ULDI bug that we patch here. See http://goo.gl/Pn8tj.
+  """
+  source_path = os.path.join(os.environ['ProgramFiles(x86)'],
+                             "MSBuild",
+                             "Microsoft.Cpp",
+                             "v4.0",
+                             "Microsoft.CppBuild.targets")
+  backup_path = source_path + ".backup"
+  if not os.path.exists(backup_path):
+    try:
+      print "Backing up %s..." % source_path
+      shutil.copyfile(source_path, backup_path)
+    except IOError:
+      print "Could not back up %s to %s. Run as Administrator?" % (
+          source_path, backup_path)
+      return 1
+
+  source = open(source_path).read()
+  base = ('''<Target Name="GetResolvedLinkObjs" Returns="@(ObjFullPath)" '''
+          '''DependsOnTargets="$(CommonBuildOnlyTargets);ComputeCLOutputs;'''
+          '''ResolvedLinkObjs"''')
+  find = base + '>'
+  replace = base + ''' Condition="'$(ConfigurationType)'=='StaticLibrary'">'''
+  result = source.replace(find, replace)
+
+  if result != source:
+    open(source_path, "w").write(result)
+    print "Patched %s." % source_path
+  return 0
+
+
+def main():
+  # Thin wrapper so the patch logic can be imported without side effects.
+  return patch_msbuild()
+
+
+# Script entry point: exit with main()'s return code.
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/win/setup_cygwin_mount.py b/src/build/win/setup_cygwin_mount.py
new file mode 100644
index 0000000..d68a3af
--- /dev/null
+++ b/src/build/win/setup_cygwin_mount.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def main():
+  if len(sys.argv) != 2 or sys.argv[1] != '--win-only':
+    return 1
+  if sys.platform in ('win32', 'cygwin'):
+    self_dir = os.path.dirname(sys.argv[0])
+    mount_path = os.path.join(self_dir, "../../third_party/cygwin")
+    batch_path = os.path.join(mount_path, "setup_mount.bat")
+    return os.system(os.path.normpath(batch_path) + ">nul")
+  return 0
+
+
+# Script entry point: exit with main()'s return code.
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/src/build/win_precompile.gypi b/src/build/win_precompile.gypi
new file mode 100644
index 0000000..b992a0f
--- /dev/null
+++ b/src/build/win_precompile.gypi
@@ -0,0 +1,21 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows, in debug builds only as the official
+# builders blow up (out of memory) if precompiled headers are used for
+# release builds.
+
+{
+  'conditions': [
+    # Precompiled headers apply only to MSVC toolchains: Windows proper,
+    # or lb_shell builds targeting Xbox 360 -- and only when the
+    # chromium_win_pch gyp variable is enabled.
+    ['(OS=="win" or (OS=="lb_shell" and target_arch=="xb360")) and \
+      chromium_win_pch==1', {
+        'target_defaults': {
+          'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+          'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+          # The precompiled source must also be compiled into each target.
+          'sources': ['<(DEPTH)/build/precompile.cc'],
+        }
+      }],
+  ],
+}