Merge remote-tracking branch 'upstream/master'
diff --git a/.gitignore b/.gitignore
index 395a7ff..a242c60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,7 @@
 /git_bin
 /git-*_bin
 /svn_bin
+/external_bin
 /win_toolchain/vs2013_files
 /win_toolchain/.timestamps
 /win_toolchain/.vspro
@@ -45,3 +46,6 @@
 /tests/subversion_config/servers
 /tests/svn/
 /tests/svnrepo/
+
+# Ignore virtualenv created during bootstrapping.
+/ENV
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..de0c6a7
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = chromium
diff --git a/OWNERS b/OWNERS
index 0343114..44c55ef 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,13 +1,18 @@
 set noparent
 agable@chromium.org
-bradnelson@google.com
-cmp@chromium.org
+bradnelson@chromium.org
 dpranke@chromium.org
+hinoka@chromium.org
 iannucci@chromium.org
 jochen@chromium.org
 maruel@chromium.org
-maruel@google.com
+nodir@chromium.org
 petermayo@chromium.org
+pgervais@chromium.org
 rogerta@chromium.org
 stip@chromium.org
-szager@chromium.org
+
+per-file commit_queue*=akuegel@chromium.org
+per-file commit_queue*=phajdan.jr@chromium.org
+per-file commit_queue*=sergiyb@chromium.org
+per-file commit_queue*=tandrii@chromium.org
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 767eac6..41c56d3 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -18,10 +18,12 @@
   black_list = list(input_api.DEFAULT_BLACK_LIST) + [
       r'^cpplint\.py$',
       r'^cpplint_chromium\.py$',
+      r'^external_bin[\/\\].+',
       r'^python[0-9]*_bin[\/\\].+',
       r'^site-packages-py[0-9]\.[0-9][\/\\].+',
       r'^svn_bin[\/\\].+',
-      r'^testing_support[\/\\]_rietveld[\/\\].+']
+      r'^testing_support[\/\\]_rietveld[\/\\].+',
+      r'^bootstrap[\/\\].+']
   if os.path.exists('.gitignore'):
     with open('.gitignore') as fh:
       lines = [l.strip() for l in fh.readlines()]
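
For reference, a minimal sketch (assuming PRESUBMIT black_list entries are
regexes matched against repo-relative paths) of why the two new entries keep
the vendored trees out of presubmit checks on both path-separator styles:

import re

BLACK_LIST = [r'^external_bin[\/\\].+', r'^bootstrap[\/\\].+']

def is_blacklisted(path):
  # Each entry is anchored at the repo root and matches either separator.
  return any(re.match(pattern, path) for pattern in BLACK_LIST)

assert is_blacklisted('external_bin/gsutil/gsutil')
assert is_blacklisted('bootstrap\\virtualenv\\virtualenv.py')
assert not is_blacklisted('auth.py')
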
diff --git a/apply_issue.py b/apply_issue.py
index 8bcb3b2..ba38987 100755
--- a/apply_issue.py
+++ b/apply_issue.py
@@ -18,6 +18,7 @@
 import breakpad  # pylint: disable=W0611
 
 import annotated_gclient
+import auth
 import checkout
 import fix_encoding
 import gclient_utils
@@ -56,13 +57,10 @@
       help='File containing the email address to access rietveld. '
            'If not specified, anonymous access will be used.')
   parser.add_option(
-      '-w', '--password',
-      help='Password for email addressed. Use - to read password from stdin. '
-           'if -k is provided, this is the private key file password.')
-  parser.add_option(
       '-k', '--private-key-file',
       help='Path to file containing a private key in p12 format for OAuth2 '
-           'authentication. Use -w to provide the decrypting password, if any.')
+           'authentication with "notasecret" password (as generated by Google '
+           'Cloud Console).')
   parser.add_option(
       '-i', '--issue', type='int', help='Rietveld issue number')
   parser.add_option(
@@ -92,13 +90,14 @@
                     help='Don\'t patch specified file(s).')
   parser.add_option('-d', '--ignore_deps', action='store_true',
                     help='Don\'t run gclient sync on DEPS changes.')
+
+  auth.add_auth_options(parser)
   options, args = parser.parse_args()
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if options.whitelist and options.blacklist:
     parser.error('Cannot specify both --whitelist and --blacklist')
 
-  if options.password and options.private_key_file:
-    parser.error('-k and -w options are incompatible')
   if options.email and options.email_file:
     parser.error('-e and -E options are incompatible')
 
@@ -121,10 +120,6 @@
 
   options.revision_mapping = json.loads(options.revision_mapping)
 
-  if options.password == '-':
-    print('Reading password')
-    options.password = sys.stdin.readline().strip()
-
   # read email if needed
   if options.email_file:
     if not os.path.exists(options.email_file):
@@ -138,11 +133,11 @@
     # OAuth2 authentication
     obj = rietveld.JwtOAuth2Rietveld(options.server,
                                      options.email,
-                                     options.private_key_file,
-                                     private_key_password=options.password)
+                                     options.private_key_file)
     properties = obj.get_issue_properties(options.issue, False)
   else:
-    obj = rietveld.Rietveld(options.server, '', None)
+    # Passing None as auth_config disables authentication.
+    obj = rietveld.Rietveld(options.server, None)
     properties = None
     # Bad except clauses order (HTTPError is an ancestor class of
     # ClientLoginError)
@@ -156,77 +151,101 @@
         exit('FAIL: Login detected -- is issue private?')
       # TODO(maruel): A few 'Invalid username or password.' are printed first,
       # we should get rid of those.
-    except rietveld.upload.ClientLoginError, e:
+    except rietveld.upload.ClientLoginError as e:
       # Fine, we'll do proper authentication.
       pass
     if properties is None:
-      if options.email is not None:
-        obj = rietveld.Rietveld(options.server, options.email, options.password)
-        try:
-          properties = obj.get_issue_properties(options.issue, False)
-        except rietveld.upload.ClientLoginError, e:
-          if sys.stdout.closed:
-            print('Accessing the issue requires proper credentials.')
-            return 1
-      else:
-        print('Accessing the issue requires login.')
-        obj = rietveld.Rietveld(options.server, None, None)
-        try:
-          properties = obj.get_issue_properties(options.issue, False)
-        except rietveld.upload.ClientLoginError, e:
-          print('Accessing the issue requires proper credentials.')
-          return 1
+      obj = rietveld.Rietveld(options.server, auth_config, options.email)
+      try:
+        properties = obj.get_issue_properties(options.issue, False)
+      except rietveld.upload.ClientLoginError as e:
+        print('Accessing the issue requires proper credentials.')
+        return 1
 
   if not options.patchset:
     options.patchset = properties['patchsets'][-1]
     print('No patchset specified. Using patchset %d' % options.patchset)
 
-  print('Downloading the patch.')
-  try:
-    patchset = obj.get_patch(options.issue, options.patchset)
-  except urllib2.HTTPError, e:
-    print(
-        'Failed to fetch the patch for issue %d, patchset %d.\n'
-        'Try visiting %s/%d') % (
-            options.issue, options.patchset,
-            options.server, options.issue)
-    return 1
-  if options.whitelist:
-    patchset.patches = [patch for patch in patchset.patches
-                        if patch.filename in options.whitelist]
-  if options.blacklist:
-    patchset.patches = [patch for patch in patchset.patches
-                        if patch.filename not in options.blacklist]
-  for patch in patchset.patches:
-    print(patch)
-  full_dir = os.path.abspath(options.root_dir)
-  scm_type = scm.determine_scm(full_dir)
-  if scm_type == 'svn':
-    scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
-  elif scm_type == 'git':
-    scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
-  elif scm_type == None:
-    scm_obj = checkout.RawCheckout(full_dir, None, None)
-  else:
-    parser.error('Couldn\'t determine the scm')
+  issues_patchsets_to_apply = [(options.issue, options.patchset)]
+  depends_on_info = obj.get_depends_on_patchset(options.issue, options.patchset)
+  while depends_on_info:
+    depends_on_issue = int(depends_on_info['issue'])
+    depends_on_patchset = int(depends_on_info['patchset'])
+    try:
+      depends_on_info = obj.get_depends_on_patchset(depends_on_issue,
+                                                    depends_on_patchset)
+      issues_patchsets_to_apply.insert(0, (depends_on_issue,
+                                           depends_on_patchset))
+    except urllib2.HTTPError:
+      print ('The patchset that was marked as a dependency no longer '
+             'exists: %s/%d/#ps%d' % (
+                 options.server, depends_on_issue, depends_on_patchset))
+      print 'Therefore it is likely that this patch will not apply cleanly.'
+      print
+      depends_on_info = None
 
-  # TODO(maruel): HACK, remove me.
-  # When run a build slave, make sure buildbot knows that the checkout was
-  # modified.
-  if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
-    # See sourcedirIsPatched() in:
-    # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
-    #    chromium_commands.py?view=markup
-    open('.buildbot-patched', 'w').close()
+  num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
+  if num_issues_patchsets_to_apply > 1:
+    print
+    print 'apply_issue.py found %d dependent CLs.' % (
+        num_issues_patchsets_to_apply - 1)
+    print 'They will be applied in the following order:'
+    num = 1
+    for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
+      print '  #%d %s/%d/#ps%d' % (
+          num, options.server, issue_to_apply, patchset_to_apply)
+      num += 1
+    print
 
-  print('\nApplying the patch.')
-  try:
-    scm_obj.apply_patch(patchset, verbose=True)
-  except checkout.PatchApplicationFailed, e:
-    print(str(e))
-    print('CWD=%s' % os.getcwd())
-    print('Checkout path=%s' % scm_obj.project_path)
-    return 1
+  for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
+    issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
+                                 patchset_to_apply)
+    print('Downloading patch from %s' % issue_url)
+    try:
+      patchset = obj.get_patch(issue_to_apply, patchset_to_apply)
+    except urllib2.HTTPError as e:
+      print(
+          'Failed to fetch the patch for issue %d, patchset %d.\n'
+          'Try visiting %s/%d') % (
+              issue_to_apply, patchset_to_apply,
+              options.server, issue_to_apply)
+      return 1
+    if options.whitelist:
+      patchset.patches = [patch for patch in patchset.patches
+                          if patch.filename in options.whitelist]
+    if options.blacklist:
+      patchset.patches = [patch for patch in patchset.patches
+                          if patch.filename not in options.blacklist]
+    for patch in patchset.patches:
+      print(patch)
+    full_dir = os.path.abspath(options.root_dir)
+    scm_type = scm.determine_scm(full_dir)
+    if scm_type == 'svn':
+      scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
+    elif scm_type == 'git':
+      scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
+    elif scm_type is None:
+      scm_obj = checkout.RawCheckout(full_dir, None, None)
+    else:
+      parser.error('Couldn\'t determine the scm')
+
+    # TODO(maruel): HACK, remove me.
+    # When run on a build slave, make sure buildbot knows that the checkout was
+    # modified.
+    if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
+      # See sourcedirIsPatched() in:
+      # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
+      #    chromium_commands.py?view=markup
+      open('.buildbot-patched', 'w').close()
+
+    print('\nApplying the patch from %s' % issue_url)
+    try:
+      scm_obj.apply_patch(patchset, verbose=True)
+    except checkout.PatchApplicationFailed as e:
+      print(str(e))
+      print('CWD=%s' % os.getcwd())
+      print('Checkout path=%s' % scm_obj.project_path)
+      return 1
 
   if ('DEPS' in map(os.path.basename, patchset.filenames)
       and not options.ignore_deps):
@@ -262,4 +281,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
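
A hedged sketch of the new dependency walk above, with a hypothetical
in-memory stand-in for obj.get_depends_on_patchset(), showing that
insert(0, ...) ends up applying the deepest dependency first:

# Hypothetical dependency data: CL 3 depends on CL 2, which depends on CL 1.
DEPENDS = {
    (3, 30): {'issue': '2', 'patchset': '20'},
    (2, 20): {'issue': '1', 'patchset': '10'},
}

def get_depends_on_patchset(issue, patchset):  # stand-in for the Rietveld RPC
  return DEPENDS.get((issue, patchset))

issues_patchsets_to_apply = [(3, 30)]
depends_on_info = get_depends_on_patchset(3, 30)
while depends_on_info:
  issue = int(depends_on_info['issue'])
  patchset = int(depends_on_info['patchset'])
  depends_on_info = get_depends_on_patchset(issue, patchset)
  issues_patchsets_to_apply.insert(0, (issue, patchset))

# The base CL is applied first, the requested CL last.
assert issues_patchsets_to_apply == [(1, 10), (2, 20), (3, 30)]
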
diff --git a/auth.py b/auth.py
new file mode 100644
index 0000000..6e0d2f3
--- /dev/null
+++ b/auth.py
@@ -0,0 +1,685 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Google OAuth2 related functions."""
+
+import BaseHTTPServer
+import collections
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os
+import socket
+import sys
+import threading
+import urllib
+import urlparse
+import webbrowser
+
+from third_party import httplib2
+from third_party.oauth2client import client
+from third_party.oauth2client import multistore_file
+
+
+# depot_tools/.
+DEPOT_TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+# Google OAuth2 clients always have a secret, even if the client is an installed
+# application/utility such as this. Of course, in such cases the "secret" is
+# actually publicly known; security depends entirely on the secrecy of refresh
+# tokens, which effectively become bearer tokens. An attacker can impersonate
+# the service's identity in the OAuth2 flow. But that's generally fine as long
+# as the list of allowed redirect_uri's associated with the client_id is
+# limited to 'localhost' or 'urn:ietf:wg:oauth:2.0:oob'. In that case an
+# attacker needs a process running on the user's machine to successfully
+# complete the flow and grab the refresh token. When malicious code is already
+# running on your machine, you're compromised anyway.
+# This particular set is managed by API Console project "chrome-infra-auth".
+OAUTH_CLIENT_ID = (
+    '446450136466-2hr92jrq8e6i4tnsa56b52vacp7t3936.apps.googleusercontent.com')
+OAUTH_CLIENT_SECRET = 'uBfbay2KCy9t4QveJ-dOqHtp'
+
+# List of space separated OAuth scopes for generated tokens. GAE apps usually
+# use userinfo.email scope for authentication.
+OAUTH_SCOPES = 'https://www.googleapis.com/auth/userinfo.email'
+
+# Additional OAuth scopes.
+ADDITIONAL_SCOPES = {
+  'code.google.com': 'https://www.googleapis.com/auth/projecthosting',
+}
+
+# Path to a file with cached OAuth2 credentials used by default relative to the
+# home dir (see _get_token_cache_path). It should be a safe location accessible
+# only to a current user: knowing content of this file is roughly equivalent to
+# knowing account password. Single file can hold multiple independent tokens
+# identified by token_cache_key (see Authenticator).
+OAUTH_TOKENS_CACHE = '.depot_tools_oauth2_tokens'
+
+
+# Authentication configuration extracted from command line options.
+# See doc string for 'make_auth_config' for meaning of fields.
+AuthConfig = collections.namedtuple('AuthConfig', [
+    'use_oauth2', # deprecated, will always be True
+    'save_cookies', # deprecated, will be removed
+    'use_local_webserver',
+    'webserver_port',
+    'refresh_token_json',
+])
+
+
+# OAuth access token with its expiration time (UTC datetime or None if unknown).
+AccessToken = collections.namedtuple('AccessToken', [
+    'token',
+    'expires_at',
+])
+
+
+# Refresh token passed via --auth-refresh-token-json.
+RefreshToken = collections.namedtuple('RefreshToken', [
+    'client_id',
+    'client_secret',
+    'refresh_token',
+])
+
+
+class AuthenticationError(Exception):
+  """Raised on errors related to authentication."""
+
+
+class LoginRequiredError(AuthenticationError):
+  """Interaction with the user is required to authenticate."""
+
+  def __init__(self, token_cache_key):
+    # HACK(vadimsh): It is assumed here that the token cache key is a hostname.
+    msg = (
+        'You are not logged in. Please login first by running:\n'
+        '  depot-tools-auth login %s' % token_cache_key)
+    super(LoginRequiredError, self).__init__(msg)
+
+
+def make_auth_config(
+    use_oauth2=None,
+    save_cookies=None,
+    use_local_webserver=None,
+    webserver_port=None,
+    refresh_token_json=None):
+  """Returns new instance of AuthConfig.
+
+  If some config option is None, it will be set to a reasonable default value.
+  This function also acts as an authoritative place for default values of
+  corresponding command line options.
+  """
+  default = lambda val, d: val if val is not None else d
+  return AuthConfig(
+      default(use_oauth2, True),
+      default(save_cookies, True),
+      default(use_local_webserver, not _is_headless()),
+      default(webserver_port, 8090),
+      default(refresh_token_json, ''))
+
+
+def add_auth_options(parser, default_config=None):
+  """Appends OAuth related options to OptionParser."""
+  default_config = default_config or make_auth_config()
+  parser.auth_group = optparse.OptionGroup(parser, 'Auth options')
+  parser.add_option_group(parser.auth_group)
+
+  # OAuth2 vs password switch.
+  auth_default = 'use OAuth2' if default_config.use_oauth2 else 'use password'
+  parser.auth_group.add_option(
+      '--oauth2',
+      action='store_true',
+      dest='use_oauth2',
+      default=default_config.use_oauth2,
+      help='Use OAuth 2.0 instead of a password. [default: %s]' % auth_default)
+  parser.auth_group.add_option(
+      '--no-oauth2',
+      action='store_false',
+      dest='use_oauth2',
+      default=default_config.use_oauth2,
+      help='Use password instead of OAuth 2.0. [default: %s]' % auth_default)
+
+  # Password related options, deprecated.
+  parser.auth_group.add_option(
+      '--no-cookies',
+      action='store_false',
+      dest='save_cookies',
+      default=default_config.save_cookies,
+      help='Do not save authentication cookies to local disk.')
+
+  # OAuth2 related options.
+  parser.auth_group.add_option(
+      '--auth-no-local-webserver',
+      action='store_false',
+      dest='use_local_webserver',
+      default=default_config.use_local_webserver,
+      help='Do not run a local web server when performing OAuth2 login flow.')
+  parser.auth_group.add_option(
+      '--auth-host-port',
+      type=int,
+      default=default_config.webserver_port,
+      help='Port a local web server should listen on. Used only if '
+          '--auth-no-local-webserver is not set. [default: %default]')
+  parser.auth_group.add_option(
+      '--auth-refresh-token-json',
+      default=default_config.refresh_token_json,
+      help='Path to a JSON file with role account refresh token to use.')
+
+
+def extract_auth_config_from_options(options):
+  """Given OptionParser parsed options, extracts AuthConfig from it.
+
+  OptionParser should be populated with auth options by 'add_auth_options'.
+  """
+  return make_auth_config(
+      use_oauth2=options.use_oauth2,
+      save_cookies=False if options.use_oauth2 else options.save_cookies,
+      use_local_webserver=options.use_local_webserver,
+      webserver_port=options.auth_host_port,
+      refresh_token_json=options.auth_refresh_token_json)
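
A minimal usage sketch of this pairing (the same wiring the apply_issue.py
hunk above adopts); parsing an empty argv just exercises the defaults from
make_auth_config(), and assumes depot_tools is on sys.path:

import optparse
import auth

parser = optparse.OptionParser()
auth.add_auth_options(parser)          # adds --oauth2, --auth-host-port, ...
options, _ = parser.parse_args([])     # no flags: defaults only
config = auth.extract_auth_config_from_options(options)
assert config.use_oauth2               # OAuth2 is the default
assert config.webserver_port == 8090   # default from make_auth_config()
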
+
+
+def auth_config_to_command_options(auth_config):
+  """AuthConfig -> list of strings with command line options.
+
+  Omits options that are set to default values.
+  """
+  if not auth_config:
+    return []
+  defaults = make_auth_config()
+  opts = []
+  if auth_config.use_oauth2 != defaults.use_oauth2:
+    opts.append('--oauth2' if auth_config.use_oauth2 else '--no-oauth2')
+  if auth_config.save_cookies != defaults.save_cookies:
+    if not auth_config.save_cookies:
+      opts.append('--no-cookies')
+  if auth_config.use_local_webserver != defaults.use_local_webserver:
+    if not auth_config.use_local_webserver:
+      opts.append('--auth-no-local-webserver')
+  if auth_config.webserver_port != defaults.webserver_port:
+    opts.extend(['--auth-host-port', str(auth_config.webserver_port)])
+  if auth_config.refresh_token_json != defaults.refresh_token_json:
+    opts.extend([
+        '--auth-refresh-token-json', str(auth_config.refresh_token_json)])
+  return opts
+
+
+def get_authenticator_for_host(hostname, config):
+  """Returns Authenticator instance to access given host.
+
+  Args:
+    hostname: a naked hostname or http(s)://<hostname>[/] URL. Used to derive
+        a cache key for token cache.
+    config: AuthConfig instance.
+
+  Returns:
+    Authenticator object.
+  """
+  hostname = hostname.lower().rstrip('/')
+  # Append some scheme, otherwise urlparse puts hostname into parsed.path.
+  if '://' not in hostname:
+    hostname = 'https://' + hostname
+  scopes = OAUTH_SCOPES
+  parsed = urlparse.urlparse(hostname)
+  if parsed.netloc in ADDITIONAL_SCOPES:
+    scopes = "%s %s" % (scopes, ADDITIONAL_SCOPES[parsed.netloc])
+
+  if parsed.path or parsed.params or parsed.query or parsed.fragment:
+    raise AuthenticationError(
+        'Expecting a hostname or root host URL, got %s instead' % hostname)
+  return Authenticator(parsed.netloc, config, scopes)
+
+
+class Authenticator(object):
+  """Object that knows how to refresh access tokens when needed.
+
+  Args:
+    token_cache_key: string key of a section of the token cache file to use
+        to keep the tokens. See hostname_to_token_cache_key.
+    config: AuthConfig object that holds authentication configuration.
+  """
+
+  def __init__(self, token_cache_key, config, scopes):
+    assert isinstance(config, AuthConfig)
+    assert config.use_oauth2
+    self._access_token = None
+    self._config = config
+    self._lock = threading.Lock()
+    self._token_cache_key = token_cache_key
+    self._external_token = None
+    self._scopes = scopes
+    if config.refresh_token_json:
+      self._external_token = _read_refresh_token_json(config.refresh_token_json)
+    logging.debug('Using auth config %r', config)
+
+  def login(self):
+    """Performs interactive login flow if necessary.
+
+    Raises:
+      AuthenticationError on error or if interrupted.
+    """
+    if self._external_token:
+      raise AuthenticationError(
+          'Can\'t run login flow when using --auth-refresh-token-json.')
+    return self.get_access_token(
+        force_refresh=True, allow_user_interaction=True)
+
+  def logout(self):
+    """Revokes the refresh token and deletes it from the cache.
+
+    Returns True if had some credentials cached.
+    """
+    with self._lock:
+      self._access_token = None
+      storage = self._get_storage()
+      credentials = storage.get()
+      had_creds = bool(credentials)
+      if credentials and credentials.refresh_token and credentials.revoke_uri:
+        try:
+          credentials.revoke(httplib2.Http())
+        except client.TokenRevokeError as e:
+          logging.warning('Failed to revoke refresh token: %s', e)
+      storage.delete()
+    return had_creds
+
+  def has_cached_credentials(self):
+    """Returns True if long term credentials (refresh token) are in cache.
+
+    Doesn't make network calls.
+
+    If this returns False, a later get_access_token() will ask for interactive
+    login by raising LoginRequiredError.
+
+    If this returns True, get_access_token() most probably won't ask for
+    interactive login, though that is not guaranteed: the cached token may
+    already have been revoked, and there's no way to tell without actually
+    trying to use it.
+    """
+    with self._lock:
+      return bool(self._get_cached_credentials())
+
+  def get_access_token(self, force_refresh=False, allow_user_interaction=False):
+    """Returns AccessToken, refreshing it if necessary.
+
+    Args:
+      force_refresh: forcefully refresh access token even if it is not expired.
+      allow_user_interaction: True to enable blocking for user input if needed.
+
+    Raises:
+      AuthenticationError on error or if authentication flow was interrupted.
+      LoginRequiredError if user interaction is required, but
+          allow_user_interaction is False.
+    """
+    with self._lock:
+      if force_refresh:
+        logging.debug('Forcing access token refresh')
+        self._access_token = self._create_access_token(allow_user_interaction)
+        return self._access_token
+
+      # Load from on-disk cache on a first access.
+      if not self._access_token:
+        self._access_token = self._load_access_token()
+
+      # Refresh if expired or missing.
+      if not self._access_token or _needs_refresh(self._access_token):
+        # Maybe some other process already updated it, reload from the cache.
+        self._access_token = self._load_access_token()
+        # Nope, still expired, need to run the refresh flow.
+        if not self._access_token or _needs_refresh(self._access_token):
+          self._access_token = self._create_access_token(allow_user_interaction)
+
+      return self._access_token
+
+  def get_token_info(self):
+    """Returns a result of /oauth2/v2/tokeninfo call with token info."""
+    access_token = self.get_access_token()
+    resp, content = httplib2.Http().request(
+        uri='https://www.googleapis.com/oauth2/v2/tokeninfo?%s' % (
+            urllib.urlencode({'access_token': access_token.token})))
+    if resp.status == 200:
+      return json.loads(content)
+    raise AuthenticationError('Failed to fetch the token info: %r' % content)
+
+  def authorize(self, http):
+    """Monkey patches authentication logic of httplib2.Http instance.
+
+    The modified http.request method will add authentication headers to each
+    request and will refresh access_tokens when a 401 is received on a
+    request.
+
+    Args:
+       http: An instance of httplib2.Http.
+
+    Returns:
+       A modified instance of http that was passed in.
+    """
+    # Adapted from oauth2client.OAuth2Credentials.authorize.
+
+    request_orig = http.request
+
+    @functools.wraps(request_orig)
+    def new_request(
+        uri, method='GET', body=None, headers=None,
+        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+        connection_type=None):
+      headers = (headers or {}).copy()
+      headers['Authorization'] = 'Bearer %s' % self.get_access_token().token
+      resp, content = request_orig(
+          uri, method, body, headers, redirections, connection_type)
+      if resp.status in client.REFRESH_STATUS_CODES:
+        logging.info('Refreshing due to a %s', resp.status)
+        access_token = self.get_access_token(force_refresh=True)
+        headers['Authorization'] = 'Bearer %s' % access_token.token
+        return request_orig(
+            uri, method, body, headers, redirections, connection_type)
+      else:
+        return (resp, content)
+
+    http.request = new_request
+    return http
+
+  ## Private methods.
+
+  def _get_storage(self):
+    """Returns oauth2client.Storage with cached tokens."""
+    # Do not mix cache keys for different externally provided tokens.
+    if self._external_token:
+      token_hash = hashlib.sha1(self._external_token.refresh_token).hexdigest()
+      cache_key = '%s:refresh_tok:%s' % (self._token_cache_key, token_hash)
+    else:
+      cache_key = self._token_cache_key
+    path = _get_token_cache_path()
+    logging.debug('Using token storage %r (cache key %r)', path, cache_key)
+    return multistore_file.get_credential_storage_custom_string_key(
+        path, cache_key)
+
+  def _get_cached_credentials(self):
+    """Returns oauth2client.Credentials loaded from storage."""
+    storage = self._get_storage()
+    credentials = storage.get()
+
+    if not credentials:
+      logging.debug('No cached token')
+    else:
+      _log_credentials_info('cached token', credentials)
+
+    # Is using --auth-refresh-token-json?
+    if self._external_token:
+      # Cached credentials are valid and match external token -> use them. It is
+      # important to reuse credentials from the storage because they contain
+      # cached access token.
+      valid = (
+          credentials and not credentials.invalid and
+          credentials.refresh_token == self._external_token.refresh_token and
+          credentials.client_id == self._external_token.client_id and
+          credentials.client_secret == self._external_token.client_secret)
+      if valid:
+        logging.debug('Cached credentials match external refresh token')
+        return credentials
+      # Construct new credentials from externally provided refresh token,
+      # associate them with cache storage (so that access_token will be placed
+      # in the cache later too).
+      logging.debug('Putting external refresh token into the cache')
+      credentials = client.OAuth2Credentials(
+          access_token=None,
+          client_id=self._external_token.client_id,
+          client_secret=self._external_token.client_secret,
+          refresh_token=self._external_token.refresh_token,
+          token_expiry=None,
+          token_uri='https://accounts.google.com/o/oauth2/token',
+          user_agent=None,
+          revoke_uri=None)
+      credentials.set_store(storage)
+      storage.put(credentials)
+      return credentials
+
+    # Not using external refresh token -> return whatever is cached.
+    return credentials if (credentials and not credentials.invalid) else None
+
+  def _load_access_token(self):
+    """Returns cached AccessToken if it is not expired yet."""
+    logging.debug('Reloading access token from cache')
+    creds = self._get_cached_credentials()
+    if not creds or not creds.access_token or creds.access_token_expired:
+      logging.debug('Access token is missing or expired')
+      return None
+    return AccessToken(str(creds.access_token), creds.token_expiry)
+
+  def _create_access_token(self, allow_user_interaction=False):
+    """Mints and caches a new access token, launching OAuth2 dance if necessary.
+
+    Uses the cached refresh token, if present; in that case user interaction is
+    not required and the function finishes quietly. Otherwise it launches the
+    3-legged OAuth2 flow, which needs user interaction.
+
+    Args:
+      allow_user_interaction: if True, allow interaction with the user (e.g.
+          reading standard input, or launching a browser).
+
+    Returns:
+      AccessToken.
+
+    Raises:
+      AuthenticationError on error or if authentication flow was interrupted.
+      LoginRequiredError if user interaction is required, but
+          allow_user_interaction is False.
+    """
+    logging.debug(
+        'Making new access token (allow_user_interaction=%r)',
+        allow_user_interaction)
+    credentials = self._get_cached_credentials()
+
+    # 3-legged flow with (perhaps cached) refresh token.
+    refreshed = False
+    if credentials and not credentials.invalid:
+      try:
+        logging.debug('Attempting to refresh access_token')
+        credentials.refresh(httplib2.Http())
+        _log_credentials_info('refreshed token', credentials)
+        refreshed = True
+      except client.Error as err:
+        logging.warning(
+            'OAuth error during access token refresh (%s). '
+            'Attempting a full authentication flow.', err)
+
+    # Refresh token is missing or invalid, go through the full flow.
+    if not refreshed:
+      # Can't refresh externally provided token.
+      if self._external_token:
+        raise AuthenticationError(
+            'Token provided via --auth-refresh-token-json is no longer valid.')
+      if not allow_user_interaction:
+        logging.debug('Requesting user to login')
+        raise LoginRequiredError(self._token_cache_key)
+      logging.debug('Launching OAuth browser flow')
+      credentials = _run_oauth_dance(self._config, self._scopes)
+      _log_credentials_info('new token', credentials)
+
+    logging.info(
+        'OAuth access_token refreshed. Expires in %s.',
+        credentials.token_expiry - datetime.datetime.utcnow())
+    storage = self._get_storage()
+    credentials.set_store(storage)
+    storage.put(credentials)
+    return AccessToken(str(credentials.access_token), credentials.token_expiry)
+
+
+## Private functions.
+
+
+def _get_token_cache_path():
+  # On non-Windows just use HOME.
+  if sys.platform != 'win32':
+    return os.path.join(os.path.expanduser('~'), OAUTH_TOKENS_CACHE)
+  # Prefer USERPROFILE over HOME, since HOME is overridden in
+  # git-..._bin/cmd/git.cmd to point to depot_tools. The depot-tools-auth.py
+  # script (and all other scripts) doesn't use this override and thus sees a
+  # different value for HOME. git.cmd doesn't touch USERPROFILE though, and
+  # usually USERPROFILE == HOME on Windows.
+  if 'USERPROFILE' in os.environ:
+    return os.path.join(os.environ['USERPROFILE'], OAUTH_TOKENS_CACHE)
+  return os.path.join(os.path.expanduser('~'), OAUTH_TOKENS_CACHE)
+
+
+def _is_headless():
+  """True if machine doesn't seem to have a display."""
+  return sys.platform == 'linux2' and not os.environ.get('DISPLAY')
+
+
+def _read_refresh_token_json(path):
+  """Returns RefreshToken by reading it from the JSON file."""
+  try:
+    with open(path, 'r') as f:
+      data = json.load(f)
+      return RefreshToken(
+          client_id=str(data.get('client_id', OAUTH_CLIENT_ID)),
+          client_secret=str(data.get('client_secret', OAUTH_CLIENT_SECRET)),
+          refresh_token=str(data['refresh_token']))
+  except (IOError, ValueError) as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: %s' % (path, e))
+  except KeyError as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: missing key %s' % (path, e))
+
+
+def _needs_refresh(access_token):
+  """True if AccessToken should be refreshed."""
+  if access_token.expires_at is not None:
+    # Allow 5 min of clock skew between client and backend.
+    now = datetime.datetime.utcnow() + datetime.timedelta(seconds=300)
+    return now >= access_token.expires_at
+  # A token without an expiration time never expires.
+  return False
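
A worked example of the skew rule above: with the 300-second allowance, a
token that expires four minutes from now already counts as stale:

import datetime

expires_at = datetime.datetime.utcnow() + datetime.timedelta(minutes=4)
now_plus_skew = datetime.datetime.utcnow() + datetime.timedelta(seconds=300)
assert now_plus_skew >= expires_at  # so _needs_refresh() returns True
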
+
+
+def _log_credentials_info(title, credentials):
+  """Dumps (non sensitive) part of client.Credentials object to debug log."""
+  if credentials:
+    logging.debug('%s info: %r', title, {
+        'access_token_expired': credentials.access_token_expired,
+        'has_access_token': bool(credentials.access_token),
+        'invalid': credentials.invalid,
+        'utcnow': datetime.datetime.utcnow(),
+        'token_expiry': credentials.token_expiry,
+    })
+
+
+def _run_oauth_dance(config, scopes):
+  """Perform full 3-legged OAuth2 flow with the browser.
+
+  Returns:
+    oauth2client.Credentials.
+
+  Raises:
+    AuthenticationError on errors.
+  """
+  flow = client.OAuth2WebServerFlow(
+      OAUTH_CLIENT_ID,
+      OAUTH_CLIENT_SECRET,
+      scopes,
+      approval_prompt='force')
+
+  use_local_webserver = config.use_local_webserver
+  port = config.webserver_port
+  if config.use_local_webserver:
+    success = False
+    try:
+      httpd = _ClientRedirectServer(('localhost', port), _ClientRedirectHandler)
+    except socket.error:
+      pass
+    else:
+      success = True
+    use_local_webserver = success
+    if not success:
+      print(
+        'Failed to start a local webserver listening on port %d.\n'
+        'Please check your firewall settings and locally running programs that '
+        'may be blocking or using those ports.\n\n'
+        'Falling back to --auth-no-local-webserver and continuing with '
+        'authentication.\n' % port)
+
+  if use_local_webserver:
+    oauth_callback = 'http://localhost:%s/' % port
+  else:
+    oauth_callback = client.OOB_CALLBACK_URN
+  flow.redirect_uri = oauth_callback
+  authorize_url = flow.step1_get_authorize_url()
+
+  if use_local_webserver:
+    webbrowser.open(authorize_url, new=1, autoraise=True)
+    print(
+      'Your browser has been opened to visit:\n\n'
+      '    %s\n\n'
+      'If your browser is on a different machine then exit and re-run this '
+      'application with the command-line parameter\n\n'
+      '  --auth-no-local-webserver\n' % authorize_url)
+  else:
+    print(
+      'Go to the following link in your browser:\n\n'
+      '    %s\n' % authorize_url)
+
+  try:
+    code = None
+    if use_local_webserver:
+      httpd.handle_request()
+      if 'error' in httpd.query_params:
+        raise AuthenticationError(
+            'Authentication request was rejected: %s' %
+            httpd.query_params['error'])
+      if 'code' not in httpd.query_params:
+        raise AuthenticationError(
+            'Failed to find "code" in the query parameters of the redirect.\n'
+            'Try running with --auth-no-local-webserver.')
+      code = httpd.query_params['code']
+    else:
+      code = raw_input('Enter verification code: ').strip()
+  except KeyboardInterrupt:
+    raise AuthenticationError('Authentication was canceled.')
+
+  try:
+    return flow.step2_exchange(code)
+  except client.FlowExchangeError as e:
+    raise AuthenticationError('Authentication has failed: %s' % e)
+
+
+class _ClientRedirectServer(BaseHTTPServer.HTTPServer):
+  """A server to handle OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into query_params and then stops serving.
+  """
+  query_params = {}
+
+
+class _ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler for OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into the servers query_params and then stops serving.
+  """
+
+  def do_GET(self):
+    """Handle a GET request.
+
+    Parses the query parameters and prints a message
+    if the flow has completed. Note that we can't detect
+    if an error occurred.
+    """
+    self.send_response(200)
+    self.send_header('Content-type', 'text/html')
+    self.end_headers()
+    query = self.path.split('?', 1)[-1]
+    query = dict(urlparse.parse_qsl(query))
+    self.server.query_params = query
+    self.wfile.write('<html><head><title>Authentication Status</title></head>')
+    self.wfile.write('<body><p>The authentication flow has completed.</p>')
+    self.wfile.write('</body></html>')
+
+  def log_message(self, _format, *args):
+    """Do not log messages to stdout while running as command line program."""
diff --git a/bootstrap/.gitignore b/bootstrap/.gitignore
new file mode 100644
index 0000000..7603e80
--- /dev/null
+++ b/bootstrap/.gitignore
@@ -0,0 +1,2 @@
+BUILD_ENV
+wheelhouse
diff --git a/bootstrap/bootstrap.py b/bootstrap/bootstrap.py
new file mode 100755
index 0000000..eb596af
--- /dev/null
+++ b/bootstrap/bootstrap.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import contextlib
+import glob
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from util import STORAGE_URL, OBJECT_URL, LOCAL_STORAGE_PATH, LOCAL_OBJECT_URL
+from util import read_deps, merge_deps, print_deps, platform_tag
+
+LOGGER = logging.getLogger(__name__)
+
+# /path/to/depot_tools
+ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+PYTHON_BAT_WIN = '@%~dp0\\..\\Scripts\\python.exe %*'
+
+
+class NoWheelException(Exception):
+  def __init__(self, name, version, build, source_sha):
+    super(NoWheelException, self).__init__(
+        'No matching wheel found for (%s==%s (build %s_%s))' %
+        (name, version, build, source_sha))
+
+
+def check_pydistutils():
+  if os.path.exists(os.path.expanduser('~/.pydistutils.cfg')):
+    print >> sys.stderr, '\n'.join([
+      '',
+      '',
+      '=========== ERROR ===========',
+      'You have a ~/.pydistutils.cfg file, which interferes with the ',
+      'infra virtualenv environment. Please move it to the side and bootstrap ',
+      'again. Once infra has bootstrapped, you may move it back.',
+      '',
+      'Upstream bug: https://github.com/pypa/virtualenv/issues/88/',
+      ''
+    ])
+    sys.exit(1)
+
+
+def ls(prefix):
+  from pip._vendor import requests  # pylint: disable=E0611
+  data = requests.get(STORAGE_URL, params=dict(
+      prefix=prefix,
+      fields='items(name,md5Hash)'
+  )).json()
+  entries = data.get('items', [])
+  for entry in entries:
+    entry['md5Hash'] = entry['md5Hash'].decode('base64').encode('hex')
+    entry['local'] = False
+  # Also look in the local cache
+  entries.extend([
+    {'name': fname, 'md5Hash': None, 'local': True}
+    for fname in glob.glob(os.path.join(LOCAL_STORAGE_PATH,
+                                        prefix.split('/')[-1] + '*'))])
+  return entries
+
+
+def sha_for(deps_entry):
+  if 'rev' in deps_entry:
+    return deps_entry['rev']
+  else:
+    return deps_entry['gs'].split('.')[0]
+
+
+def get_links(deps):
+  import pip.wheel  # pylint: disable=E0611
+  plat_tag = platform_tag()
+
+  links = []
+
+  for name, dep in deps.iteritems():
+    version, source_sha = dep['version'], sha_for(dep)
+    prefix = 'wheels/{}-{}-{}_{}'.format(name, version, dep['build'],
+                                         source_sha)
+    generic_link = None
+    binary_link = None
+    local_link = None
+
+    for entry in ls(prefix):
+      fname = entry['name'].split('/')[-1]
+      md5hash = entry['md5Hash']
+      wheel_info = pip.wheel.Wheel.wheel_file_re.match(fname)
+      if not wheel_info:
+        LOGGER.warn('Skipping invalid wheel: %r', fname)
+        continue
+
+      if pip.wheel.Wheel(fname).supported():
+        if entry['local']:
+          link = LOCAL_OBJECT_URL.format(entry['name'])
+          local_link = link
+          continue
+        else:
+          link = OBJECT_URL.format(entry['name'], md5hash)
+        if fname.endswith('none-any.whl'):
+          if generic_link:
+            LOGGER.error(
+              'Found more than one generic matching wheel for %r: %r',
+              prefix, dep)
+            continue
+          generic_link = link
+        elif plat_tag in fname:
+          if binary_link:
+            LOGGER.error(
+              'Found more than one binary matching wheel for %r: %r',
+              prefix, dep)
+            continue
+          binary_link = link
+
+    if not binary_link and not generic_link and not local_link:
+      raise NoWheelException(name, version, dep['build'], source_sha)
+
+    links.append(local_link or binary_link or generic_link)
+
+  return links
+
+
+@contextlib.contextmanager
+def html_index(links):
+  tf = tempfile.mktemp('.html')
+  try:
+    with open(tf, 'w') as f:
+      print >> f, '<html><body>'
+      for link in links:
+        print >> f, '<a href="%s">wat</a>' % link
+      print >> f, '</body></html>'
+    yield tf
+  finally:
+    os.unlink(tf)
+
+
+def install(deps):
+  bin_dir = 'Scripts' if sys.platform.startswith('win') else 'bin'
+  pip = os.path.join(sys.prefix, bin_dir, 'pip')
+
+  links = get_links(deps)
+  with html_index(links) as ipath:
+    requirements = []
+    # TODO(iannucci): Do this as a requirements.txt
+    for name, deps_entry in deps.iteritems():
+      if not deps_entry.get('implicit'):
+        requirements.append('%s==%s' % (name, deps_entry['version']))
+    subprocess.check_call(
+        [pip, 'install', '--no-index', '--download-cache',
+         os.path.join(ROOT, '.wheelcache'), '-f', ipath] + requirements)
+
+
+def activate_env(env, deps, quiet=False):
+  if hasattr(sys, 'real_prefix'):
+    LOGGER.error('Already activated environment!')
+    return
+
+  if not quiet:
+    print 'Activating environment: %r' % env
+  assert isinstance(deps, dict)
+
+  manifest_path = os.path.join(env, 'manifest.pyl')
+  cur_deps = read_deps(manifest_path)
+  if cur_deps != deps:
+    if not quiet:
+      print '  Removing old environment: %r' % cur_deps
+    shutil.rmtree(env, ignore_errors=True)
+    cur_deps = None
+
+  if cur_deps is None:
+    check_pydistutils()
+
+    if not quiet:
+      print '  Building new environment'
+    # Add in bundled virtualenv lib
+    sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'virtualenv'))
+    import virtualenv  # pylint: disable=F0401
+    virtualenv.create_environment(
+        env, search_dirs=virtualenv.file_search_dirs())
+
+  if not quiet:
+    print '  Activating environment'
+  # Ensure hermeticity during activation.
+  os.environ.pop('PYTHONPATH', None)
+  bin_dir = 'Scripts' if sys.platform.startswith('win') else 'bin'
+  activate_this = os.path.join(env, bin_dir, 'activate_this.py')
+  execfile(activate_this, dict(__file__=activate_this))
+
+  if cur_deps is None:
+    if not quiet:
+      print '  Installing deps'
+      print_deps(deps, indent=2, with_implicit=False)
+    install(deps)
+    virtualenv.make_environment_relocatable(env)
+    with open(manifest_path, 'wb') as f:
+      f.write(repr(deps) + '\n')
+
+  # Create bin\python.bat on Windows to unify the path where Python is found.
+  if sys.platform.startswith('win'):
+    bin_path = os.path.join(env, 'bin')
+    if not os.path.isdir(bin_path):
+      os.makedirs(bin_path)
+    python_bat_path = os.path.join(bin_path, 'python.bat')
+    if not os.path.isfile(python_bat_path):
+      with open(python_bat_path, 'w') as python_bat_file:
+        python_bat_file.write(PYTHON_BAT_WIN)
+
+  if not quiet:
+    print 'Done creating environment'
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--deps-file', '--deps_file', action='append',
+                      help='Path to deps.pyl file (may be used multiple times)')
+  parser.add_argument('-q', '--quiet', action='store_true', default=False,
+                      help='Suppress all output')
+  parser.add_argument('env_path', nargs='?',
+                      help='Path to place environment (default: %(default)s)',
+                      default='ENV')
+  opts = parser.parse_args(args)
+
+  deps = merge_deps(opts.deps_file)
+  activate_env(opts.env_path, deps, opts.quiet)
+
+
+if __name__ == '__main__':
+  logging.basicConfig()
+  LOGGER.setLevel(logging.DEBUG)
+  sys.exit(main(sys.argv[1:]))
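
A small sketch of the manifest round-trip that makes re-bootstrapping cheap:
activate_env() records repr(deps) in ENV/manifest.pyl, read_deps() recovers it
with ast.literal_eval, and plain dict equality decides whether ENV is rebuilt:

import ast

deps = {'wheel': {'version': '0.24.0', 'build': '0'}}
manifest = repr(deps) + '\n'               # what activate_env() writes
assert ast.literal_eval(manifest) == deps  # unchanged deps -> ENV is reused
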
diff --git a/bootstrap/deps.pyl b/bootstrap/deps.pyl
new file mode 100644
index 0000000..c6236d4
--- /dev/null
+++ b/bootstrap/deps.pyl
@@ -0,0 +1,15 @@
+#vim: ft=python:
+{
+  'wheel': {
+    'version': '0.24.0',
+    'build': '0',
+    'gs': 'c02262299489646af253067e8136c060a93572e3.tar.gz',
+  },
+
+  'protobuf': {
+    'version': '2.6.0',
+    'build': '0',
+    'repo': 'external/github.com/google/protobuf',
+    'rev': '629a556879cc84e0f52546f0484b65b72ce44fe8',
+  },
+}
diff --git a/bootstrap/util.py b/bootstrap/util.py
new file mode 100644
index 0000000..d64b142
--- /dev/null
+++ b/bootstrap/util.py
@@ -0,0 +1,87 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import os
+import platform
+import shutil
+import sys
+import tempfile
+
+
+ROOT = os.path.dirname(os.path.abspath(__file__))
+WHEELHOUSE = os.path.join(ROOT, 'wheelhouse')
+
+BUCKET = 'chrome-python-wheelhouse'
+STORAGE_URL = 'https://www.googleapis.com/storage/v1/b/{}/o'.format(BUCKET)
+OBJECT_URL = 'https://storage.googleapis.com/{}/{{}}#md5={{}}'.format(BUCKET)
+LOCAL_OBJECT_URL = 'file://{}'
+
+LOCAL_STORAGE_PATH = os.path.join(ROOT, 'wheelhouse_cache')
+
+SOURCE_URL = 'gs://{}/sources/{{}}'.format(BUCKET)
+WHEELS_URL = 'gs://{}/wheels/'.format(BUCKET)
+
+
+class DepsConflictException(Exception):
+  def __init__(self, name):
+    super(DepsConflictException, self).__init__(
+        'Package \'%s\' is defined twice in deps.pyl' % name)
+
+
+def platform_tag():
+  if sys.platform.startswith('linux'):
+    return '_{0}_{1}'.format(*platform.linux_distribution())
+  return ''
+
+
+def print_deps(deps, indent=1, with_implicit=True):
+  for dep, entry in deps.iteritems():
+    if not with_implicit and entry.get('implicit'):
+      continue
+    print '  ' * indent + '%s: %r' % (dep, entry)
+  print
+
+
+@contextlib.contextmanager
+def tempdir(*args, **kwargs):
+  tdir = None
+  try:
+    tdir = tempfile.mkdtemp(*args, **kwargs)
+    yield tdir
+  finally:
+    if tdir:
+      shutil.rmtree(tdir, ignore_errors=True)
+
+
+@contextlib.contextmanager
+def tempname(*args, **kwargs):
+  tmp = None
+  try:
+    tmp = tempfile.mktemp(*args, **kwargs)
+    yield tmp
+  finally:
+    if tmp:
+      try:
+        os.unlink(tmp)
+      except OSError:
+        pass
+
+
+def read_deps(path):
+  if os.path.exists(path):
+    with open(path, 'rb') as f:
+      return ast.literal_eval(f.read())
+
+
+def merge_deps(paths):
+  deps = {}
+  for path in paths:
+    d = read_deps(path)
+    for key in d:
+      if key in deps:
+        raise DepsConflictException(key)
+    deps.update(d)
+  return deps
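
And a usage sketch of the merge semantics (file name is illustrative; assumes
bootstrap/ is on sys.path): defining the same package in two deps files raises
DepsConflictException instead of silently overriding:

import os
import tempfile
from util import merge_deps, DepsConflictException

path = os.path.join(tempfile.mkdtemp(), 'extra.pyl')
with open(path, 'w') as f:
  f.write("{'wheel': {'version': '0.24.0', 'build': '0'}}")

assert 'wheel' in merge_deps([path])
try:
  merge_deps([path, path])  # same package defined twice
  raise AssertionError('expected a conflict')
except DepsConflictException:
  pass
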
diff --git a/bootstrap/virtualenv/.gitignore b/bootstrap/virtualenv/.gitignore
new file mode 100644
index 0000000..6a79b83
--- /dev/null
+++ b/bootstrap/virtualenv/.gitignore
@@ -0,0 +1,10 @@
+virtualenv.egg-info
+build
+dist
+docs/_build
+.DS_Store
+*.pyc
+mock-*.egg
+nose-*.egg
+.tox
+tests/test_activate_actual.output
diff --git a/bootstrap/virtualenv/.travis.yml b/bootstrap/virtualenv/.travis.yml
new file mode 100644
index 0000000..b0c6d1a
--- /dev/null
+++ b/bootstrap/virtualenv/.travis.yml
@@ -0,0 +1,28 @@
+language: python
+
+env:
+  - TOXENV=py26
+  - TOXENV=py27
+  - TOXENV=py32
+  - TOXENV=py33
+  - TOXENV=py34
+  - TOXENV=pypy
+  - TOXENV=pypy3
+  - TOXENV=docs
+
+install: pip install tox
+
+script: tox
+
+branches:
+  only:
+    - master
+    - develop
+    - 1.11.X
+
+notifications:
+  irc:
+    channels:
+      - "irc.freenode.org#pypa-dev"
+    use_notice: true
+    skip_join: true
diff --git a/bootstrap/virtualenv/AUTHORS.txt b/bootstrap/virtualenv/AUTHORS.txt
new file mode 100644
index 0000000..2724941
--- /dev/null
+++ b/bootstrap/virtualenv/AUTHORS.txt
@@ -0,0 +1,91 @@
+Author
+------
+
+Ian Bicking
+
+Maintainers
+-----------
+
+Brian Rosner
+Carl Meyer
+Jannis Leidel
+Paul Moore
+Paul Nasrat
+Marcus Smith
+
+Contributors
+------------
+
+Alex Grönholm
+Anatoly Techtonik
+Antonio Cuni
+Antonio Valentino
+Armin Ronacher
+Barry Warsaw
+Benjamin Root
+Bradley Ayers
+Branden Rolston
+Brandon Carl
+Brian Kearns
+Cap Petschulat
+CBWhiz
+Chris Adams
+Chris McDonough
+Christos Kontas
+Christian Hudon
+Christian Stefanescu
+Christopher Nilsson
+Cliff Xuan
+Curt Micol
+Damien Nozay
+Dan Sully
+Daniel Hahler
+Daniel Holth
+David Schoonover
+Denis Costa
+Doug Hellmann
+Doug Napoleone
+Douglas Creager
+Eduard-Cristian Stefan
+Erik M. Bray
+Ethan Jucovy
+Gabriel de Perthuis
+Gunnlaugur Thor Briem
+Graham Dennis
+Greg Haskins
+Jason Penney
+Jason R. Coombs
+Jeff Hammel
+Jeremy Orem
+Jason Penney
+Jason R. Coombs
+John Kleint
+Jonathan Griffin
+Jonathan Hitchcock
+Jorge Vargas
+Josh Bronson
+Kamil Kisiel
+Kyle Gibson
+Konstantin Zemlyak
+Kumar McMillan
+Lars Francke
+Marc Abramowitz
+Mika Laitio
+Mike Hommey
+Miki Tebeka
+Philip Jenvey
+Philippe Ombredanne
+Piotr Dobrogost
+Preston Holmes
+Ralf Schmitt
+Raul Leal
+Ronny Pfannschmidt
+Satrajit Ghosh
+Sergio de Carvalho
+Stefano Rivera
+Tarek Ziadé
+Thomas Aglassinger
+Vinay Sajip
+Vitaly Babiy
+Vladimir Rutsky
+Wang Xuerui
\ No newline at end of file
diff --git a/bootstrap/virtualenv/CONTRIBUTING.rst b/bootstrap/virtualenv/CONTRIBUTING.rst
new file mode 100644
index 0000000..924e7e2
--- /dev/null
+++ b/bootstrap/virtualenv/CONTRIBUTING.rst
@@ -0,0 +1,21 @@
+virtualenv
+==========
+
+See docs/index.rst for user documentation.
+
+Contributor notes
+-----------------
+
+* virtualenv is designed to work on python 2 and 3 with a single code base.
+  Use Python 3 print-function syntax, and always use ``sys.exc_info()[1]``
+  inside the ``except`` block to get at exception objects.
+
+* virtualenv uses git-flow_ to `coordinate development`_. The latest stable
+  version should exist on the *master* branch, and new work should be
+  integrated to *develop*.
+
+* All changes to files inside virtualenv_embedded should be integrated to
+  ``virtualenv.py`` with ``bin/rebuild-script.py``.
+
+.. _git-flow: https://github.com/nvie/gitflow
+.. _coordinate development: http://nvie.com/posts/a-successful-git-branching-model/
diff --git a/bootstrap/virtualenv/LICENSE.txt b/bootstrap/virtualenv/LICENSE.txt
new file mode 100644
index 0000000..7e00d5d
--- /dev/null
+++ b/bootstrap/virtualenv/LICENSE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2007 Ian Bicking and Contributors
+Copyright (c) 2009 Ian Bicking, The Open Planning Project
+Copyright (c) 2011-2014 The virtualenv developers
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/bootstrap/virtualenv/MANIFEST.in b/bootstrap/virtualenv/MANIFEST.in
new file mode 100644
index 0000000..62621c8
--- /dev/null
+++ b/bootstrap/virtualenv/MANIFEST.in
@@ -0,0 +1,11 @@
+recursive-include bin *
+recursive-include docs *
+recursive-include scripts *
+recursive-include virtualenv_support *.whl
+recursive-include virtualenv_embedded *
+recursive-exclude docs/_templates *
+recursive-exclude docs/_build *
+include virtualenv_support/__init__.py
+include *.py
+include AUTHORS.txt
+include LICENSE.txt
diff --git a/bootstrap/virtualenv/README.rst b/bootstrap/virtualenv/README.rst
new file mode 100644
index 0000000..5a7a545
--- /dev/null
+++ b/bootstrap/virtualenv/README.rst
@@ -0,0 +1,10 @@
+virtualenv
+==========
+
+.. image:: https://pypip.in/v/virtualenv/badge.png
+        :target: https://pypi.python.org/pypi/virtualenv
+
+.. image:: https://secure.travis-ci.org/pypa/virtualenv.png?branch=develop
+   :target: http://travis-ci.org/pypa/virtualenv
+
+For documentation, see https://virtualenv.pypa.io/
diff --git a/bootstrap/virtualenv/bin/rebuild-script.py b/bootstrap/virtualenv/bin/rebuild-script.py
new file mode 100755
index 0000000..44fb129
--- /dev/null
+++ b/bootstrap/virtualenv/bin/rebuild-script.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+"""
+Helper script to rebuild virtualenv.py from virtualenv_support
+"""
+
+import re
+import os
+import sys
+
+here = os.path.dirname(__file__)
+script = os.path.join(here, '..', 'virtualenv.py')
+
+file_regex = re.compile(
+    r'##file (.*?)\n([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*convert\("""(.*?)"""\)',
+    re.S)
+file_template = '##file %(filename)s\n%(varname)s = convert("""\n%(data)s""")'
+
+def rebuild():
+    f = open(script, 'rb')
+    content = f.read()
+    f.close()
+    parts = []
+    last_pos = 0
+    match = None
+    for match in file_regex.finditer(content):
+        parts.append(content[last_pos:match.start()])
+        last_pos = match.end()
+        filename = match.group(1)
+        varname = match.group(2)
+        data = match.group(3)
+        print('Found reference to file %s' % filename)
+        pathname = os.path.join(here, '..', 'virtualenv_embedded', filename)
+        f = open(pathname, 'rb')
+        c = f.read()
+        f.close()
+        new_data = c.encode('zlib').encode('base64')
+        if new_data == data:
+            print('  Reference up to date (%s bytes)' % len(c))
+            parts.append(match.group(0))
+            continue
+        print('  Content changed (%s bytes -> %s bytes)' % (
+            zipped_len(data), len(c)))
+        new_match = file_template % dict(
+            filename=filename,
+            varname=varname,
+            data=new_data)
+        parts.append(new_match)
+    parts.append(content[last_pos:])
+    new_content = ''.join(parts)
+    if new_content != content:
+        sys.stdout.write('Content updated; overwriting... ')
+        f = open(script, 'wb')
+        f.write(new_content)
+        f.close()
+        print('done.')
+    else:
+        print('No changes in content')
+    if match is None:
+        print('No variables were matched/found')
+
+def zipped_len(data):
+    if not data:
+        return 'no data'
+    try:
+        return len(data.decode('base64').decode('zlib'))
+    except:
+        return 'unknown'
+
+if __name__ == '__main__':
+    rebuild()
+    
diff --git a/bootstrap/virtualenv/docs/Makefile b/bootstrap/virtualenv/docs/Makefile
new file mode 100644
index 0000000..e4de9f8
--- /dev/null
+++ b/bootstrap/virtualenv/docs/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-compressor.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-compressor.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/django-compressor"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-compressor"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	make -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/bootstrap/virtualenv/docs/changes.rst b/bootstrap/virtualenv/docs/changes.rst
new file mode 100644
index 0000000..9d74fda
--- /dev/null
+++ b/bootstrap/virtualenv/docs/changes.rst
@@ -0,0 +1,747 @@
+Release History
+===============
+
+12.0 (2014-12-22)
+~~~~~~~~~~~~~~~~~
+
+* **PROCESS** Version numbers are now simply ``X.Y`` where the leading ``1``
+  has been dropped.
+* Split up documentation into structured pages
+* Now using pytest framework
+* Correct sys.path ordering for debian, issue #461
+* Correctly throws error on older Pythons, issue #619
+* Allow for empty $PATH, pull #601
+* Don't set prompt if $env:VIRTUAL_ENV_DISABLE_PROMPT is set for Powershell
+* Updated setuptools to 7.0
+
+1.11.6 (2014-05-16)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 3.6
+* Updated pip to 1.5.6
+
+1.11.5 (2014-05-03)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 3.4.4
+* Updated documentation to use https://virtualenv.pypa.io/
+* Updated pip to 1.5.5
+
+1.11.4 (2014-02-21)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated pip to 1.5.4
+
+
+1.11.3 (2014-02-20)
+~~~~~~~~~~~~~~~~~~~
+
+* Updated setuptools to 2.2
+* Updated pip to 1.5.3
+
+
+1.11.2 (2014-01-26)
+~~~~~~~~~~~~~~~~~~~
+
+* Fixed easy_install-installed virtualenvs by updating pip to 1.5.2
+
+1.11.1 (2014-01-20)
+~~~~~~~~~~~~~~~~~~~
+
+* Fixed an issue where pip and setuptools were not getting installed when using
+  the ``--system-site-packages`` flag.
+* Updated setuptools to fix an issue when installed with easy_install
+* Fixed an issue with Python 3.4 and sys.stdout encoding being set to ascii
+* Upgraded pip to v1.5.1
+* Upgraded setuptools to v2.1
+
+1.11 (2014-01-02)
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Switched to using wheels for the bundled copies of
+  setuptools and pip. Using sdists is no longer supported - users supplying
+  their own versions of pip/setuptools will need to provide wheels.
+* **BACKWARDS INCOMPATIBLE** Modified the handling of ``--extra-search-dirs``.
+  This option now works like pip's ``--find-links`` option, in that it adds
+  extra directories to search for compatible wheels for pip and setuptools.
+  The actual wheel selected is chosen based on version and compatibility, using
+  the same algorithm as ``pip install setuptools``.
+* Fixed #495, --always-copy was failing (PR #511)
+* Upgraded pip to v1.5
+* Upgraded setuptools to v1.4
+
+1.10.1 (2013-08-07)
+~~~~~~~~~~~~~~~~~~~
+
+* **New Signing Key** Release 1.10.1 is using a different key than normal with
+  fingerprint: 7C6B 7C5D 5E2B 6356 A926 F04F 6E3C BCE9 3372 DCFA
+* Upgraded pip to v1.4.1
+* Upgraded setuptools to v0.9.8
+
+
+1.10 (2013-07-23)
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Dropped support for Python 2.5. The minimum
+  supported Python version is now Python 2.6.
+
+* **BACKWARDS INCOMPATIBLE** Using ``virtualenv.py`` as an isolated script
+  (i.e. without an associated ``virtualenv_support`` directory) is no longer
+  supported for security reasons and will fail with an error.
+
+  Along with this, ``--never-download`` is now always pinned to ``True``, and
+  is only being maintained in the short term for backward compatibility
+  (Pull #412).
+
+* **IMPORTANT** Switched to the new setuptools (v0.9.7) which has been merged
+  with Distribute_ again and works for Python 2 and 3 with one codebase.
+  The ``--distribute`` and ``--setuptools`` options are now no-op.
+
+* Updated to pip 1.4.
+
+* Added support for PyPy3k
+
+* Added the option to use a version number with the ``-p`` option to get the
+  system copy of that Python version (Windows only)
+
+* Removed embedded ``ez_setup.py``, ``distribute_setup.py`` and
+  ``distribute_from_egg.py`` files as part of switching to merged setuptools.
+
+* Fixed ``--relocatable`` to work better on Windows.
+
+* Fixed issue with readline on Windows.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+
+1.9.1 (2013-03-08)
+~~~~~~~~~~~~~~~~~~
+
+* Updated to pip 1.3.1, which fixed a major backward-incompatible change in
+  parsing URLs to externally hosted packages that got accidentally included
+  in pip 1.3.
+
+1.9 (2013-03-07)
+~~~~~~~~~~~~~~~~
+
+* Unset VIRTUAL_ENV environment variable in deactivate.bat (Pull #364)
+* Upgraded distribute to 0.6.34.
+* Added ``--no-setuptools`` and ``--no-pip`` options (Pull #336).
+* Fixed Issue #373. virtualenv-1.8.4 was failing in cygwin (Pull #382).
+* Fixed Issue #378. virtualenv is now "multiarch" aware on debian/ubuntu (Pull #379).
+* Fixed issue with readline module path on pypy and OSX (Pull #374).
+* Made 64bit detection compatible with Python 2.5 (Pull #393).
+
+
+1.8.4 (2012-11-25)
+~~~~~~~~~~~~~~~~~~
+
+* Updated distribute to 0.6.31. This fixes #359 (numpy install regression) on
+  UTF-8 platforms, and provides a workaround on other platforms:
+  ``PYTHONIOENCODING=utf8 pip install numpy``.
+
+* When installing virtualenv via curl, don't forget to filter out arguments
+  the distribute setup script won't understand. Fixes #358.
+
+* Added some more integration tests.
+
+* Removed the unsupported embedded setuptools egg for Python 2.4 to reduce
+  file size.
+
+1.8.3 (2012-11-21)
+~~~~~~~~~~~~~~~~~~
+
+* Fixed readline on OS X. Thanks minrk
+
+* Updated distribute to 0.6.30 (improves our error reporting, plus new
+  distribute features and fixes). Thanks Gabriel (g2p)
+
+* Added compatibility with multiarch Python (Python 3.3 for example). Added an
+  integration test. Thanks Gabriel (g2p)
+
+* Added ability to install distribute from a user-provided egg, rather than the
+  bundled sdist, for better speed. Thanks Paul Moore.
+
+* Make the creation of lib64 symlink smarter about already-existing symlink,
+  and more explicit about full paths. Fixes #334 and #330. Thanks Jeremy Orem.
+
+* Give lib64 site-dir preference over lib on 64-bit systems, to avoid wrong
+  32-bit compiles in the venv. Fixes #328. Thanks Damien Nozay.
+
+* Fix a bug with prompt-handling in ``activate.csh`` in non-interactive csh
+  shells. Fixes #332. Thanks Benjamin Root for report and patch.
+
+* Make it possible to create a virtualenv from within a Python 3.3
+  pyvenv. Thanks Chris McDonough for the report.
+
+* Add optional --setuptools option to be able to switch to it in case
+  distribute is the default (like in Debian).
+
+1.8.2 (2012-09-06)
+~~~~~~~~~~~~~~~~~~
+
+* Updated the included pip version to 1.2.1 to fix regressions introduced
+  there in 1.2.
+
+
+1.8.1 (2012-09-03)
+~~~~~~~~~~~~~~~~~~
+
+* Fixed distribute version used with `--never-download`. Thanks michr for
+  report and patch.
+
+* Fix creating Python 3.3 based virtualenvs by unsetting the
+  ``__PYVENV_LAUNCHER__`` environment variable in subprocesses.
+
+
+1.8 (2012-09-01)
+~~~~~~~~~~~~~~~~
+
+* **Dropped support for Python 2.4** The minimum supported Python version is
+  now Python 2.5.
+
+* Fix `--relocatable` on systems that use lib64. Fixes #78. Thanks Branden
+  Rolston.
+
+* Symlink some additional modules under Python 3. Fixes #194. Thanks Vinay
+  Sajip, Ian Clelland, and Stefan Holek for the report.
+
+* Fix ``--relocatable`` when a script uses ``__future__`` imports. Thanks
+  Branden Rolston.
+
+* Fix a bug in the config option parser that prevented setting negative
+  options with environment variables. Thanks Ralf Schmitt.
+
+* Allow setting ``--no-site-packages`` from the config file.
+
+* Use ``/usr/bin/multiarch-platform`` if available to figure out the include
+  directory. Thanks for the patch, Mika Laitio.
+
+* Fix ``install_name_tool`` replacement to work on Python 3.X.
+
+* Handle paths of users' site-packages on Mac OS X correctly when changing
+  the prefix.
+
+* Updated the embedded version of distribute to 0.6.28 and pip to 1.2.
+
+
+1.7.2 (2012-06-22)
+~~~~~~~~~~~~~~~~~~
+
+* Updated to distribute 0.6.27.
+
+* Fix activate.fish on OS X. Fixes #8. Thanks David Schoonover.
+
+* Create a virtualenv-x.x script with the Python version when installing, so
+  virtualenv for multiple Python versions can be installed to the same
+  script location. Thanks Miki Tebeka.
+
+* Restored ability to create a virtualenv with a path longer than 78
+  characters, without breaking creation of virtualenvs with non-ASCII paths.
+  Thanks, Bradley Ayers.
+
+* Added ability to create virtualenvs without having installed Apple's
+  developer tools (using its own implementation of ``install_name_tool``).
+  Thanks Mike Hommey.
+
+* Fixed PyPy and Jython support on Windows. Thanks Konstantin Zemlyak.
+
+* Added pydoc script to ease use. Thanks Marc Abramowitz. Fixes #149.
+
+* Fixed creating a bootstrap script on Python 3. Thanks Raul Leal. Fixes #280.
+
+* Fixed inconsistency when having set the ``PYTHONDONTWRITEBYTECODE`` env var
+  with the --distribute option or the ``VIRTUALENV_USE_DISTRIBUTE`` env var.
+  ``VIRTUALENV_USE_DISTRIBUTE`` is now considered again as a legacy alias.
+
+
+1.7.1.2 (2012-02-17)
+~~~~~~~~~~~~~~~~~~~~
+
+* Fixed minor issue in `--relocatable`. Thanks, Cap Petschulat.
+
+
+1.7.1.1 (2012-02-16)
+~~~~~~~~~~~~~~~~~~~~
+
+* Bumped the version string in ``virtualenv.py`` up, too.
+
+* Fixed rST rendering bug of long description.
+
+
+1.7.1 (2012-02-16)
+~~~~~~~~~~~~~~~~~~
+
+* Update embedded pip to version 1.1.
+
+* Fix `--relocatable` under Python 3. Thanks Doug Hellmann.
+
+* Added environ PATH modification to activate_this.py. Thanks Doug
+  Napoleone. Fixes #14.
+
+* Support creating virtualenvs directly from a Python build directory on
+  Windows. Thanks CBWhiz. Fixes #139.
+
+* Use non-recursive symlinks to fix things up for posix_local install
+  scheme. Thanks michr.
+
+* Made activate script available for use with msys and cygwin on Windows.
+  Thanks Greg Haskins, Cliff Xuan, Jonathan Griffin and Doug Napoleone.
+  Fixes #176.
+
+* Fixed creation of virtualenvs on Windows when Python is not installed for
+  all users. Thanks Anatoly Techtonik for report and patch and Doug
+  Napoleone for testing and confirmation. Fixes #87.
+
+* Fixed creation of virtualenvs using -p in installs where some modules
+  that ought to be in the standard library (e.g. `readline`) are actually
+  installed in `site-packages` next to `virtualenv.py`. Thanks Greg Haskins
+  for report and fix. Fixes #167.
+
+* Added activation script for Powershell (signed by Jannis Leidel). Many
+  thanks to Jason R. Coombs.
+
+
+1.7 (2011-11-30)
+~~~~~~~~~~~~~~~~
+
+* Gave user-provided ``--extra-search-dir`` priority over default dirs for
+  finding setuptools/distribute (it already had priority for finding pip).
+  Thanks Ethan Jucovy.
+
+* Updated embedded Distribute release to 0.6.24. Thanks Alex Gronholm.
+
+* Made ``--no-site-packages`` the default behavior.  The
+  ``--no-site-packages`` flag is still permitted, but displays a warning when
+  used. Thanks Chris McDonough.
+
+* New flag: ``--system-site-packages``; this flag should be passed to get the
+  previous default global-site-package-including behavior back.
+
+* Added ability to set command options as environment variables and options
+  in a ``virtualenv.ini`` file.
+
+* Fixed various encoding related issues with paths. Thanks Gunnlaugur Thor Briem.
+
+* Made ``virtualenv.py`` script executable.
+
+
+1.6.4 (2011-07-21)
+~~~~~~~~~~~~~~~~~~
+
+* Restored ability to run on Python 2.4, too.
+
+
+1.6.3 (2011-07-16)
+~~~~~~~~~~~~~~~~~~
+
+* Restored ability to run on Python < 2.7.
+
+
+1.6.2 (2011-07-16)
+~~~~~~~~~~~~~~~~~~
+
+* Updated embedded distribute release to 0.6.19.
+
+* Updated embedded pip release to 1.0.2.
+
+* Fixed #141 - Be smarter about finding pkg_resources when using the
+  non-default Python interpreter (by using the ``-p`` option).
+
+* Fixed #112 - Fixed path in docs.
+
+* Fixed #109 - Corrected doctests of a Logger method.
+
+* Fixed #118 - Fixed creating virtualenvs on platforms that use the
+  "posix_local" install scheme, such as Ubuntu with Python 2.7.
+
+* Add missing library to Python 3 virtualenvs (``_dummy_thread``).
+
+
+1.6.1 (2011-04-30)
+~~~~~~~~~~~~~~~~~~
+
+* Start to use git-flow.
+
+* Added support for PyPy 1.5
+
+* Fixed #121 -- added sanity-checking of the -p argument. Thanks Paul Nasrat.
+
+* Added progress meter for pip installation as well as setuptools. Thanks Ethan
+  Jucovy.
+
+* Added --never-download and --search-dir options. Thanks Ethan Jucovy.
+
+
+1.6
+~~~
+
+* Added Python 3 support! Huge thanks to Vinay Sajip and Vitaly Babiy.
+
+* Fixed creation of virtualenvs on Mac OS X when standard library modules
+  (readline) are installed outside the standard library.
+
+* Updated bundled pip to 1.0.
+
+
+1.5.2
+~~~~~
+
+* Moved main repository to Github: https://github.com/pypa/virtualenv
+
+* Transferred primary maintenance from Ian to Jannis Leidel, Carl Meyer and Brian Rosner
+
+* Fixed a few more pypy related bugs.
+
+* Updated bundled pip to 0.8.2.
+
+
+1.5.1
+~~~~~
+
+* Added ``_weakrefset`` requirement for Python 2.7.1.
+
+* Fixed Windows regression in 1.5
+
+
+1.5
+~~~
+
+* Include pip 0.8.1.
+
+* Add support for PyPy.
+
+* Uses a proper temporary dir when installing environment requirements.
+
+* Add ``--prompt`` option to be able to override the default prompt prefix.
+
+* Fix an issue with ``--relocatable`` on Windows.
+
+* Fix issue with installing the wrong version of distribute.
+
+* Add fish and csh activate scripts.
+
+
+1.4.9
+~~~~~
+
+* Include pip 0.7.2
+
+
+1.4.8
+~~~~~
+
+* Fix for Mac OS X Framework builds that use
+  ``--universal-archs=intel``
+
+* Fix ``activate_this.py`` on Windows.
+
+* Allow ``$PYTHONHOME`` to be set; as long as you use ``source
+  bin/activate`` it will get unset. If you leave it set and do not
+  activate the environment, it will still break the environment.
+
+* Include pip 0.7.1
+
+
+1.4.7
+~~~~~
+
+* Include pip 0.7
+
+
+1.4.6
+~~~~~
+
+* Allow ``activate.sh`` to skip updating the prompt (by setting
+  ``$VIRTUAL_ENV_DISABLE_PROMPT``).
+
+
+1.4.5
+~~~~~
+
+* Include pip 0.6.3
+
+* Fix ``activate.bat`` and ``deactivate.bat`` under Windows when
+  ``PATH`` contained a parenthesis
+
+
+1.4.4
+~~~~~
+
+* Include pip 0.6.2 and Distribute 0.6.10
+
+* Create the ``virtualenv`` script even when Setuptools isn't
+  installed
+
+* Fix problem with ``virtualenv --relocatable`` when ``bin/`` has
+  subdirectories (e.g., ``bin/.svn/``); from Alan Franzoni.
+
+* If you set ``$VIRTUALENV_DISTRIBUTE`` then virtualenv will use
+  Distribute by default (so you don't have to remember to use
+  ``--distribute``).
+
+
+1.4.3
+~~~~~
+
+* Include pip 0.6.1
+
+
+1.4.2
+~~~~~
+
+* Fix pip installation on Windows
+
+* Fix use of stand-alone ``virtualenv.py`` (and boot scripts)
+
+* Exclude ~/.local (user site-packages) from environments when using
+  ``--no-site-packages``
+
+
+1.4.1
+~~~~~
+
+* Include pip 0.6
+
+
+1.4
+~~~
+
+* Updated setuptools to 0.6c11
+
+* Added the --distribute option
+
+* Fixed packaging problem of support-files
+
+
+1.3.4
+~~~~~
+
+* Virtualenv now copies the actual embedded Python binary on
+  Mac OS X to fix a hang on Snow Leopard (10.6).
+
+* Fail more gracefully on Windows when ``win32api`` is not installed.
+
+* Fix site-packages taking precedence over Jython's ``__classpath__``
+  and also specially handle the new ``__pyclasspath__`` entry in
+  ``sys.path``.
+
+* Now copies Jython's ``registry`` file to the virtualenv if it exists.
+
+* Better find libraries when compiling extensions on Windows.
+
+* Create ``Scripts\pythonw.exe`` on Windows.
+
+* Added support for the Debian/Ubuntu
+  ``/usr/lib/pythonX.Y/dist-packages`` directory.
+
+* Set ``distutils.sysconfig.get_config_vars()['LIBDIR']`` (based on
+  ``sys.real_prefix``) which is reported to help building on Windows.
+
+* Make ``deactivate`` work on ksh
+
+* Fixes for ``--python``: make it work with ``--relocatable`` and the
+  symlink created to the exact Python version.
+
+
+1.3.3
+~~~~~
+
+* Use Windows newlines in ``activate.bat``, which has been reported to help
+  when using non-ASCII directory names.
+
+* Fixed compatibility with Jython 2.5b1.
+
+* Added a function ``virtualenv.install_python`` for more fine-grained
+  access to what ``virtualenv.create_environment`` does.
+
+* Fix `a problem <https://bugs.launchpad.net/virtualenv/+bug/241581>`_
+  with Windows and paths that contain spaces.
+
+* If ``/path/to/env/.pydistutils.cfg`` exists (or
+  ``/path/to/env/pydistutils.cfg`` on Windows systems) then ignore
+  ``~/.pydistutils.cfg`` and use that other file instead.
+
+* Fix `a problem
+  <https://bugs.launchpad.net/virtualenv/+bug/340050>`_ picking up
+  some ``.so`` libraries in ``/usr/local``.
+
+
+1.3.2
+~~~~~
+
+* Remove the ``[install] prefix = ...`` setting from the virtualenv
+  ``distutils.cfg`` -- this has been causing problems for a lot of
+  people, in rather obscure ways.
+
+* If you use a boot script it will attempt to import ``virtualenv``
+  and find a pre-downloaded Setuptools egg using that.
+
+* Added platform-specific paths, like ``/usr/lib/pythonX.Y/plat-linux2``
+
+
+1.3.1
+~~~~~
+
+* Real Python 2.6 compatibility.  Backported the Python 2.6 updates to
+  ``site.py``, including `user directories
+  <http://docs.python.org/dev/whatsnew/2.6.html#pep-370-per-user-site-packages-directory>`_
+  (this means older versions of Python will support user directories,
+  whether intended or not).
+
+* Always set ``[install] prefix`` in ``distutils.cfg`` -- previously
+  on some platforms where a system-wide ``distutils.cfg`` was present
+  with a ``prefix`` setting, packages would be installed globally
+  (usually in ``/usr/local/lib/pythonX.Y/site-packages``).
+
+* Sometimes Cygwin seems to leave ``.exe`` off ``sys.executable``; a
+  workaround is added.
+
+* Fix ``--python`` option.
+
+* Fixed handling of Jython environments that use a
+  jython-complete.jar.
+
+
+1.3
+~~~
+
+* Update to Setuptools 0.6c9
+* Added an option ``virtualenv --relocatable EXISTING_ENV``, which
+  will make an existing environment "relocatable" -- the paths will
+  not be absolute in scripts, ``.egg-info`` and ``.pth`` files.  This
+  may assist in building environments that can be moved and copied.
+  You have to run this *after* installing any new packages.
+* Added ``bin/activate_this.py``, a file you can use like
+  ``execfile("path_to/activate_this.py",
+  dict(__file__="path_to/activate_this.py"))`` -- this will activate
+  the environment in place, similar to what `the mod_wsgi example
+  does <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+* For Mac framework builds of Python, the site-packages directory
+  ``/Library/Python/X.Y/site-packages`` is added to ``sys.path``, from
+  Andrea Rech.
+* Some platform-specific modules in Macs are added to the path now
+  (``plat-darwin/``, ``plat-mac/``, ``plat-mac/lib-scriptpackages``),
+  from Andrea Rech.
+* Fixed a small Bashism in the ``bin/activate`` shell script.
+* Added ``__future__`` to the list of required modules, for Python
+  2.3.  You'll still need to backport your own ``subprocess`` module.
+* Fixed the ``__classpath__`` entry in Jython's ``sys.path`` taking
+  precedence over virtualenv's libs.
+
+
+1.2
+~~~
+
+* Added a ``--python`` option to select the Python interpreter.
+* Add ``warnings`` to the modules copied over, for Python 2.6 support.
+* Add ``sets`` to the module copied over for Python 2.3 (though Python
+  2.3 still probably doesn't work).
+
+
+1.1.1
+~~~~~
+
+* Added support for Jython 2.5.
+
+
+1.1
+~~~
+
+* Added support for Python 2.6.
+* Fix a problem with missing ``DLLs/zlib.pyd`` on Windows.
+* Create ``bin/python`` (or ``bin/python.exe``) even when you run virtualenv
+  with an interpreter named, e.g., ``python2.4``
+* Fix MacPorts Python
+* Added --unzip-setuptools option
+* Update to Setuptools 0.6c8
+* If the current directory is not writable, run ez_setup.py in ``/tmp``
+* Copy or symlink over the ``include`` directory so that packages will
+  more consistently compile.
+
+
+1.0
+~~~
+
+* Fix build on systems that use ``/usr/lib64``, distinct from
+  ``/usr/lib`` (specifically CentOS x64).
+* Fixed bug in ``--clear``.
+* Fixed typos in ``deactivate.bat``.
+* Preserve ``$PYTHONPATH`` when calling subprocesses.
+
+
+0.9.2
+~~~~~
+
+* Fix include dir copying on Windows (makes compiling possible).
+* Include the main ``lib-tk`` in the path.
+* Patch ``distutils.sysconfig``: ``get_python_inc`` and
+  ``get_python_lib`` to point to the global locations.
+* Install ``distutils.cfg`` before Setuptools, so that system
+  customizations of ``distutils.cfg`` won't affect the installation.
+* Add ``bin/pythonX.Y`` to the virtualenv (in addition to
+  ``bin/python``).
+* Fixed an issue with Mac Framework Python builds, and absolute paths
+  (from Ronald Oussoren).
+
+
+0.9.1
+~~~~~
+
+* Improve ability to create a virtualenv from inside a virtualenv.
+* Fix a little bug in ``bin/activate``.
+* Actually get ``distutils.cfg`` to work reliably.
+
+
+0.9
+~~~
+
+* Added ``lib-dynload`` and ``config`` to things that need to be
+  copied over in an environment.
+* Copy over or symlink the ``include`` directory, so that you can
+  build packages that need the C headers.
+* Include a ``distutils`` package, so you can locally update
+  ``distutils.cfg`` (in ``lib/pythonX.Y/distutils/distutils.cfg``).
+* Better avoid downloading Setuptools, and hitting PyPI on environment
+  creation.
+* Fix a problem creating a ``lib64/`` directory.
+* Should work on MacOSX Framework builds (the default Python
+  installations on Mac).  Thanks to Ronald Oussoren.
+
+
+0.8.4
+~~~~~
+
+* Fixed Windows installs sometimes giving inaccurate errors about
+  ``sys.prefix``.
+* Slightly prettier output.
+
+
+0.8.3
+~~~~~
+
+* Added support for Windows.
+
+
+0.8.2
+~~~~~
+
+* Give a better warning if you are on an unsupported platform (Mac
+  Framework Pythons, and Windows).
+* Give error about running while inside a workingenv.
+* Give better error message about Python 2.3.
+
+
+0.8.1
+~~~~~
+
+Fixed packaging of the library.
+
+
+0.8
+~~~
+
+Initial release.  Everything is changed and new!
diff --git a/bootstrap/virtualenv/docs/conf.py b/bootstrap/virtualenv/docs/conf.py
new file mode 100644
index 0000000..1d89554
--- /dev/null
+++ b/bootstrap/virtualenv/docs/conf.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+#
+# Paste documentation build configuration file, created by
+# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import os
+import sys
+
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+# If your extensions are in another directory, add it here.
+sys.path.insert(0, os.path.abspath(os.pardir))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc']
+
+# Add any paths that contain templates here, relative to this directory.
+## FIXME: disabled for now because I haven't figured out how to use this:
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'virtualenv'
+copyright = '2007-2014, Ian Bicking, The Open Planning Project, PyPA'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+try:
+    from virtualenv import __version__
+    # The short X.Y version.
+    version = '.'.join(__version__.split('.')[:2])
+    # The full version, including alpha/beta/rc tags.
+    release = __version__
+except ImportError:
+    version = release = 'dev'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = []
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'default.css'
+
+html_theme = 'default'
+if not on_rtd:
+    try:
+        import sphinx_rtd_theme
+        html_theme = 'sphinx_rtd_theme'
+        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+    except ImportError:
+        pass
+
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Content template for the index page.
+#html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pastedoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+#latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/bootstrap/virtualenv/docs/development.rst b/bootstrap/virtualenv/docs/development.rst
new file mode 100644
index 0000000..aba2785
--- /dev/null
+++ b/bootstrap/virtualenv/docs/development.rst
@@ -0,0 +1,61 @@
+Development
+===========
+
+Contributing
+------------
+
+Refer to the `pip development`_ documentation - it applies equally to
+virtualenv, except that virtualenv issues should be filed on the `virtualenv
+repo`_ at GitHub.
+
+Virtualenv's release schedule is tied to pip's -- each time there's a new pip
+release, there will be a new virtualenv release that bundles the new version of
+pip.
+
+Files in the `virtualenv_embedded/` subdirectory are embedded into
+`virtualenv.py` itself as base64-encoded strings (in order to support
+single-file use of `virtualenv.py` without installing it). If your patch
+changes any file in `virtualenv_embedded/`, run `bin/rebuild-script.py` to
+update the embedded version of that file in `virtualenv.py`; commit that and
+submit it as part of your patch / pull request.
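+
+As a rough sketch of that scheme (a simplified version of the ``convert``
+helper that ``virtualenv.py`` wraps around each embedded blob)::
+
+    import base64
+    import zlib
+
+    def convert(data):
+        # Each embedded file is stored as the base64 encoding of its
+        # zlib-compressed content; reverse both steps to recover it.
+        return zlib.decompress(base64.b64decode(data))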
+
+.. _pip development: http://www.pip-installer.org/en/latest/development.html
+.. _virtualenv repo: https://github.com/pypa/virtualenv/
+
+Running the tests
+-----------------
+
+Virtualenv's test suite is small and not yet at all comprehensive, but we aim
+to grow it.
+
+The easy way to run tests (handles test dependencies automatically)::
+
+    $ python setup.py test
+
+If you want to run only a selection of the tests, you'll need to run them
+directly with pytest instead. Create a virtualenv, and install required
+packages::
+
+    $ pip install pytest mock
+
+Run pytest::
+
+    $ pytest
+
+Or select just a single test file to run::
+
+    $ pytest tests/test_virtualenv.py
+
+Status and License
+------------------
+
+``virtualenv`` is a successor to `workingenv
+<http://cheeseshop.python.org/pypi/workingenv.py>`_, and an extension
+of `virtual-python
+<http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_.
+
+It was written by Ian Bicking, sponsored by the `Open Planning
+Project <http://openplans.org>`_ and is now maintained by a
+`group of developers <https://github.com/pypa/virtualenv/raw/master/AUTHORS.txt>`_.
+It is licensed under an
+`MIT-style permissive license <https://github.com/pypa/virtualenv/raw/master/LICENSE.txt>`_.
diff --git a/bootstrap/virtualenv/docs/index.rst b/bootstrap/virtualenv/docs/index.rst
new file mode 100644
index 0000000..04f7191
--- /dev/null
+++ b/bootstrap/virtualenv/docs/index.rst
@@ -0,0 +1,137 @@
+Virtualenv
+==========
+
+`Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+`Issues <https://github.com/pypa/virtualenv/issues>`_ |
+`Github <https://github.com/pypa/virtualenv>`_ |
+`PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+User IRC: #pypa
+Dev IRC: #pypa-dev
+
+Introduction
+------------
+
+``virtualenv`` is a tool to create isolated Python environments.
+
+The basic problem being addressed is one of dependencies and versions,
+and indirectly permissions. Imagine you have an application that
+needs version 1 of LibFoo, but another application requires version
+2. How can you use both these applications?  If you install
+everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+platform's standard location is), it's easy to end up in a situation
+where you unintentionally upgrade an application that shouldn't be
+upgraded.
+
+Or more generally, what if you want to install an application *and
+leave it be*?  If an application works, any change in its libraries or
+the versions of those libraries can break the application.
+
+Also, what if you can't install packages into the global
+``site-packages`` directory?  For instance, on a shared host.
+
+In all these cases, ``virtualenv`` can help you. It creates an
+environment that has its own installation directories and doesn't
+share libraries with other virtualenv environments (and optionally
+doesn't access the globally installed libraries either).
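+
+For example, each application can get its own environment and its own copy
+of ``LibFoo`` (the stand-in library name from above)::
+
+    $ virtualenv app1-env && app1-env/bin/pip install 'LibFoo==1.0'
+    $ virtualenv app2-env && app2-env/bin/pip install 'LibFoo==2.0'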
+
+.. comment: split here
+
+.. toctree::
+   :maxdepth: 2
+
+   installation
+   userguide
+   reference
+   development
+   changes
+
+.. warning::
+
+   Python bugfix releases 2.6.8, 2.7.3, 3.1.5 and 3.2.3 include a change that
+   will cause "import random" to fail with "cannot import name urandom" on any
+   virtualenv created on a Unix host with an earlier release of Python
+   2.6/2.7/3.1/3.2, if the underlying system Python is upgraded. This is due to
+   the fact that a virtualenv uses the system Python's standard library but
+   contains its own copy of the Python interpreter, so an upgrade to the system
+   Python results in a mismatch between the version of the Python interpreter
+   and the version of the standard library. It can be fixed by removing
+   ``$ENV/bin/python`` and re-running virtualenv on the same target directory
+   with the upgraded Python.
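+
+   A minimal sketch of that repair (the path is illustrative)::
+
+       $ rm /path/to/ENV/bin/python
+       $ virtualenv /path/to/ENV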
+
+Other Documentation and Links
+-----------------------------
+
+* `Blog announcement of virtualenv`__.
+
+  .. __: http://blog.ianbicking.org/2007/10/10/workingenv-is-dead-long-live-virtualenv/
+
+* James Gardner has written a tutorial on using `virtualenv with
+  Pylons
+  <http://wiki.pylonshq.com/display/pylonscookbook/Using+a+Virtualenv+Sandbox>`_.
+
+* Chris Perkins created a `showmedo video including virtualenv
+  <http://showmedo.com/videos/video?name=2910000&fromSeriesID=291>`_.
+
+* Doug Hellmann's `virtualenvwrapper`_ is a useful set of scripts to make
+  your workflow with many virtualenvs even easier. `His initial blog post on it`__.
+  He also wrote `an example of using virtualenv to try IPython`__.
+
+  .. _virtualenvwrapper: https://pypi.python.org/pypi/virtualenvwrapper/
+  .. __: http://www.doughellmann.com/articles/CompletelyDifferent-2008-05-virtualenvwrapper/index.html
+  .. __: http://www.doughellmann.com/articles/CompletelyDifferent-2008-02-ipython-and-virtualenv/index.html
+
+* `Pew`_ is another wrapper for virtualenv that makes use of a different
+  activation technique.
+
+  .. _Pew: https://pypi.python.org/pypi/pew/
+
+* `Using virtualenv with mod_wsgi
+  <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+
+* `virtualenv commands
+  <https://github.com/thisismedium/virtualenv-commands>`_ for some more
+  workflow-related tools around virtualenv.
+
+* PyCon US 2011 talk: `Reverse-engineering Ian Bicking's brain: inside pip and virtualenv
+  <http://pyvideo.org/video/568/reverse-engineering-ian-bicking--39-s-brain--insi>`_.
+  By the end of the talk, you'll have a good idea exactly how pip
+  and virtualenv do their magic, and where to go looking in the source
+  for particular behaviors or bug fixes.
+
+Compare & Contrast with Alternatives
+------------------------------------
+
+There are several alternatives that create isolated environments:
+
+* ``workingenv`` (which I do not suggest you use anymore) is the
+  predecessor to this library. It used the main Python interpreter,
+  but relied on setting ``$PYTHONPATH`` to activate the environment.
+  This causes problems when running Python scripts that aren't part of
+  the environment (e.g., a globally installed ``hg`` or ``bzr``). It
+  also conflicted a lot with Setuptools.
+
+* `virtual-python
+  <http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_
+  is also a predecessor to this library. It uses only symlinks, so it
+  couldn't work on Windows. It also symlinks over the *entire*
+  standard library and global ``site-packages``. As a result, it
+  won't see new additions to the global ``site-packages``.
+
+  This script only symlinks a small portion of the standard library
+  into the environment, and so on Windows it is feasible to simply
+  copy these files over. Also, it creates a new/empty
+  ``site-packages`` and also adds the global ``site-packages`` to the
+  path, so updates are tracked separately. This script also installs
+  Setuptools automatically, saving a step and avoiding the need for
+  network access.
+
+* `zc.buildout <http://pypi.python.org/pypi/zc.buildout>`_ doesn't
+  create an isolated Python environment in the same style, but
+  achieves similar results through a declarative config file that sets
+  up scripts with very particular packages. As a declarative system,
+  it is somewhat easier to repeat and manage, but more difficult to
+  experiment with. ``zc.buildout`` includes the ability to set up
+  non-Python systems (e.g., a database server or an Apache instance).
+
+I *strongly* recommend anyone doing application development or
+deployment use one of these tools.
diff --git a/bootstrap/virtualenv/docs/installation.rst b/bootstrap/virtualenv/docs/installation.rst
new file mode 100644
index 0000000..3006d76
--- /dev/null
+++ b/bootstrap/virtualenv/docs/installation.rst
@@ -0,0 +1,58 @@
+Installation
+============
+
+.. warning::
+
+    We advise installing virtualenv-1.9 or greater. Prior to version 1.9, the
+    pip included in virtualenv did not download from PyPI over SSL.
+
+.. warning::
+
+    When using pip to install virtualenv, we advise using pip 1.3 or greater.
+    Prior to version 1.3, pip did not download from PyPI over SSL.
+
+.. warning::
+
+    We advise against using easy_install to install virtualenv when using
+    setuptools < 0.9.7, because easy_install didn't download from PyPI over SSL
+    and was broken in some subtle ways.
+
+To install globally with `pip` (if you have pip 1.3 or greater installed globally):
+
+::
+
+ $ [sudo] pip install virtualenv
+
+Or to get the latest unreleased dev version:
+
+::
+
+ $ [sudo] pip install https://github.com/pypa/virtualenv/tarball/develop
+
+
+To install version X.X globally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ [sudo] python setup.py install
+
+
+To *use* locally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ python virtualenv.py myVE
+
+.. note::
+
+    The ``virtualenv.py`` script is *not* supported if run without the
+    necessary pip/setuptools/virtualenv distributions available locally. All
+    of the installation methods above include a ``virtualenv_support``
+    directory alongside ``virtualenv.py`` which contains a complete set of
+    pip and setuptools distributions, and so are fully supported.
diff --git a/bootstrap/virtualenv/docs/make.bat b/bootstrap/virtualenv/docs/make.bat
new file mode 100644
index 0000000..aa5c189
--- /dev/null
+++ b/bootstrap/virtualenv/docs/make.bat
@@ -0,0 +1,170 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  changes    to make an overview of all changed/added/deprecated items
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-compressor.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-compressor.qhc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+:end
diff --git a/bootstrap/virtualenv/docs/reference.rst b/bootstrap/virtualenv/docs/reference.rst
new file mode 100644
index 0000000..ca801a0
--- /dev/null
+++ b/bootstrap/virtualenv/docs/reference.rst
@@ -0,0 +1,256 @@
+Reference Guide
+===============
+
+``virtualenv`` Command
+----------------------
+
+.. _usage:
+
+Usage
+~~~~~
+
+:command:`virtualenv [OPTIONS] ENV_DIR`
+
+    Where ``ENV_DIR`` is an absolute or relative path to a directory to create
+    the virtual environment in.
+
+.. _options:
+
+Options
+~~~~~~~
+
+.. program:: virtualenv
+
+.. option:: --version
+
+   show program's version number and exit
+
+.. option:: -h, --help
+
+   show this help message and exit
+
+.. option:: -v, --verbose
+
+   Increase verbosity.
+
+.. option:: -q, --quiet
+
+   Decrease verbosity.
+
+.. option:: -p PYTHON_EXE, --python=PYTHON_EXE
+
+   The Python interpreter to use, e.g.,
+   --python=python2.5 will use the python2.5 interpreter
+   to create the new environment.  The default is the
+   interpreter that virtualenv was installed with
+   (like ``/usr/bin/python``)
+
+.. option:: --clear
+
+   Clear out the non-root install and start from scratch.
+
+.. option:: --system-site-packages
+
+   Give the virtual environment access to the global
+   site-packages.
+
+.. option:: --always-copy
+
+   Always copy files rather than symlinking.
+
+.. option:: --relocatable
+
+   Make an EXISTING virtualenv environment relocatable.
+   This fixes up scripts and makes all .pth files relative.
+
+.. option:: --unzip-setuptools
+
+   Unzip Setuptools when installing it.
+
+.. option:: --no-setuptools
+
+   Do not install setuptools (or pip) in the new
+   virtualenv.
+
+.. option:: --no-pip
+
+   Do not install pip in the new virtualenv.
+
+.. option:: --extra-search-dir=DIR
+
+   Directory to look for setuptools/pip distributions in.
+   This option can be specified multiple times.
+
+.. option:: --prompt=PROMPT
+
+   Provides an alternative prompt prefix for this
+   environment.
+
+.. option:: --never-download
+
+   DEPRECATED. Retained only for backward compatibility.
+   This option has no effect. Virtualenv never downloads
+   pip or setuptools.
+
+.. option:: --no-site-packages
+
+   DEPRECATED. Retained only for backward compatibility.
+   Not having access to global site-packages is now the
+   default behavior.
+
+.. option:: --distribute
+.. option:: --setuptools
+
+   Legacy; now have no effect.  Before version 1.10 these could be used
+   to choose whether to install Distribute_ or Setuptools_ into the created
+   virtualenv. Distribute has now been merged into Setuptools, and the
+   latter is always installed.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+.. _Setuptools: https://pypi.python.org/pypi/setuptools
+
+
+Configuration
+-------------
+
+Environment Variables
+~~~~~~~~~~~~~~~~~~~~~
+
+For each command line option, virtualenv also checks an environment
+variable with the name format ``VIRTUALENV_<UPPER_NAME>``. That means
+the names of the command line options are capitalized and have dashes
+(``'-'``) replaced with underscores (``'_'``).
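+
+As a sketch, the option-to-variable mapping works like this
+(``env_var_name`` is an illustrative helper, not part of virtualenv)::
+
+    def env_var_name(option):
+        # '--extra-search-dir' -> 'VIRTUALENV_EXTRA_SEARCH_DIR'
+        return 'VIRTUALENV_' + option.lstrip('-').upper().replace('-', '_')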
+
+For example, to automatically use a custom Python binary instead of the
+one virtualenv is run with you can also set an environment variable::
+
+  $ export VIRTUALENV_PYTHON=/opt/python-3.3/bin/python
+  $ virtualenv ENV
+
+It's the same as passing the option to virtualenv directly::
+
+  $ virtualenv --python=/opt/python-3.3/bin/python ENV
+
+This also works for appending command line options, like ``--find-links``.
+Just leave an empty space between the passed values, e.g.::
+
+  $ export VIRTUALENV_EXTRA_SEARCH_DIR="/path/to/dists /path/to/other/dists"
+  $ virtualenv ENV
+
+is the same as calling::
+
+  $ virtualenv --extra-search-dir=/path/to/dists --extra-search-dir=/path/to/other/dists ENV
+
+.. envvar:: VIRTUAL_ENV_DISABLE_PROMPT
+
+   Any virtualenv created when this is set to a non-empty value will not have
+   its :ref:`activate` script modify the shell prompt.
+
+
+Configuration File
+~~~~~~~~~~~~~~~~~~
+
+virtualenv also looks for a standard ini config file. On Unix and Mac OS X
+that's ``$HOME/.virtualenv/virtualenv.ini`` and on Windows, it's
+``%APPDATA%\virtualenv\virtualenv.ini``.
+
+The names of the settings are derived from the long command line option,
+e.g. the option :option:`--python <-p>` would look like this::
+
+  [virtualenv]
+  python = /opt/python-3.3/bin/python
+
+Appending options like :option:`--extra-search-dir` can be written on multiple
+lines::
+
+  [virtualenv]
+  extra-search-dir =
+      /path/to/dists
+      /path/to/other/dists
+
+Please have a look at the output of :option:`--help <-h>` for a full list
+of supported options.
+
+
+Extending Virtualenv
+--------------------
+
+
+Creating Your Own Bootstrap Scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While virtualenv creates an environment, it doesn't put anything into the
+environment. Developers may find it useful to distribute a script
+that sets up a particular environment, for example a script that
+installs a particular web application.
+
+To create a script like this, call
+:py:func:`virtualenv.create_bootstrap_script`, and write the
+result to your new bootstrapping script.
+
+.. py:function:: create_bootstrap_script(extra_text)
+
+   Creates a bootstrap script from ``extra_text``, which is like
+   this script but with extend_parser, adjust_options, and after_install hooks.
+
+This returns a string that (written to disk of course) can be used
+as a bootstrap script with your own customizations. The script
+will be the standard virtualenv.py script, with your extra text
+added (your extra text should be Python code).
+
+If you include these functions, they will be called:
+
+.. py:function:: extend_parser(optparse_parser)
+
+   You can add or remove options from the parser here.
+
+.. py:function:: adjust_options(options, args)
+
+   You can change options here, or change the args (if you accept
+   different kinds of arguments, be sure you modify ``args`` so it is
+   only ``[DEST_DIR]``).
+
+.. py:function:: after_install(options, home_dir)
+
+   After everything is installed, this function is called. This
+   is probably the function you are most likely to use. An
+   example would be::
+
+       def after_install(options, home_dir):
+           if sys.platform == 'win32':
+               bin = 'Scripts'
+           else:
+               bin = 'bin'
+           subprocess.call([join(home_dir, bin, 'easy_install'),
+                            'MyPackage'])
+           subprocess.call([join(home_dir, bin, 'my-package-script'),
+                            'setup', home_dir])
+
+   This example immediately installs a package, and runs a setup
+   script from that package.
+
+Bootstrap Example
+~~~~~~~~~~~~~~~~~
+
+Here's a more concrete example of how you could use this::
+
+    import virtualenv, textwrap
+    output = virtualenv.create_bootstrap_script(textwrap.dedent("""
+    import os, subprocess
+    def after_install(options, home_dir):
+        etc = join(home_dir, 'etc')
+        if not os.path.exists(etc):
+            os.makedirs(etc)
+        subprocess.call([join(home_dir, 'bin', 'easy_install'),
+                         'BlogApplication'])
+        subprocess.call([join(home_dir, 'bin', 'paster'),
+                         'make-config', 'BlogApplication',
+                         join(etc, 'blog.ini')])
+        subprocess.call([join(home_dir, 'bin', 'paster'),
+                         'setup-app', join(etc, 'blog.ini')])
+    """))
+    with open('blog-bootstrap.py', 'w') as f:
+        f.write(output)
+
+Another example is available `here`__.
+
+.. __: https://github.com/socialplanning/fassembler/blob/master/fassembler/create-venv-script.py
diff --git a/bootstrap/virtualenv/docs/userguide.rst b/bootstrap/virtualenv/docs/userguide.rst
new file mode 100644
index 0000000..70d4af8
--- /dev/null
+++ b/bootstrap/virtualenv/docs/userguide.rst
@@ -0,0 +1,249 @@
+User Guide
+==========
+
+
+Usage
+-----
+
+Virtualenv has one basic command::
+
+    $ virtualenv ENV
+
+Where ``ENV`` is a directory to place the new virtual environment. It has
+a number of usual effects (modifiable by many :ref:`options`):
+
+ - :file:`ENV/lib/` and :file:`ENV/include/` are created, containing supporting
+   library files for a new virtualenv python. Packages installed in this
+   environment will live under :file:`ENV/lib/pythonX.X/site-packages/`.
+
+ - :file:`ENV/bin` is created, where executables live - notably a new
+   :command:`python`. Thus running a script with ``#! /path/to/ENV/bin/python``
+   would run that script under this virtualenv's python.
+
+ - The crucial packages pip_ and setuptools_ are installed, which allow other
+   packages to be easily installed to the environment. This associated pip
+   can be run from :file:`ENV/bin/pip`.
+
+The python in your new virtualenv is effectively isolated from the python that
+was used to create it.
+
+.. _pip: https://pypi.python.org/pypi/pip
+.. _setuptools: https://pypi.python.org/pypi/setuptools
+
+
+.. _activate:
+
+activate script
+~~~~~~~~~~~~~~~
+
+In a newly created virtualenv there will also be a :command:`activate` shell
+script. For Windows systems, activation scripts are provided for
+the Command Prompt and Powershell.
+
+On Posix systems, this resides in :file:`/ENV/bin/`, so you can run::
+
+    $ source bin/activate
+
+In shells where :command:`source` does not exist (e.g. the original Bourne
+shell), use the :command:`.` command instead.
+
+This will change your ``$PATH`` so its first entry is the virtualenv's
+``bin/`` directory. (You have to use ``source`` because it changes your
+shell environment in-place.) This is all it does; it's purely a
+convenience. If you directly run a script or the python interpreter
+from the virtualenv's ``bin/`` directory (e.g. ``path/to/ENV/bin/pip``
+or ``/path/to/ENV/bin/python-script.py``) there's no need for
+activation.
+
+The ``activate`` script will also modify your shell prompt to indicate
+which environment is currently active. To disable this behaviour, see
+:envvar:`VIRTUAL_ENV_DISABLE_PROMPT`.
+
+To undo these changes to your path (and prompt), just run::
+
+    $ deactivate
+
+On Windows, the equivalent ``activate`` script is in the ``Scripts`` folder::
+
+    > \path\to\env\Scripts\activate
+
+And type ``deactivate`` to undo the changes.
+
+Depending on your active shell (CMD.exe or Powershell.exe), Windows will use
+either ``activate.bat`` or ``activate.ps1`` to activate the virtual
+environment. If using Powershell, see the notes about code signing below.
+
+.. note::
+
+    If using Powershell, the ``activate`` script is subject to the
+    `execution policies`_ on the system. By default on Windows 7, the system's
+    execution policy is set to ``Restricted``, meaning scripts like the
+    ``activate`` script are not allowed to be executed. The policy can,
+    however, be relaxed to allow it, as shown below.
+
+    In order to use the script, you can relax your system's execution
+    policy to ``AllSigned``, meaning all scripts on the system must be
+    digitally signed to be executed. Since the virtualenv activation
+    script is signed by one of the authors (Jannis Leidel) this level of
+    the execution policy suffices. As an administrator run::
+
+        PS C:\> Set-ExecutionPolicy AllSigned
+
+    Then, when executing the script, you'll be asked whether to trust the
+    signer, with a prompt like the following::
+
+        PS C:\> virtualenv .\foo
+        New python executable in C:\foo\Scripts\python.exe
+        Installing setuptools................done.
+        Installing pip...................done.
+        PS C:\> .\foo\scripts\activate
+
+        Do you want to run software from this untrusted publisher?
+        File C:\foo\scripts\activate.ps1 is published by E=jannis@leidel.info,
+        CN=Jannis Leidel, L=Berlin, S=Berlin, C=DE, Description=581796-Gh7xfJxkxQSIO4E0
+        and is not trusted on your system. Only run scripts from trusted publishers.
+        [V] Never run  [D] Do not run  [R] Run once  [A] Always run  [?] Help
+        (default is "D"):A
+        (foo) PS C:\>
+
+    If you select ``[A] Always Run``, the certificate will be added to the
+    Trusted Publishers of your user account, and will be trusted in this
+    user's context henceforth. If you select ``[R] Run Once``, the script will
+    be run, but you will be prompted again on a subsequent invocation. Advanced
+    users can add the signer's certificate to the Trusted Publishers of the
+    Computer account to apply it to all users (though that technique is outside
+    the scope of this document).
+
+    Alternatively, you may relax the system execution policy to allow running
+    of local scripts without verifying the code signature using the following::
+
+        PS C:\> Set-ExecutionPolicy RemoteSigned
+
+    Since the ``activate.ps1`` script is generated locally for each virtualenv,
+    it is not considered a remote script and can then be executed.
+
+.. _`execution policies`: http://technet.microsoft.com/en-us/library/dd347641.aspx
+
+The :option:`--system-site-packages` Option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you build with ``virtualenv --system-site-packages ENV``, your virtual
+environment will inherit packages from ``/usr/lib/python2.7/site-packages``
+(or wherever your global site-packages directory is).
+
+This can be used if you have control over the global site-packages directory,
+and you want to depend on the packages there. If you want isolation from the
+global system, do not use this flag.
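+
+A quick way to see which mode an environment uses is to list the
+``site-packages`` entries its interpreter actually has on ``sys.path``
+(a minimal sketch, run with :file:`ENV/bin/python`)::
+
+    import sys
+
+    # With --system-site-packages the global site-packages directory shows
+    # up here alongside the environment's own; without the flag, only the
+    # environment's site-packages is present.
+    for p in sys.path:
+        if p.endswith('site-packages'):
+            print(p)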
+
+Windows Notes
+~~~~~~~~~~~~~
+
+Some paths within the virtualenv are slightly different on Windows: scripts and
+executables on Windows go in ``ENV\Scripts\`` instead of ``ENV/bin/`` and
+libraries go in ``ENV\Lib\`` rather than ``ENV/lib/``.
+
+To create a virtualenv under a path with spaces in it on Windows, you'll need
+the `win32api <http://sourceforge.net/projects/pywin32/>`_ library installed.
+
+
+Using Virtualenv without ``bin/python``
+---------------------------------------
+
+Sometimes you can't or don't want to use the Python interpreter
+created by the virtualenv. For instance, in a `mod_python
+<http://www.modpython.org/>`_ or `mod_wsgi <http://www.modwsgi.org/>`_
+environment, there is only one interpreter.
+
+Luckily, it's easy. You must use the custom Python interpreter to
+*install* libraries. But to *use* libraries, you just have to be sure
+the path is correct. A script is available to correct the path. You
+can set up the environment like::
+
+    activate_this = '/path/to/env/bin/activate_this.py'
+    execfile(activate_this, dict(__file__=activate_this))
+
+This will change ``sys.path`` and even change ``sys.prefix``, but also allow
+you to use an existing interpreter. Items in your environment will show up
+first on ``sys.path``, before global items. However, global items will
+always be accessible (as if the :option:`--system-site-packages` flag had been
+used in creating the environment, whether it was or not). Also, this cannot
+undo the activation of other environments, nor unload modules that have
+already been imported. You shouldn't, for instance, try to activate an
+environment before each web request; activate *one* environment as early as
+possible, and don't do it again in that process.
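+
+Note that ``execfile`` only exists on Python 2. On Python 3 the equivalent
+is to compile and ``exec`` the file contents yourself (the same pattern
+virtualenv uses when rewriting scripts)::
+
+    activate_this = '/path/to/env/bin/activate_this.py'
+    with open(activate_this) as f:
+        code = compile(f.read(), activate_this, 'exec')
+    exec(code, dict(__file__=activate_this))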
+
+Making Environments Relocatable
+-------------------------------
+
+**Note:** this option is somewhat experimental, and there are probably
+caveats that have not yet been identified.
+
+.. warning::
+
+    The ``--relocatable`` option currently has a number of issues,
+    and is not guaranteed to work in all circumstances. It is possible
+    that the option will be deprecated in a future version of ``virtualenv``.
+
+Normally environments are tied to a specific path. That means that
+you cannot move an environment around or copy it to another computer.
+You can fix up an environment to make it relocatable with the
+command::
+
+    $ virtualenv --relocatable ENV
+
+This will make some of the files created by setuptools use relative paths,
+and will change all the scripts to use ``activate_this.py`` instead of using
+the location of the Python interpreter to select the environment.
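+
+Concretely, after running ``--relocatable`` a console script gains a header
+along these lines (a sketch of the line that virtualenv's ``relative_script``
+helper inserts after the shebang and any ``from __future__`` imports)::
+
+    #!/usr/bin/env python
+    import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this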
+
+**Note:** scripts which have been made relocatable will only work if
+the virtualenv is activated; specifically, the python executable from
+the virtualenv must be the first one on the system PATH. Also note that
+the activate scripts are not currently made relocatable by
+``virtualenv --relocatable``.
+
+**Note:** you must run this after you've installed *any* packages into
+the environment. If you make an environment relocatable, then
+install a new package, you must run ``virtualenv --relocatable``
+again.
+
+Also, this **does not make your packages cross-platform**. You can
+move the directory around, but it can only be used on other similar
+computers. Some known environmental differences that can cause
+incompatibilities:
+
+ - a different version of Python;
+ - one platform using UCS2 for its internal unicode representation and
+   another using UCS4 (a compile-time option);
+ - obvious platform changes, like Windows vs. Linux or Intel vs. ARM;
+ - libraries that bind to C libraries on the system, when those C
+   libraries are located somewhere different (either different versions,
+   or a different filesystem layout).
+
+Currently, if you use this flag to create an environment, the
+:option:`--system-site-packages` option will be implied.
+
+The :option:`--extra-search-dir` option
+---------------------------------------
+
+This option allows you to provide your own versions of setuptools and/or
+pip to use instead of the embedded versions that come with virtualenv.
+
+To use this feature, pass one or more ``--extra-search-dir`` options to
+virtualenv like this::
+
+    $ virtualenv --extra-search-dir=/path/to/distributions ENV
+
+The ``/path/to/distributions`` path should point to a directory that contains
+setuptools and/or pip wheels.
+
+virtualenv will look for wheels in the specified directories, but will use
+pip's standard algorithm for selecting the wheel to install, which looks for
+the latest compatible wheel.
+
+As well as the extra directories, the search order includes:
+
+#. The ``virtualenv_support`` directory relative to virtualenv.py
+#. The directory where virtualenv.py is located.
+#. The current directory.
+
+If no satisfactory local distributions are found, virtualenv will
+fail. Virtualenv will never download packages.
diff --git a/bootstrap/virtualenv/scripts/virtualenv b/bootstrap/virtualenv/scripts/virtualenv
new file mode 100644
index 0000000..c961dd7
--- /dev/null
+++ b/bootstrap/virtualenv/scripts/virtualenv
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+import virtualenv
+virtualenv.main()
diff --git a/bootstrap/virtualenv/setup.py b/bootstrap/virtualenv/setup.py
new file mode 100644
index 0000000..ce35314
--- /dev/null
+++ b/bootstrap/virtualenv/setup.py
@@ -0,0 +1,111 @@
+import os
+import re
+import shutil
+import sys
+
+if sys.version_info[:2] < (2, 6):
+    sys.exit('virtualenv requires Python 2.6 or higher.')
+
+try:
+    from setuptools import setup
+    from setuptools.command.test import test as TestCommand
+
+    class PyTest(TestCommand):
+        user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+        def initialize_options(self):
+            TestCommand.initialize_options(self)
+            self.pytest_args = None
+
+        def finalize_options(self):
+            TestCommand.finalize_options(self)
+            self.test_args = []
+            self.test_suite = True
+
+        def run_tests(self):
+            # import here, because outside the eggs aren't loaded
+            import pytest
+            errno = pytest.main(self.pytest_args)
+            sys.exit(errno)
+
+    setup_params = {
+        'entry_points': {
+            'console_scripts': [
+                'virtualenv=virtualenv:main',
+                'virtualenv-%s.%s=virtualenv:main' % sys.version_info[:2]
+            ],
+        },
+        'zip_safe': False,
+        'cmdclass': {'test': PyTest},
+        'tests_require': ['pytest', 'mock'],
+    }
+except ImportError:
+    from distutils.core import setup
+    if sys.platform == 'win32':
+        print('Note: without Setuptools installed you will '
+              'have to use "python -m virtualenv ENV"')
+        setup_params = {}
+    else:
+        script = 'scripts/virtualenv'
+        script_ver = script + '-%s.%s' % sys.version_info[:2]
+        shutil.copy(script, script_ver)
+        setup_params = {'scripts': [script, script_ver]}
+
+
+def read_file(*paths):
+    here = os.path.dirname(os.path.abspath(__file__))
+    with open(os.path.join(here, *paths)) as f:
+        return f.read()
+
+# Get long_description from index.rst:
+long_description = read_file('docs', 'index.rst')
+long_description = long_description.strip().split('split here', 1)[0]
+# Add release history
+long_description += "\n\n" + read_file('docs', 'changes.rst')
+
+
+def get_version():
+    version_file = read_file('virtualenv.py')
+    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
+                              version_file, re.M)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError("Unable to find version string.")
+
+
+# Hack to prevent a TypeError ('NoneType' object is not callable) in
+# multiprocessing/util.py's _exit_function when running `python setup.py
+# test` (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
+
+setup(
+    name='virtualenv',
+    version=get_version(),
+    description="Virtual Python Environment builder",
+    long_description=long_description,
+    classifiers=[
+        'Development Status :: 5 - Production/Stable',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.1',
+        'Programming Language :: Python :: 3.2',
+    ],
+    keywords='setuptools deployment installation distutils',
+    author='Ian Bicking',
+    author_email='ianb@colorstudy.com',
+    maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',
+    maintainer_email='python-virtualenv@groups.google.com',
+    url='https://virtualenv.pypa.io/',
+    license='MIT',
+    py_modules=['virtualenv'],
+    packages=['virtualenv_support'],
+    package_data={'virtualenv_support': ['*.whl']},
+    **setup_params)
diff --git a/bootstrap/virtualenv/tests/__init__.py b/bootstrap/virtualenv/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/virtualenv/tests/__init__.py
diff --git a/bootstrap/virtualenv/tests/test_activate.sh b/bootstrap/virtualenv/tests/test_activate.sh
new file mode 100755
index 0000000..a2b79a5
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_activate.sh
@@ -0,0 +1,94 @@
+#!/bin/sh
+
+ROOT="$(dirname $0)/.."
+VIRTUALENV="${ROOT}/virtualenv.py"
+TESTENV="/tmp/test_virtualenv_activate.venv"
+
+rm -rf ${TESTENV}
+
+echo "$0: Creating virtualenv ${TESTENV}..." 1>&2
+
+${VIRTUALENV} ${TESTENV} | tee ${ROOT}/tests/test_activate_actual.output
+if ! diff ${ROOT}/tests/test_activate_expected.output ${ROOT}/tests/test_activate_actual.output; then
+    echo "$0: Failed to get expected output from ${VIRTUALENV}!" 1>&2
+    exit 1
+fi
+
+echo "$0: Created virtualenv ${TESTENV}." 1>&2
+
+echo "$0: Activating ${TESTENV}..." 1>&2
+. ${TESTENV}/bin/activate
+echo "$0: Activated ${TESTENV}." 1>&2
+
+echo "$0: Checking value of \$VIRTUAL_ENV..." 1>&2
+
+if [ "$VIRTUAL_ENV" != "${TESTENV}" ]; then
+    echo "$0: Expected \$VIRTUAL_ENV to be set to \"${TESTENV}\"; actual value: \"${VIRTUAL_ENV}\"!" 1>&2
+    exit 2
+fi
+
+echo "$0: \$VIRTUAL_ENV = \"${VIRTUAL_ENV}\" -- OK." 1>&2
+
+echo "$0: Checking output of \$(which python)..." 1>&2
+
+if [ "$(which python)" != "${TESTENV}/bin/python" ]; then
+    echo "$0: Expected \$(which python) to return \"${TESTENV}/bin/python\"; actual value: \"$(which python)\"!" 1>&2
+    exit 3
+fi
+
+echo "$0: Output of \$(which python) is OK." 1>&2
+
+echo "$0: Checking output of \$(which pip)..." 1>&2
+
+if [ "$(which pip)" != "${TESTENV}/bin/pip" ]; then
+    echo "$0: Expected \$(which pip) to return \"${TESTENV}/bin/pip\"; actual value: \"$(which pip)\"!" 1>&2
+    exit 4
+fi
+
+echo "$0: Output of \$(which pip) is OK." 1>&2
+
+echo "$0: Checking output of \$(which easy_install)..." 1>&2
+
+if [ "$(which easy_install)" != "${TESTENV}/bin/easy_install" ]; then
+    echo "$0: Expected \$(which easy_install) to return \"${TESTENV}/bin/easy_install\"; actual value: \"$(which easy_install)\"!" 1>&2
+    exit 5
+fi
+
+echo "$0: Output of \$(which easy_install) is OK." 1>&2
+
+echo "$0: Executing a simple Python program..." 1>&2
+
+TESTENV=${TESTENV} python <<__END__
+import os, sys
+
+expected_site_packages = os.path.join(os.environ['TESTENV'], 'lib','python%s' % sys.version[:3], 'site-packages')
+site_packages = os.path.join(os.environ['VIRTUAL_ENV'], 'lib', 'python%s' % sys.version[:3], 'site-packages')
+
+assert site_packages == expected_site_packages, 'site_packages did not have expected value; actual value: %r' % site_packages
+
+open(os.path.join(site_packages, 'pydoc_test.py'), 'w').write('"""This is pydoc_test.py"""\n')
+__END__
+
+if [ $? -ne 0 ]; then
+    echo "$0: Python script failed!" 1>&2
+    exit 6
+fi
+
+echo "$0: Execution of a simple Python program -- OK." 1>&2
+
+echo "$0: Testing pydoc..." 1>&2
+
+if ! PAGER=cat pydoc pydoc_test | grep 'This is pydoc_test.py' > /dev/null; then
+    echo "$0: pydoc test failed!" 1>&2
+    exit 7
+fi
+
+echo "$0: pydoc is OK." 1>&2
+
+echo "$0: Deactivating ${TESTENV}..." 1>&2
+deactivate
+echo "$0: Deactivated ${TESTENV}." 1>&2
+echo "$0: OK!" 1>&2
+
+rm -rf ${TESTENV}
+
diff --git a/bootstrap/virtualenv/tests/test_activate_expected.output b/bootstrap/virtualenv/tests/test_activate_expected.output
new file mode 100644
index 0000000..35bf7f7
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_activate_expected.output
@@ -0,0 +1,2 @@
+New python executable in /tmp/test_virtualenv_activate.venv/bin/python
+Installing setuptools, pip...done.
diff --git a/bootstrap/virtualenv/tests/test_virtualenv.py b/bootstrap/virtualenv/tests/test_virtualenv.py
new file mode 100644
index 0000000..10c1136
--- /dev/null
+++ b/bootstrap/virtualenv/tests/test_virtualenv.py
@@ -0,0 +1,139 @@
+import virtualenv
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+from mock import patch, Mock
+
+
+def test_version():
+    """Should have a version string"""
+    assert virtualenv.virtualenv_version, "Should have version"
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_absolute_path(mock_exists):
+    """Should return absolute path if given and exists"""
+    mock_exists.return_value = True
+    virtualenv.is_executable = Mock(return_value=True)
+
+    exe = virtualenv.resolve_interpreter("/usr/bin/python42")
+
+    assert exe == "/usr/bin/python42", "Absolute path should return as is"
+    mock_exists.assert_called_with("/usr/bin/python42")
+    virtualenv.is_executable.assert_called_with("/usr/bin/python42")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_nonexistent_interpreter(mock_exists):
+    """Should exit when with absolute path if not exists"""
+    mock_exists.return_value = False
+
+    try:
+        virtualenv.resolve_interpreter("/usr/bin/python42")
+        assert False, "Should raise exception"
+    except SystemExit:
+        pass
+
+    mock_exists.assert_called_with("/usr/bin/python42")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_invalid_interpreter(mock_exists):
+    """Should exit when with absolute path if not exists"""
+    mock_exists.return_value = True
+    virtualenv.is_executable = Mock(return_value=False)
+
+    try:
+        virtualenv.resolve_interpreter("/usr/bin/python42")
+        assert False, "Should raise exception"
+    except SystemExit:
+        pass
+
+    mock_exists.assert_called_with("/usr/bin/python42")
+    virtualenv.is_executable.assert_called_with("/usr/bin/python42")
+
+
+def test_activate_after_future_statements():
+    """Should insert activation line after last future statement"""
+    script = [
+        '#!/usr/bin/env python',
+        'from __future__ import with_statement',
+        'from __future__ import print_function',
+        'print("Hello, world!")'
+    ]
+    assert virtualenv.relative_script(script) == [
+        '#!/usr/bin/env python',
+        'from __future__ import with_statement',
+        'from __future__ import print_function',
+        '',
+        "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this",
+        '',
+        'print("Hello, world!")'
+    ]
+
+
+def test_cop_update_defaults_with_store_false():
+    """store_false options need reverted logic"""
+    class MyConfigOptionParser(virtualenv.ConfigOptionParser):
+        def __init__(self, *args, **kwargs):
+            self.config = virtualenv.ConfigParser.RawConfigParser()
+            self.files = []
+            optparse.OptionParser.__init__(self, *args, **kwargs)
+
+        def get_environ_vars(self, prefix='VIRTUALENV_'):
+            yield ("no_site_packages", "1")
+
+    cop = MyConfigOptionParser()
+    cop.add_option(
+        '--no-site-packages',
+        dest='system_site_packages',
+        action='store_false',
+        help="Don't give access to the global site-packages dir to the "
+             "virtual environment (default)")
+
+    defaults = {}
+    cop.update_defaults(defaults)
+    assert defaults == {'system_site_packages': 0}
+
+def test_install_python_bin():
+    """Should create the right python executables and links"""
+    tmp_virtualenv = tempfile.mkdtemp()
+    try:
+        home_dir, lib_dir, inc_dir, bin_dir = \
+                                virtualenv.path_locations(tmp_virtualenv)
+        virtualenv.install_python(home_dir, lib_dir, inc_dir, bin_dir, False,
+                                  False)
+
+        if virtualenv.is_win:
+            required_executables = [ 'python.exe', 'pythonw.exe']
+        else:
+            py_exe_no_version = 'python'
+            py_exe_version_major = 'python%s' % sys.version_info[0]
+            py_exe_version_major_minor = 'python%s.%s' % (
+                sys.version_info[0], sys.version_info[1])
+            required_executables = [ py_exe_no_version, py_exe_version_major,
+                                     py_exe_version_major_minor ]
+
+        for pth in required_executables:
+            assert os.path.exists(os.path.join(bin_dir, pth)), ("%s should "
+                            "exist in bin_dir" % pth)
+    finally:
+        shutil.rmtree(tmp_virtualenv)
+
+
+def test_always_copy_option():
+    """Should be no symlinks in directory tree"""
+    tmp_virtualenv = tempfile.mkdtemp()
+    ve_path = os.path.join(tmp_virtualenv, 'venv')
+    try:
+        virtualenv.create_environment(ve_path, symlink=False)
+
+        for root, dirs, files in os.walk(tmp_virtualenv):
+            for f in files + dirs:
+                full_name = os.path.join(root, f)
+                assert not os.path.islink(full_name), "%s should not be a" \
+                    " symlink (to %s)" % (full_name, os.readlink(full_name))
+    finally:
+        shutil.rmtree(tmp_virtualenv)
diff --git a/bootstrap/virtualenv/tests/tox.ini b/bootstrap/virtualenv/tests/tox.ini
new file mode 100644
index 0000000..da59e02
--- /dev/null
+++ b/bootstrap/virtualenv/tests/tox.ini
@@ -0,0 +1,12 @@
+# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = py25, py26, py27, py31, py32, pypy, jython
+setupdir = ..
+
+[testenv]
+commands = python setup.py test
+changedir = ..
diff --git a/bootstrap/virtualenv/tox.ini b/bootstrap/virtualenv/tox.ini
new file mode 100644
index 0000000..d2661ea
--- /dev/null
+++ b/bootstrap/virtualenv/tox.ini
@@ -0,0 +1,17 @@
+[tox]
+envlist =
+    py26,py27,py32,py33,py34,pypy,pypy3,docs
+
+[testenv]
+deps =
+    mock
+    pytest
+commands =
+    py.test []
+    python virtualenv.py {envtmpdir}/test-venv-01
+
+[testenv:docs]
+deps = sphinx
+basepython = python2.7
+commands =
+    sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
diff --git a/bootstrap/virtualenv/virtualenv.py b/bootstrap/virtualenv/virtualenv.py
new file mode 100755
index 0000000..380a601
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv.py
@@ -0,0 +1,2367 @@
+#!/usr/bin/env python
+"""Create a "virtual" Python installation
+"""
+
+__version__ = "12.0"
+virtualenv_version = __version__  # legacy
+
+# NB: avoid placing additional imports here, before sys.path is fixed!
+
+import sys
+import os
+
+#
+# RATIONALE:
+# This script is both its own "host" and "guest".  If it's running in "guest
+# mode" (inside the virtualenv interpreter), it's essentially invoked via:
+#     /path/to/python /path/to/this/script.py
+#
+# Which, by the nature of Python, will put `/path/to/this` on the system path
+# as the first entry.  Now this can cause many subtle bugs, because the
+# rest of the script is now looking to import from the "host" Python version
+# first.  This has been especially troublesome when trying to create a Python
+# 3 "guest" env using a Python 2 "host", but even with minor Python
+# differences, there may be some bleeding between environments that doesn't
+# stand out as obviously.
+#
+# This removes the first entry from the system path, to avoid any accidental
+# usage of the "host" library directories.
+#
+if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+    del sys.path[0]
+
+import base64
+import codecs
+import optparse
+import re
+import shutil
+import logging
+import tempfile
+import zlib
+import errno
+import glob
+import distutils.sysconfig
+from distutils.util import strtobool
+import struct
+import subprocess
+import tarfile
+
+if sys.version_info < (2, 6):
+    print('ERROR: %s' % sys.exc_info()[1])
+    print('ERROR: this script requires Python 2.6 or greater.')
+    sys.exit(101)
+
+try:
+    basestring
+except NameError:
+    basestring = str
+
+try:
+    import ConfigParser
+except ImportError:
+    import configparser as ConfigParser
+
+join = os.path.join
+py_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
+
+is_jython = sys.platform.startswith('java')
+is_pypy = hasattr(sys, 'pypy_version_info')
+is_win = (sys.platform == 'win32')
+is_cygwin = (sys.platform == 'cygwin')
+is_darwin = (sys.platform == 'darwin')
+abiflags = getattr(sys, 'abiflags', '')
+
+user_dir = os.path.expanduser('~')
+if is_win:
+    default_storage_dir = os.path.join(user_dir, 'virtualenv')
+else:
+    default_storage_dir = os.path.join(user_dir, '.virtualenv')
+default_config_file = os.path.join(default_storage_dir, 'virtualenv.ini')
+
+if is_pypy:
+    expected_exe = 'pypy'
+elif is_jython:
+    expected_exe = 'jython'
+else:
+    expected_exe = 'python'
+
+# Return a mapping of version -> Python executable
+# Only provided for Windows, where the information in the registry is used
+if not is_win:
+    def get_installed_pythons():
+        return {}
+else:
+    try:
+        import winreg
+    except ImportError:
+        import _winreg as winreg
+
+    def get_installed_pythons():
+        try:
+            python_core = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
+                    "Software\\Python\\PythonCore")
+        except WindowsError:
+            # No registered Python installations
+            return {}
+        i = 0
+        versions = []
+        while True:
+            try:
+                versions.append(winreg.EnumKey(python_core, i))
+                i = i + 1
+            except WindowsError:
+                break
+        exes = dict()
+        for ver in versions:
+            try:
+                path = winreg.QueryValue(python_core, "%s\\InstallPath" % ver)
+            except WindowsError:
+                continue
+            exes[ver] = join(path, "python.exe")
+
+        winreg.CloseKey(python_core)
+
+        # Add the major versions
+        # Sort the keys, then repeatedly update the major version entry
+        # Last executable (i.e., highest version) wins with this approach
+        for ver in sorted(exes):
+            exes[ver[0]] = exes[ver]
+
+        return exes
+
+REQUIRED_MODULES = ['os', 'posix', 'posixpath', 'nt', 'ntpath', 'genericpath',
+                    'fnmatch', 'locale', 'encodings', 'codecs',
+                    'stat', 'UserDict', 'readline', 'copy_reg', 'types',
+                    're', 'sre', 'sre_parse', 'sre_constants', 'sre_compile',
+                    'zlib']
+
+REQUIRED_FILES = ['lib-dynload', 'config']
+
+majver, minver = sys.version_info[:2]
+if majver == 2:
+    if minver >= 6:
+        REQUIRED_MODULES.extend(['warnings', 'linecache', '_abcoll', 'abc'])
+    if minver >= 7:
+        REQUIRED_MODULES.extend(['_weakrefset'])
+elif majver == 3:
+    # Some extra modules are needed for Python 3, but different ones
+    # for different versions.
+    REQUIRED_MODULES.extend(['_abcoll', 'warnings', 'linecache', 'abc', 'io',
+                             '_weakrefset', 'copyreg', 'tempfile', 'random',
+                             '__future__', 'collections', 'keyword', 'tarfile',
+                             'shutil', 'struct', 'copy', 'tokenize', 'token',
+                             'functools', 'heapq', 'bisect', 'weakref',
+                             'reprlib'])
+    if minver >= 2:
+        REQUIRED_FILES[-1] = 'config-%s' % majver
+    if minver >= 3:
+        import sysconfig
+        platdir = sysconfig.get_config_var('PLATDIR')
+        REQUIRED_FILES.append(platdir)
+        # The whole list of 3.3 modules is reproduced below - the current
+        # uncommented ones are required for 3.3 as of now, but more may be
+        # added as 3.3 development continues.
+        REQUIRED_MODULES.extend([
+            #"aifc",
+            #"antigravity",
+            #"argparse",
+            #"ast",
+            #"asynchat",
+            #"asyncore",
+            "base64",
+            #"bdb",
+            #"binhex",
+            #"bisect",
+            #"calendar",
+            #"cgi",
+            #"cgitb",
+            #"chunk",
+            #"cmd",
+            #"codeop",
+            #"code",
+            #"colorsys",
+            #"_compat_pickle",
+            #"compileall",
+            #"concurrent",
+            #"configparser",
+            #"contextlib",
+            #"cProfile",
+            #"crypt",
+            #"csv",
+            #"ctypes",
+            #"curses",
+            #"datetime",
+            #"dbm",
+            #"decimal",
+            #"difflib",
+            #"dis",
+            #"doctest",
+            #"dummy_threading",
+            "_dummy_thread",
+            #"email",
+            #"filecmp",
+            #"fileinput",
+            #"formatter",
+            #"fractions",
+            #"ftplib",
+            #"functools",
+            #"getopt",
+            #"getpass",
+            #"gettext",
+            #"glob",
+            #"gzip",
+            "hashlib",
+            #"heapq",
+            "hmac",
+            #"html",
+            #"http",
+            #"idlelib",
+            #"imaplib",
+            #"imghdr",
+            "imp",
+            "importlib",
+            #"inspect",
+            #"json",
+            #"lib2to3",
+            #"logging",
+            #"macpath",
+            #"macurl2path",
+            #"mailbox",
+            #"mailcap",
+            #"_markupbase",
+            #"mimetypes",
+            #"modulefinder",
+            #"multiprocessing",
+            #"netrc",
+            #"nntplib",
+            #"nturl2path",
+            #"numbers",
+            #"opcode",
+            #"optparse",
+            #"os2emxpath",
+            #"pdb",
+            #"pickle",
+            #"pickletools",
+            #"pipes",
+            #"pkgutil",
+            #"platform",
+            #"plat-linux2",
+            #"plistlib",
+            #"poplib",
+            #"pprint",
+            #"profile",
+            #"pstats",
+            #"pty",
+            #"pyclbr",
+            #"py_compile",
+            #"pydoc_data",
+            #"pydoc",
+            #"_pyio",
+            #"queue",
+            #"quopri",
+            #"reprlib",
+            "rlcompleter",
+            #"runpy",
+            #"sched",
+            #"shelve",
+            #"shlex",
+            #"smtpd",
+            #"smtplib",
+            #"sndhdr",
+            #"socket",
+            #"socketserver",
+            #"sqlite3",
+            #"ssl",
+            #"stringprep",
+            #"string",
+            #"_strptime",
+            #"subprocess",
+            #"sunau",
+            #"symbol",
+            #"symtable",
+            #"sysconfig",
+            #"tabnanny",
+            #"telnetlib",
+            #"test",
+            #"textwrap",
+            #"this",
+            #"_threading_local",
+            #"threading",
+            #"timeit",
+            #"tkinter",
+            #"tokenize",
+            #"token",
+            #"traceback",
+            #"trace",
+            #"tty",
+            #"turtledemo",
+            #"turtle",
+            #"unittest",
+            #"urllib",
+            #"uuid",
+            #"uu",
+            #"wave",
+            #"weakref",
+            #"webbrowser",
+            #"wsgiref",
+            #"xdrlib",
+            #"xml",
+            #"xmlrpc",
+            #"zipfile",
+        ])
+    if minver >= 4:
+        REQUIRED_MODULES.extend([
+            'operator',
+            '_collections_abc',
+            '_bootlocale',
+        ])
+
+if is_pypy:
+    # these are needed to correctly display the exceptions that may happen
+    # during the bootstrap
+    REQUIRED_MODULES.extend(['traceback', 'linecache'])
+
+class Logger(object):
+
+    """
+    Logging object for use in command-line script.  Allows ranges of
+    levels, to avoid some redundancy of displayed information.
+    """
+
+    DEBUG = logging.DEBUG
+    INFO = logging.INFO
+    NOTIFY = (logging.INFO+logging.WARN)/2
+    WARN = WARNING = logging.WARN
+    ERROR = logging.ERROR
+    FATAL = logging.FATAL
+
+    LEVELS = [DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
+
+    def __init__(self, consumers):
+        self.consumers = consumers
+        self.indent = 0
+        self.in_progress = None
+        self.in_progress_hanging = False
+
+    def debug(self, msg, *args, **kw):
+        self.log(self.DEBUG, msg, *args, **kw)
+    def info(self, msg, *args, **kw):
+        self.log(self.INFO, msg, *args, **kw)
+    def notify(self, msg, *args, **kw):
+        self.log(self.NOTIFY, msg, *args, **kw)
+    def warn(self, msg, *args, **kw):
+        self.log(self.WARN, msg, *args, **kw)
+    def error(self, msg, *args, **kw):
+        self.log(self.ERROR, msg, *args, **kw)
+    def fatal(self, msg, *args, **kw):
+        self.log(self.FATAL, msg, *args, **kw)
+    def log(self, level, msg, *args, **kw):
+        if args:
+            if kw:
+                raise TypeError(
+                    "You may give positional or keyword arguments, not both")
+        args = args or kw
+        rendered = None
+        for consumer_level, consumer in self.consumers:
+            if self.level_matches(level, consumer_level):
+                if (self.in_progress_hanging
+                    and consumer in (sys.stdout, sys.stderr)):
+                    self.in_progress_hanging = False
+                    sys.stdout.write('\n')
+                    sys.stdout.flush()
+                if rendered is None:
+                    if args:
+                        rendered = msg % args
+                    else:
+                        rendered = msg
+                    rendered = ' '*self.indent + rendered
+                if hasattr(consumer, 'write'):
+                    consumer.write(rendered+'\n')
+                else:
+                    consumer(rendered)
+
+    def start_progress(self, msg):
+        assert not self.in_progress, (
+            "Tried to start_progress(%r) while in_progress %r"
+            % (msg, self.in_progress))
+        if self.level_matches(self.NOTIFY, self._stdout_level()):
+            sys.stdout.write(msg)
+            sys.stdout.flush()
+            self.in_progress_hanging = True
+        else:
+            self.in_progress_hanging = False
+        self.in_progress = msg
+
+    def end_progress(self, msg='done.'):
+        assert self.in_progress, (
+            "Tried to end_progress without start_progress")
+        if self.stdout_level_matches(self.NOTIFY):
+            if not self.in_progress_hanging:
+                # Some message has been printed out since start_progress
+                sys.stdout.write('...' + self.in_progress + msg + '\n')
+                sys.stdout.flush()
+            else:
+                sys.stdout.write(msg + '\n')
+                sys.stdout.flush()
+        self.in_progress = None
+        self.in_progress_hanging = False
+
+    def show_progress(self):
+        """If we are in a progress scope, and no log messages have been
+        shown, write out another '.'"""
+        if self.in_progress_hanging:
+            sys.stdout.write('.')
+            sys.stdout.flush()
+
+    def stdout_level_matches(self, level):
+        """Returns true if a message at this level will go to stdout"""
+        return self.level_matches(level, self._stdout_level())
+
+    def _stdout_level(self):
+        """Returns the level that stdout runs at"""
+        for level, consumer in self.consumers:
+            if consumer is sys.stdout:
+                return level
+        return self.FATAL
+
+    def level_matches(self, level, consumer_level):
+        """
+        >>> l = Logger([])
+        >>> l.level_matches(3, 4)
+        False
+        >>> l.level_matches(3, 2)
+        True
+        >>> l.level_matches(slice(None, 3), 3)
+        False
+        >>> l.level_matches(slice(None, 3), 2)
+        True
+        >>> l.level_matches(slice(1, 3), 1)
+        True
+        >>> l.level_matches(slice(2, 3), 1)
+        False
+        """
+        if isinstance(level, slice):
+            start, stop = level.start, level.stop
+            if start is not None and start > consumer_level:
+                return False
+            if stop is not None and stop <= consumer_level:
+                return False
+            return True
+        else:
+            return level >= consumer_level
+
+    @classmethod
+    def level_for_integer(cls, level):
+        levels = cls.LEVELS
+        if level < 0:
+            return levels[0]
+        if level >= len(levels):
+            return levels[-1]
+        return levels[level]
+
+# create a silent logger just to prevent this from being undefined
+# will be overridden with the requested verbosity when main() is called.
+logger = Logger([(Logger.LEVELS[-1], sys.stdout)])
+
+def mkdir(path):
+    if not os.path.exists(path):
+        logger.info('Creating %s', path)
+        os.makedirs(path)
+    else:
+        logger.info('Directory %s already exists', path)
+
+def copyfileordir(src, dest, symlink=True):
+    if os.path.isdir(src):
+        shutil.copytree(src, dest, symlink)
+    else:
+        shutil.copy2(src, dest)
+
+def copyfile(src, dest, symlink=True):
+    if not os.path.exists(src):
+        # Some bad symlink in the src
+        logger.warn('Cannot find file %s (bad symlink)', src)
+        return
+    if os.path.exists(dest):
+        logger.debug('File %s already exists', dest)
+        return
+    if not os.path.exists(os.path.dirname(dest)):
+        logger.info('Creating parent directories for %s', os.path.dirname(dest))
+        os.makedirs(os.path.dirname(dest))
+    if not os.path.islink(src):
+        srcpath = os.path.abspath(src)
+    else:
+        srcpath = os.readlink(src)
+    if symlink and hasattr(os, 'symlink') and not is_win:
+        logger.info('Symlinking %s', dest)
+        try:
+            os.symlink(srcpath, dest)
+        except (OSError, NotImplementedError):
+            logger.info('Symlinking failed, copying to %s', dest)
+            copyfileordir(src, dest, symlink)
+    else:
+        logger.info('Copying to %s', dest)
+        copyfileordir(src, dest, symlink)
+
+def writefile(dest, content, overwrite=True):
+    if not os.path.exists(dest):
+        logger.info('Writing %s', dest)
+        f = open(dest, 'wb')
+        f.write(content.encode('utf-8'))
+        f.close()
+        return
+    else:
+        f = open(dest, 'rb')
+        c = f.read()
+        f.close()
+        if c != content.encode("utf-8"):
+            if not overwrite:
+                logger.notify('File %s exists with different content; not overwriting', dest)
+                return
+            logger.notify('Overwriting %s with new content', dest)
+            f = open(dest, 'wb')
+            f.write(content.encode('utf-8'))
+            f.close()
+        else:
+            logger.info('Content %s already in place', dest)
+
+def rmtree(dir):
+    if os.path.exists(dir):
+        logger.notify('Deleting tree %s', dir)
+        shutil.rmtree(dir)
+    else:
+        logger.info('Do not need to delete %s; already gone', dir)
+
+def make_exe(fn):
+    if hasattr(os, 'chmod'):
+        oldmode = os.stat(fn).st_mode & 0xFFF # 0o7777
+        newmode = (oldmode | 0x16D) & 0xFFF # 0o555, 0o7777
+        os.chmod(fn, newmode)
+        logger.info('Changed mode of %s to %s', fn, oct(newmode))
+
+def _find_file(filename, dirs):
+    for dir in reversed(dirs):
+        files = glob.glob(os.path.join(dir, filename))
+        if files and os.path.isfile(files[0]):
+            return True, files[0]
+    return False, filename
+
+def file_search_dirs():
+    here = os.path.dirname(os.path.abspath(__file__))
+    dirs = ['.', here,
+            join(here, 'virtualenv_support')]
+    if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
+        # Probably some boot script; just in case virtualenv is installed...
+        try:
+            import virtualenv
+        except ImportError:
+            pass
+        else:
+            dirs.append(os.path.join(os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
+    return [d for d in dirs if os.path.isdir(d)]
+
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+    """
+    Custom help formatter for use in ConfigOptionParser that updates
+    the defaults before expanding them, allowing them to show up correctly
+    in the help listing
+    """
+    def expand_default(self, option):
+        if self.parser is not None:
+            self.parser.update_defaults(self.parser.defaults)
+        return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+    """
+    Custom option parser which updates its defaults by checking the
+    configuration files and environmental variables
+    """
+    def __init__(self, *args, **kwargs):
+        self.config = ConfigParser.RawConfigParser()
+        self.files = self.get_config_files()
+        self.config.read(self.files)
+        optparse.OptionParser.__init__(self, *args, **kwargs)
+
+    def get_config_files(self):
+        config_file = os.environ.get('VIRTUALENV_CONFIG_FILE', False)
+        if config_file and os.path.exists(config_file):
+            return [config_file]
+        return [default_config_file]
+
+    def update_defaults(self, defaults):
+        """
+        Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists).
+        """
+        # Then go and look for the other sources of configuration:
+        config = {}
+        # 1. config files
+        config.update(dict(self.get_config_section('virtualenv')))
+        # 2. environmental variables
+        config.update(dict(self.get_environ_vars()))
+        # Then set the options with those values
+        for key, val in config.items():
+            key = key.replace('_', '-')
+            if not key.startswith('--'):
+                key = '--%s' % key  # only prefer long opts
+            option = self.get_option(key)
+            if option is not None:
+                # ignore empty values
+                if not val:
+                    continue
+                # handle multiline configs
+                if option.action == 'append':
+                    val = val.split()
+                else:
+                    option.nargs = 1
+                if option.action == 'store_false':
+                    val = not strtobool(val)
+                elif option.action in ('store_true', 'count'):
+                    val = strtobool(val)
+                try:
+                    val = option.convert_value(key, val)
+                except optparse.OptionValueError:
+                    e = sys.exc_info()[1]
+                    print("An error occurred during configuration: %s" % e)
+                    sys.exit(3)
+                defaults[option.dest] = val
+        return defaults
+
+    def get_config_section(self, name):
+        """
+        Get a section of a configuration
+        """
+        if self.config.has_section(name):
+            return self.config.items(name)
+        return []
+
+    def get_environ_vars(self, prefix='VIRTUALENV_'):
+        """
+        Yields (name, value) for each environment variable starting with the
+        given prefix, with the prefix stripped and the name lowercased
+        """
+        for key, val in os.environ.items():
+            if key.startswith(prefix):
+                yield (key.replace(prefix, '').lower(), val)
+
+    def get_default_values(self):
+        """
+        Overriding to make updating the defaults after instantiation of
+        the option parser possible; update_defaults() does the dirty work.
+        """
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+            return optparse.Values(self.defaults)
+
+        defaults = self.update_defaults(self.defaults.copy())  # ours
+        for option in self._get_all_options():
+            default = defaults.get(option.dest)
+            if isinstance(default, basestring):
+                opt_str = option.get_opt_string()
+                defaults[option.dest] = option.check_value(opt_str, default)
+        return optparse.Values(defaults)
+
+
+def main():
+    parser = ConfigOptionParser(
+        version=virtualenv_version,
+        usage="%prog [OPTIONS] DEST_DIR",
+        formatter=UpdatingDefaultsHelpFormatter())
+
+    parser.add_option(
+        '-v', '--verbose',
+        action='count',
+        dest='verbose',
+        default=0,
+        help="Increase verbosity.")
+
+    parser.add_option(
+        '-q', '--quiet',
+        action='count',
+        dest='quiet',
+        default=0,
+        help='Decrease verbosity.')
+
+    parser.add_option(
+        '-p', '--python',
+        dest='python',
+        metavar='PYTHON_EXE',
+        help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
+        'interpreter to create the new environment.  The default is the interpreter that '
+        'virtualenv was installed with (%s)' % sys.executable)
+
+    parser.add_option(
+        '--clear',
+        dest='clear',
+        action='store_true',
+        help="Clear out the non-root install and start from scratch.")
+
+    parser.set_defaults(system_site_packages=False)
+    parser.add_option(
+        '--no-site-packages',
+        dest='system_site_packages',
+        action='store_false',
+        help="DEPRECATED. Retained only for backward compatibility. "
+             "Not having access to global site-packages is now the default behavior.")
+
+    parser.add_option(
+        '--system-site-packages',
+        dest='system_site_packages',
+        action='store_true',
+        help="Give the virtual environment access to the global site-packages.")
+
+    parser.add_option(
+        '--always-copy',
+        dest='symlink',
+        action='store_false',
+        default=True,
+        help="Always copy files rather than symlinking.")
+
+    parser.add_option(
+        '--unzip-setuptools',
+        dest='unzip_setuptools',
+        action='store_true',
+        help="Unzip Setuptools when installing it.")
+
+    parser.add_option(
+        '--relocatable',
+        dest='relocatable',
+        action='store_true',
+        help='Make an EXISTING virtualenv environment relocatable. '
+             'This fixes up scripts and makes all .pth files relative.')
+
+    parser.add_option(
+        '--no-setuptools',
+        dest='no_setuptools',
+        action='store_true',
+        help='Do not install setuptools (or pip) in the new virtualenv.')
+
+    parser.add_option(
+        '--no-pip',
+        dest='no_pip',
+        action='store_true',
+        help='Do not install pip in the new virtualenv.')
+
+    default_search_dirs = file_search_dirs()
+    parser.add_option(
+        '--extra-search-dir',
+        dest="search_dirs",
+        action="append",
+        metavar='DIR',
+        default=default_search_dirs,
+        help="Directory to look for setuptools/pip distributions in. "
+              "This option can be used multiple times.")
+
+    parser.add_option(
+        '--never-download',
+        dest="never_download",
+        action="store_true",
+        default=True,
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect. "
+              "Virtualenv never downloads pip or setuptools.")
+
+    parser.add_option(
+        '--prompt',
+        dest='prompt',
+        help='Provides an alternative prompt prefix for this environment.')
+
+    parser.add_option(
+        '--setuptools',
+        dest='setuptools',
+        action='store_true',
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+    parser.add_option(
+        '--distribute',
+        dest='distribute',
+        action='store_true',
+        help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+    if 'extend_parser' in globals():
+        extend_parser(parser)
+
+    options, args = parser.parse_args()
+
+    global logger
+
+    if 'adjust_options' in globals():
+        adjust_options(options, args)
+
+    verbosity = options.verbose - options.quiet
+    logger = Logger([(Logger.level_for_integer(2 - verbosity), sys.stdout)])
+
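+    # Re-exec under the requested interpreter when a different --python was
+    # given; VIRTUALENV_INTERPRETER_RUNNING guards against re-launching again.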
+    if options.python and not os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+        env = os.environ.copy()
+        interpreter = resolve_interpreter(options.python)
+        if interpreter == sys.executable:
+            logger.warn('Already using interpreter %s' % interpreter)
+        else:
+            logger.notify('Running virtualenv with interpreter %s' % interpreter)
+            env['VIRTUALENV_INTERPRETER_RUNNING'] = 'true'
+            file = __file__
+            if file.endswith('.pyc'):
+                file = file[:-1]
+            popen = subprocess.Popen([interpreter, file] + sys.argv[1:], env=env)
+            raise SystemExit(popen.wait())
+
+    if not args:
+        print('You must provide a DEST_DIR')
+        parser.print_help()
+        sys.exit(2)
+    if len(args) > 1:
+        print('There must be only one argument: DEST_DIR (you gave %s)' % (
+            ' '.join(args)))
+        parser.print_help()
+        sys.exit(2)
+
+    home_dir = args[0]
+
+    if os.environ.get('WORKING_ENV'):
+        logger.fatal('ERROR: you cannot run virtualenv while in a workingenv')
+        logger.fatal('Please deactivate your workingenv, then re-run this script')
+        sys.exit(3)
+
+    if 'PYTHONHOME' in os.environ:
+        logger.warn('PYTHONHOME is set.  You *must* activate the virtualenv before using it')
+        del os.environ['PYTHONHOME']
+
+    if options.relocatable:
+        make_environment_relocatable(home_dir)
+        return
+
+    if not options.never_download:
+        logger.warn('The --never-download option is for backward compatibility only.')
+        logger.warn('Setting it to false is no longer supported, and will be ignored.')
+
+    create_environment(home_dir,
+                       site_packages=options.system_site_packages,
+                       clear=options.clear,
+                       unzip_setuptools=options.unzip_setuptools,
+                       prompt=options.prompt,
+                       search_dirs=options.search_dirs,
+                       never_download=True,
+                       no_setuptools=options.no_setuptools,
+                       no_pip=options.no_pip,
+                       symlink=options.symlink)
+    if 'after_install' in globals():
+        after_install(options, home_dir)
+
+def call_subprocess(cmd, show_stdout=True,
+                    filter_stdout=None, cwd=None,
+                    raise_on_returncode=True, extra_env=None,
+                    remove_from_env=None):
+    cmd_parts = []
+    for part in cmd:
+        if len(part) > 45:
+            part = part[:20]+"..."+part[-20:]
+        if ' ' in part or '\n' in part or '"' in part or "'" in part:
+            part = '"%s"' % part.replace('"', '\\"')
+        if hasattr(part, 'decode'):
+            try:
+                part = part.decode(sys.getdefaultencoding())
+            except UnicodeDecodeError:
+                part = part.decode(sys.getfilesystemencoding())
+        cmd_parts.append(part)
+    cmd_desc = ' '.join(cmd_parts)
+    if show_stdout:
+        stdout = None
+    else:
+        stdout = subprocess.PIPE
+    logger.debug("Running command %s" % cmd_desc)
+    if extra_env or remove_from_env:
+        env = os.environ.copy()
+        if extra_env:
+            env.update(extra_env)
+        if remove_from_env:
+            for varname in remove_from_env:
+                env.pop(varname, None)
+    else:
+        env = None
+    try:
+        proc = subprocess.Popen(
+            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
+            cwd=cwd, env=env)
+    except Exception:
+        e = sys.exc_info()[1]
+        logger.fatal(
+            "Error %s while executing command %s" % (e, cmd_desc))
+        raise
+    all_output = []
+    if stdout is not None:
+        stdout = proc.stdout
+        encoding = sys.getdefaultencoding()
+        fs_encoding = sys.getfilesystemencoding()
+        while 1:
+            line = stdout.readline()
+            try:
+                line = line.decode(encoding)
+            except UnicodeDecodeError:
+                line = line.decode(fs_encoding)
+            if not line:
+                break
+            line = line.rstrip()
+            all_output.append(line)
+            if filter_stdout:
+                level = filter_stdout(line)
+                if isinstance(level, tuple):
+                    level, line = level
+                logger.log(level, line)
+                if not logger.stdout_level_matches(level):
+                    logger.show_progress()
+            else:
+                logger.info(line)
+    else:
+        proc.communicate()
+    proc.wait()
+    if proc.returncode:
+        if raise_on_returncode:
+            if all_output:
+                logger.notify('Complete output from command %s:' % cmd_desc)
+                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+            raise OSError(
+                "Command %s failed with error code %s"
+                % (cmd_desc, proc.returncode))
+        else:
+            logger.warn(
+                "Command %s had error code %s"
+                % (cmd_desc, proc.returncode))
+
+def filter_install_output(line):
+    if line.strip().startswith('running'):
+        return Logger.INFO
+    return Logger.DEBUG
+
+def find_wheels(projects, search_dirs):
+    """Find wheels from which we can import PROJECTS.
+
+    Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
+    a list of the first wheel found for each PROJECT.
+    """
+
+    wheels = []
+
+    # Look through SEARCH_DIRS for the first suitable wheel. Don't bother
+    # about version checking here, as this is simply to get something we can
+    # then use to install the correct version.
+    for project in projects:
+        for dirname in search_dirs:
+            # This relies on only having "universal" wheels available.
+            # The pattern could be tightened to require -py2.py3-none-any.whl.
+            files = glob.glob(os.path.join(dirname, project + '-*.whl'))
+            if files:
+                wheels.append(os.path.abspath(files[0]))
+                break
+        else:
+            # We're out of luck, so quit with a suitable error
+            logger.fatal('Cannot find a wheel for %s' % (project,))
+
+    return wheels
+
+def install_wheel(project_names, py_executable, search_dirs=None):
+    if search_dirs is None:
+        search_dirs = file_search_dirs()
+
+    wheels = find_wheels(['setuptools', 'pip'], search_dirs)
+    pythonpath = os.pathsep.join(wheels)
+    findlinks = ' '.join(search_dirs)
+
+    cmd = [
+        py_executable, '-c',
+        'import sys, pip; sys.exit(pip.main(["install", "--ignore-installed"] + sys.argv[1:]))',
+    ] + project_names
+    logger.start_progress('Installing %s...' % (', '.join(project_names)))
+    logger.indent += 2
+    try:
+        call_subprocess(cmd, show_stdout=False,
+            extra_env = {
+                'PYTHONPATH': pythonpath,
+                'PIP_FIND_LINKS': findlinks,
+                'PIP_USE_WHEEL': '1',
+                'PIP_PRE': '1',
+                'PIP_NO_INDEX': '1'
+            }
+        )
+    finally:
+        logger.indent -= 2
+        logger.end_progress()
+
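+# Note on install_wheel: pip is run *from* its own wheel -- wheels are
+# zip-importable, so putting them on PYTHONPATH makes `import pip` work --
+# and the PIP_* environment variables stand in for command-line flags
+# (PIP_NO_INDEX keeps pip off the network, PIP_FIND_LINKS points it at the
+# local wheel directories), so the install uses only local files.
+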
+def create_environment(home_dir, site_packages=False, clear=False,
+                       unzip_setuptools=False,
+                       prompt=None, search_dirs=None, never_download=False,
+                       no_setuptools=False, no_pip=False, symlink=True):
+    """
+    Creates a new environment in ``home_dir``.
+
+    If ``site_packages`` is true, then the global ``site-packages/``
+    directory will be on the path.
+
+    If ``clear`` is true (default False) then the environment will
+    first be cleared.
+    """
+    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+
+    py_executable = os.path.abspath(install_python(
+        home_dir, lib_dir, inc_dir, bin_dir,
+        site_packages=site_packages, clear=clear, symlink=symlink))
+
+    install_distutils(home_dir)
+
+    if not no_setuptools:
+        to_install = ['setuptools']
+        if not no_pip:
+            to_install.append('pip')
+        install_wheel(to_install, py_executable, search_dirs)
+
+    install_activate(home_dir, bin_dir, prompt)
+
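+# Programmatic usage sketch (the target path is hypothetical):
+#
+#   create_environment('/tmp/venv', site_packages=False, clear=False)
+#   # afterwards /tmp/venv/bin/python (Scripts\python.exe on Windows) is a
+#   # working interpreter with setuptools and pip installed into it.
+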
+def is_executable_file(fpath):
+    return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+def path_locations(home_dir):
+    """Return the path locations for the environment (where libraries are,
+    where scripts go, etc)"""
+    # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
+    # prefix arg is broken: http://bugs.python.org/issue3386
+    if is_win:
+        # Windows has lots of problems with executables with spaces in
+        # the name; this function will remove them (using the ~1
+        # format):
+        mkdir(home_dir)
+        if ' ' in home_dir:
+            import ctypes
+            GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
+            size = max(len(home_dir)+1, 256)
+            buf = ctypes.create_unicode_buffer(size)
+            try:
+                u = unicode
+            except NameError:
+                u = str
+            ret = GetShortPathName(u(home_dir), buf, size)
+            if not ret:
+                print('Error: the path "%s" has a space in it' % home_dir)
+                print('We could not determine the short pathname for it.')
+                print('Exiting.')
+                sys.exit(3)
+            home_dir = str(buf.value)
+        lib_dir = join(home_dir, 'Lib')
+        inc_dir = join(home_dir, 'Include')
+        bin_dir = join(home_dir, 'Scripts')
+    elif is_jython:
+        lib_dir = join(home_dir, 'Lib')
+        inc_dir = join(home_dir, 'Include')
+        bin_dir = join(home_dir, 'bin')
+    elif is_pypy:
+        lib_dir = home_dir
+        inc_dir = join(home_dir, 'include')
+        bin_dir = join(home_dir, 'bin')
+    elif not is_win:
+        lib_dir = join(home_dir, 'lib', py_version)
+        multiarch_exec = '/usr/bin/multiarch-platform'
+        if is_executable_file(multiarch_exec):
+            # In Mageia (2) and Mandriva distros the include dir must be like:
+            # virtualenv/include/multiarch-x86_64-linux/python2.7
+            # instead of being virtualenv/include/python2.7
+            p = subprocess.Popen(multiarch_exec, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            stdout, stderr = p.communicate()
+            # stdout.strip() removes the trailing newline
+            inc_dir = join(home_dir, 'include', stdout.strip(), py_version + abiflags)
+        else:
+            inc_dir = join(home_dir, 'include', py_version + abiflags)
+        bin_dir = join(home_dir, 'bin')
+    return home_dir, lib_dir, inc_dir, bin_dir
+
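+# On a typical POSIX CPython 2.7, path_locations('/tmp/venv') returns
+# (values illustrative):
+#
+#   ('/tmp/venv', '/tmp/venv/lib/python2.7',
+#    '/tmp/venv/include/python2.7', '/tmp/venv/bin')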
+
+def change_prefix(filename, dst_prefix):
+    prefixes = [sys.prefix]
+
+    if is_darwin:
+        prefixes.extend((
+            os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+            os.path.join(sys.prefix, "Extras", "lib", "python"),
+            os.path.join("~", "Library", "Python", sys.version[:3], "site-packages"),
+            # Python 2.6 no-frameworks
+            os.path.join("~", ".local", "lib","python", sys.version[:3], "site-packages"),
+            # System Python 2.7 on OSX Mountain Lion
+            os.path.join("~", "Library", "Python", sys.version[:3], "lib", "python", "site-packages")))
+
+    if hasattr(sys, 'real_prefix'):
+        prefixes.append(sys.real_prefix)
+    if hasattr(sys, 'base_prefix'):
+        prefixes.append(sys.base_prefix)
+    prefixes = list(map(os.path.expanduser, prefixes))
+    prefixes = list(map(os.path.abspath, prefixes))
+    # Check longer prefixes first so we don't split in the middle of a filename
+    prefixes = sorted(prefixes, key=len, reverse=True)
+    filename = os.path.abspath(filename)
+    for src_prefix in prefixes:
+        if filename.startswith(src_prefix):
+            _, relpath = filename.split(src_prefix, 1)
+            if src_prefix != os.sep: # sys.prefix == "/"
+                assert relpath[0] == os.sep
+                relpath = relpath[1:]
+            return join(dst_prefix, relpath)
+    assert False, "Filename %s does not start with any of these prefixes: %s" % \
+        (filename, prefixes)
+
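+# Example (illustrative), assuming sys.prefix == '/usr':
+#
+#   change_prefix('/usr/lib/python2.7/os.py', '/tmp/venv')
+#   # -> '/tmp/venv/lib/python2.7/os.py'
+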
+def copy_required_modules(dst_prefix, symlink):
+    import imp
+    for modname in REQUIRED_MODULES:
+        if modname in sys.builtin_module_names:
+            logger.info("Ignoring built-in bootstrap module: %s" % modname)
+            continue
+        try:
+            f, filename, _ = imp.find_module(modname)
+        except ImportError:
+            logger.info("Cannot import bootstrap module: %s" % modname)
+        else:
+            if f is not None:
+                f.close()
+            # special-case custom readline.so on OS X, but not for pypy:
+            if modname == 'readline' and sys.platform == 'darwin' and not (
+                    is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
+                dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
+            elif modname == 'readline' and sys.platform == 'win32':
+                # special-case for Windows, where readline is not a
+                # standard module, though it may have been installed in
+                # site-packages by a third-party package. Skip it here:
+                # falling through would call copyfile() with an undefined
+                # (or stale) dst_filename.
+                continue
+            else:
+                dst_filename = change_prefix(filename, dst_prefix)
+            copyfile(filename, dst_filename, symlink)
+            if filename.endswith('.pyc'):
+                pyfile = filename[:-1]
+                if os.path.exists(pyfile):
+                    copyfile(pyfile, dst_filename[:-1], symlink)
+
+
+def subst_path(prefix_path, prefix, home_dir):
+    prefix_path = os.path.normpath(prefix_path)
+    prefix = os.path.normpath(prefix)
+    home_dir = os.path.normpath(home_dir)
+    if not prefix_path.startswith(prefix):
+        logger.warn('Path not in prefix %r %r', prefix_path, prefix)
+        return
+    return prefix_path.replace(prefix, home_dir, 1)
+
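+# e.g. subst_path('/usr/include/python2.7', '/usr', '/tmp/venv')
+# -> '/tmp/venv/include/python2.7' (illustrative)
+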
+
+def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
+    """Install just the base environment, no distutils patches etc"""
+    if sys.executable.startswith(bin_dir):
+        print('Please use the *system* python to run this script')
+        return
+
+    if clear:
+        rmtree(lib_dir)
+        ## FIXME: why not delete it?
+        ## Maybe it should delete everything with #!/path/to/venv/python in it
+        logger.notify('Not deleting %s', bin_dir)
+
+    if hasattr(sys, 'real_prefix'):
+        logger.notify('Using real prefix %r' % sys.real_prefix)
+        prefix = sys.real_prefix
+    elif hasattr(sys, 'base_prefix'):
+        logger.notify('Using base prefix %r' % sys.base_prefix)
+        prefix = sys.base_prefix
+    else:
+        prefix = sys.prefix
+    mkdir(lib_dir)
+    fix_lib64(lib_dir, symlink)
+    stdlib_dirs = [os.path.dirname(os.__file__)]
+    if is_win:
+        stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
+    elif is_darwin:
+        stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
+    if hasattr(os, 'symlink'):
+        logger.info('Symlinking Python bootstrap modules')
+    else:
+        logger.info('Copying Python bootstrap modules')
+    logger.indent += 2
+    try:
+        # copy required files...
+        for stdlib_dir in stdlib_dirs:
+            if not os.path.isdir(stdlib_dir):
+                continue
+            for fn in os.listdir(stdlib_dir):
+                bn = os.path.splitext(fn)[0]
+                if fn != 'site-packages' and bn in REQUIRED_FILES:
+                    copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink)
+        # ...and modules
+        copy_required_modules(home_dir, symlink)
+    finally:
+        logger.indent -= 2
+    mkdir(join(lib_dir, 'site-packages'))
+    import site
+    site_filename = site.__file__
+    if site_filename.endswith('.pyc'):
+        site_filename = site_filename[:-1]
+    elif site_filename.endswith('$py.class'):
+        site_filename = site_filename.replace('$py.class', '.py')
+    site_filename_dst = change_prefix(site_filename, home_dir)
+    site_dir = os.path.dirname(site_filename_dst)
+    writefile(site_filename_dst, SITE_PY)
+    writefile(join(site_dir, 'orig-prefix.txt'), prefix)
+    site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
+    if not site_packages:
+        writefile(site_packages_filename, '')
+
+    if is_pypy or is_win:
+        stdinc_dir = join(prefix, 'include')
+    else:
+        stdinc_dir = join(prefix, 'include', py_version + abiflags)
+    if os.path.exists(stdinc_dir):
+        copyfile(stdinc_dir, inc_dir, symlink)
+    else:
+        logger.debug('No include dir %s' % stdinc_dir)
+
+    platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
+    if platinc_dir != stdinc_dir:
+        platinc_dest = distutils.sysconfig.get_python_inc(
+            plat_specific=1, prefix=home_dir)
+        if platinc_dir == platinc_dest:
+            # Do platinc_dest manually due to a CPython bug;
+            # not http://bugs.python.org/issue3386 but a close cousin
+            platinc_dest = subst_path(platinc_dir, prefix, home_dir)
+        if platinc_dest:
+            # PyPy's stdinc_dir and prefix are relative to the original binary
+            # (traversing virtualenvs), whereas the platinc_dir is relative to
+            # the inner virtualenv and ignores the prefix argument.
+            # This seems more evolved than designed.
+            copyfile(platinc_dir, platinc_dest, symlink)
+
+    # pypy never uses exec_prefix, just ignore it
+    if sys.exec_prefix != prefix and not is_pypy:
+        if is_win:
+            exec_dir = join(sys.exec_prefix, 'lib')
+        elif is_jython:
+            exec_dir = join(sys.exec_prefix, 'Lib')
+        else:
+            exec_dir = join(sys.exec_prefix, 'lib', py_version)
+        for fn in os.listdir(exec_dir):
+            copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink)
+
+    if is_jython:
+        # Jython has either jython-dev.jar and javalib/ dir, or just
+        # jython.jar
+        for name in 'jython-dev.jar', 'javalib', 'jython.jar':
+            src = join(prefix, name)
+            if os.path.exists(src):
+                copyfile(src, join(home_dir, name), symlink)
+        # XXX: registry should always exist after Jython 2.5rc1
+        src = join(prefix, 'registry')
+        if os.path.exists(src):
+            copyfile(src, join(home_dir, 'registry'), symlink=False)
+        copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
+                 symlink=False)
+
+    mkdir(bin_dir)
+    py_executable = join(bin_dir, os.path.basename(sys.executable))
+    if 'Python.framework' in prefix:
+        # OS X framework builds cause validation to break
+        # https://github.com/pypa/virtualenv/issues/322
+        if os.environ.get('__PYVENV_LAUNCHER__'):
+            del os.environ["__PYVENV_LAUNCHER__"]
+        if re.search(r'/Python(?:-32|-64)*$', py_executable):
+            # The name of the python executable is not quite what
+            # we want, rename it.
+            py_executable = os.path.join(
+                    os.path.dirname(py_executable), 'python')
+
+    logger.notify('New %s executable in %s', expected_exe, py_executable)
+    pcbuild_dir = os.path.dirname(sys.executable)
+    pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
+    if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
+        logger.notify('Detected python running from build directory %s', pcbuild_dir)
+        logger.notify('Writing .pth file linking to build directory for *.pyd files')
+        writefile(pyd_pth, pcbuild_dir)
+    else:
+        pcbuild_dir = None
+        if os.path.exists(pyd_pth):
+            logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
+            os.unlink(pyd_pth)
+
+    if sys.executable != py_executable:
+        ## FIXME: could I just hard link?
+        executable = sys.executable
+        shutil.copyfile(executable, py_executable)
+        make_exe(py_executable)
+        if is_win or is_cygwin:
+            pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
+            if os.path.exists(pythonw):
+                logger.info('Also created pythonw.exe')
+                shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
+            python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
+            python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
+            if os.path.exists(python_d):
+                logger.info('Also created python_d.exe')
+                shutil.copyfile(python_d, python_d_dest)
+            elif os.path.exists(python_d_dest):
+                logger.info('Removed python_d.exe as it is no longer at the source')
+                os.unlink(python_d_dest)
+            # we need to copy the DLL to enforce that windows will load the correct one.
+            # may not exist if we are cygwin.
+            py_executable_dll = 'python%s%s.dll' % (
+                sys.version_info[0], sys.version_info[1])
+            py_executable_dll_d = 'python%s%s_d.dll' % (
+                sys.version_info[0], sys.version_info[1])
+            pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
+            pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
+            pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
+            if os.path.exists(pythondll):
+                logger.info('Also created %s' % py_executable_dll)
+                shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
+            if os.path.exists(pythondll_d):
+                logger.info('Also created %s' % py_executable_dll_d)
+                shutil.copyfile(pythondll_d, pythondll_d_dest)
+            elif os.path.exists(pythondll_d_dest):
+                logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
+                os.unlink(pythondll_d_dest)
+        if is_pypy:
+            # make a symlink python --> pypy-c
+            python_executable = os.path.join(os.path.dirname(py_executable), 'python')
+            if sys.platform in ('win32', 'cygwin'):
+                python_executable += '.exe'
+            logger.info('Also created executable %s' % python_executable)
+            copyfile(py_executable, python_executable, symlink)
+
+            if is_win:
+                for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll',
+                             'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll',
+                             'tcl85.dll', 'tk85.dll']:
+                    src = join(prefix, name)
+                    if os.path.exists(src):
+                        copyfile(src, join(bin_dir, name), symlink)
+
+                for d in sys.path:
+                    if d.endswith('lib_pypy'):
+                        break
+                else:
+                    logger.fatal('Could not find lib_pypy in sys.path')
+                    raise SystemExit(3)
+                logger.info('Copying lib_pypy')
+                copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink)
+
+    if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
+        secondary_exe = os.path.join(os.path.dirname(py_executable),
+                                     expected_exe)
+        py_executable_ext = os.path.splitext(py_executable)[1]
+        if py_executable_ext.lower() == '.exe':
+            # python2.4 gives an extension of '.4' :P
+            secondary_exe += py_executable_ext
+        if os.path.exists(secondary_exe):
+            logger.warn('Not overwriting existing %s script %s (you must use %s)'
+                        % (expected_exe, secondary_exe, py_executable))
+        else:
+            logger.notify('Also creating executable in %s' % secondary_exe)
+            shutil.copyfile(sys.executable, secondary_exe)
+            make_exe(secondary_exe)
+
+    if '.framework' in prefix:
+        if 'Python.framework' in prefix:
+            logger.debug('MacOSX Python framework detected')
+            # Make sure we use the embedded interpreter inside
+            # the framework, even if sys.executable points to
+            # the stub executable in ${sys.prefix}/bin
+            # See http://groups.google.com/group/python-virtualenv/
+            #                              browse_thread/thread/17cab2f85da75951
+            original_python = os.path.join(
+                prefix, 'Resources/Python.app/Contents/MacOS/Python')
+        if 'EPD' in prefix:
+            logger.debug('EPD framework detected')
+            original_python = os.path.join(prefix, 'bin/python')
+        shutil.copy(original_python, py_executable)
+
+        # Copy the framework's dylib into the virtual
+        # environment
+        virtual_lib = os.path.join(home_dir, '.Python')
+
+        if os.path.exists(virtual_lib):
+            os.unlink(virtual_lib)
+        copyfile(
+            os.path.join(prefix, 'Python'),
+            virtual_lib,
+            symlink)
+
+        # And then change the install_name of the copied python executable
+        try:
+            mach_o_change(py_executable,
+                          os.path.join(prefix, 'Python'),
+                          '@executable_path/../.Python')
+        except:
+            e = sys.exc_info()[1]
+            logger.warn("Could not call mach_o_change: %s. "
+                        "Trying to call install_name_tool instead." % e)
+            try:
+                call_subprocess(
+                    ["install_name_tool", "-change",
+                     os.path.join(prefix, 'Python'),
+                     '@executable_path/../.Python',
+                     py_executable])
+            except:
+                logger.fatal("Could not call install_name_tool -- you must "
+                             "have Apple's development tools installed")
+                raise
+
+    if not is_win:
+        # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
+        py_exe_version_major = 'python%s' % sys.version_info[0]
+        py_exe_version_major_minor = 'python%s.%s' % (
+            sys.version_info[0], sys.version_info[1])
+        py_exe_no_version = 'python'
+        required_symlinks = [py_exe_no_version, py_exe_version_major,
+                             py_exe_version_major_minor]
+
+        py_executable_base = os.path.basename(py_executable)
+
+        if py_executable_base in required_symlinks:
+            # Don't try to symlink to yourself.
+            required_symlinks.remove(py_executable_base)
+
+        for pth in required_symlinks:
+            full_pth = join(bin_dir, pth)
+            if os.path.exists(full_pth):
+                os.unlink(full_pth)
+            if symlink:
+                os.symlink(py_executable_base, full_pth)
+            else:
+                copyfile(py_executable, full_pth, symlink)
+
+    if is_win and ' ' in py_executable:
+        # There's a bug with subprocess on Windows when using a first
+        # argument that has a space in it.  Instead we have to quote
+        # the value:
+        py_executable = '"%s"' % py_executable
+    # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
+    cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
+        'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
+    logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
+    try:
+        proc = subprocess.Popen(cmd,
+                            stdout=subprocess.PIPE)
+        proc_stdout, proc_stderr = proc.communicate()
+    except OSError:
+        e = sys.exc_info()[1]
+        if e.errno == errno.EACCES:
+            logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
+            sys.exit(100)
+        else:
+            raise  # re-raise with the original traceback intact
+
+    proc_stdout = proc_stdout.strip().decode("utf-8")
+    proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
+    norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
+    if hasattr(norm_home_dir, 'decode'):
+        norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
+    if proc_stdout != norm_home_dir:
+        logger.fatal(
+            'ERROR: The executable %s is not functioning' % py_executable)
+        logger.fatal(
+            'ERROR: It thinks sys.prefix is %r (should be %r)'
+            % (proc_stdout, norm_home_dir))
+        logger.fatal(
+            'ERROR: virtualenv is not compatible with this system or executable')
+        if is_win:
+            logger.fatal(
+                'Note: some Windows users have reported this error when they '
+                'installed Python for "Only this user" or have multiple '
+                'versions of Python installed. Copying the appropriate '
+                'PythonXX.dll to the virtualenv Scripts/ directory may fix '
+                'this problem.')
+        sys.exit(100)
+    else:
+        logger.info('Got sys.prefix result: %r' % proc_stdout)
+
+    pydistutils = os.path.expanduser('~/.pydistutils.cfg')
+    if os.path.exists(pydistutils):
+        logger.notify('Please make sure you remove any previous custom paths from '
+                      'your %s file.' % pydistutils)
+    ## FIXME: really this should be calculated earlier
+
+    fix_local_scheme(home_dir, symlink)
+
+    if site_packages:
+        if os.path.exists(site_packages_filename):
+            logger.info('Deleting %s' % site_packages_filename)
+            os.unlink(site_packages_filename)
+
+    return py_executable
+
+
+def install_activate(home_dir, bin_dir, prompt=None):
+    home_dir = os.path.abspath(home_dir)
+    if is_win or (is_jython and os._name == 'nt'):
+        files = {
+            'activate.bat': ACTIVATE_BAT,
+            'deactivate.bat': DEACTIVATE_BAT,
+            'activate.ps1': ACTIVATE_PS,
+        }
+
+        # MSYS needs paths of the form /c/path/to/file
+        drive, tail = os.path.splitdrive(home_dir.replace(os.sep, '/'))
+        home_dir_msys = (drive and "/%s%s" or "%s%s") % (drive[:1], tail)
+
+        # Run-time conditional enables (basic) Cygwin compatibility
+        home_dir_sh = ("""$(if [ "$OSTYPE" "==" "cygwin" ]; then cygpath -u '%s'; else echo '%s'; fi;)""" %
+                       (home_dir, home_dir_msys))
+        files['activate'] = ACTIVATE_SH.replace('__VIRTUAL_ENV__', home_dir_sh)
+
+    else:
+        files = {'activate': ACTIVATE_SH}
+
+        # Supplying activate.fish in addition to, not instead of, the
+        # bash script support.
+        files['activate.fish'] = ACTIVATE_FISH
+
+        # same for csh/tcsh support...
+        files['activate.csh'] = ACTIVATE_CSH
+
+    files['activate_this.py'] = ACTIVATE_THIS
+    if hasattr(home_dir, 'decode'):
+        home_dir = home_dir.decode(sys.getfilesystemencoding())
+    vname = os.path.basename(home_dir)
+    for name, content in files.items():
+        content = content.replace('__VIRTUAL_PROMPT__', prompt or '')
+        content = content.replace('__VIRTUAL_WINPROMPT__', prompt or '(%s)' % vname)
+        content = content.replace('__VIRTUAL_ENV__', home_dir)
+        content = content.replace('__VIRTUAL_NAME__', vname)
+        content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
+        writefile(os.path.join(bin_dir, name), content)
+
+def install_distutils(home_dir):
+    distutils_path = change_prefix(distutils.__path__[0], home_dir)
+    mkdir(distutils_path)
+    ## FIXME: maybe this prefix setting should only be put in place if
+    ## there's a local distutils.cfg with a prefix setting?
+    home_dir = os.path.abspath(home_dir)
+    ## FIXME: this is breaking things, removing for now:
+    #distutils_cfg = DISTUTILS_CFG + "\n[install]\nprefix=%s\n" % home_dir
+    writefile(os.path.join(distutils_path, '__init__.py'), DISTUTILS_INIT)
+    writefile(os.path.join(distutils_path, 'distutils.cfg'), DISTUTILS_CFG, overwrite=False)
+
+def fix_local_scheme(home_dir, symlink=True):
+    """
+    Platforms that use the "posix_local" install scheme (like Ubuntu with
+    Python 2.7) need to be given an additional "local" location, sigh.
+    """
+    try:
+        import sysconfig
+    except ImportError:
+        pass
+    else:
+        if sysconfig._get_default_scheme() == 'posix_local':
+            local_path = os.path.join(home_dir, 'local')
+            if not os.path.exists(local_path):
+                os.mkdir(local_path)
+                for subdir_name in os.listdir(home_dir):
+                    if subdir_name == 'local':
+                        continue
+                    copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)),
+                             os.path.join(local_path, subdir_name), symlink)
+
+def fix_lib64(lib_dir, symlink=True):
+    """
+    Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
+    instead of lib/pythonX.Y.  If this is such a platform we'll just create a
+    symlink so lib64 points to lib
+    """
+    if [p for p in distutils.sysconfig.get_config_vars().values()
+        if isinstance(p, basestring) and 'lib64' in p]:
+        # PyPy's library path scheme is not affected by this.
+        # Return early or we will die on the following assert.
+        if is_pypy:
+            logger.debug('PyPy detected, skipping lib64 symlinking')
+            return
+
+        logger.debug('This system uses lib64; symlinking lib64 to lib')
+
+        assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
+            "Unexpected python lib dir: %r" % lib_dir)
+        lib_parent = os.path.dirname(lib_dir)
+        top_level = os.path.dirname(lib_parent)
+        lib_dir = os.path.join(top_level, 'lib')
+        lib64_link = os.path.join(top_level, 'lib64')
+        assert os.path.basename(lib_parent) == 'lib', (
+            "Unexpected parent dir: %r" % lib_parent)
+        if os.path.lexists(lib64_link):
+            return
+        if symlink:
+            os.symlink('lib', lib64_link)
+        else:
+            copyfile(lib_dir, lib64_link, symlink)
+
+def resolve_interpreter(exe):
+    """
+    If the executable given isn't an absolute path, search $PATH for the interpreter
+    """
+    # If the "executable" is a version number, get the installed executable for
+    # that version
+    orig_exe = exe
+    python_versions = get_installed_pythons()
+    if exe in python_versions:
+        exe = python_versions[exe]
+
+    if os.path.abspath(exe) != exe:
+        paths = os.environ.get('PATH', '').split(os.pathsep)
+        for path in paths:
+            if os.path.exists(os.path.join(path, exe)):
+                exe = os.path.join(path, exe)
+                break
+    if not os.path.exists(exe):
+        logger.fatal('The executable %s (from --python=%s) does not exist' % (exe, orig_exe))
+        raise SystemExit(3)
+    if not is_executable(exe):
+        logger.fatal('The executable %s (from --python=%s) is not executable' % (exe, orig_exe))
+        raise SystemExit(3)
+    return exe
+
+def is_executable(exe):
+    """Checks a file is executable"""
+    return os.access(exe, os.X_OK)
+
+############################################################
+## Relocating the environment:
+
+def make_environment_relocatable(home_dir):
+    """
+    Makes the already-existing environment use relative paths, and takes out
+    the #!-based environment selection in scripts.
+    """
+    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+    activate_this = os.path.join(bin_dir, 'activate_this.py')
+    if not os.path.exists(activate_this):
+        logger.fatal(
+            'The environment doesn\'t have a file %s -- please re-run virtualenv '
+            'on this environment to update it' % activate_this)
+    fixup_scripts(home_dir, bin_dir)
+    fixup_pth_and_egg_link(home_dir)
+    ## FIXME: need to fix up distutils.cfg
+
+OK_ABS_SCRIPTS = ['python', 'python%s' % sys.version[:3],
+                  'activate', 'activate.bat', 'activate_this.py',
+                  'activate.fish', 'activate.csh']
+
+def fixup_scripts(home_dir, bin_dir):
+    if is_win:
+        new_shebang_args = (
+            '%s /c' % os.path.normcase(os.environ.get('COMSPEC', 'cmd.exe')),
+            '', '.exe')
+    else:
+        new_shebang_args = ('/usr/bin/env', sys.version[:3], '')
+
+    # This is what we expect at the top of scripts:
+    shebang = '#!%s' % os.path.normcase(os.path.join(
+        os.path.abspath(bin_dir), 'python%s' % new_shebang_args[2]))
+    # This is what we'll put:
+    new_shebang = '#!%s python%s%s' % new_shebang_args
+
+    for filename in os.listdir(bin_dir):
+        filename = os.path.join(bin_dir, filename)
+        if not os.path.isfile(filename):
+            # ignore subdirs, e.g. .svn ones.
+            continue
+        f = open(filename, 'rb')
+        try:
+            try:
+                lines = f.read().decode('utf-8').splitlines()
+            except UnicodeDecodeError:
+                # This is probably a binary program instead
+                # of a script, so just ignore it.
+                continue
+        finally:
+            f.close()
+        if not lines:
+            logger.warn('Script %s is an empty file' % filename)
+            continue
+
+        old_shebang = lines[0].strip()
+        old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:])
+
+        if not old_shebang.startswith(shebang):
+            if os.path.basename(filename) in OK_ABS_SCRIPTS:
+                logger.debug('Cannot make script %s relative' % filename)
+            elif lines[0].strip() == new_shebang:
+                logger.info('Script %s has already been made relative' % filename)
+            else:
+                logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)'
+                            % (filename, shebang))
+            continue
+        logger.notify('Making script %s relative' % filename)
+        script = relative_script([new_shebang] + lines[1:])
+        f = open(filename, 'wb')
+        f.write('\n'.join(script).encode('utf-8'))
+        f.close()
+
+def relative_script(lines):
+    "Return a script that'll work in a relocatable environment."
+    activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this"
+    # Find the last future statement in the script. If we insert the activation
+    # line before a future statement, Python will raise a SyntaxError.
+    activate_at = None
+    for idx, line in reversed(list(enumerate(lines))):
+        if line.split()[:3] == ['from', '__future__', 'import']:
+            activate_at = idx + 1
+            break
+    if activate_at is None:
+        # Activate after the shebang.
+        activate_at = 1
+    return lines[:activate_at] + ['', activate, ''] + lines[activate_at:]
+
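+# For example (sketch): given lines ['#!/usr/bin/env python2.7',
+# 'from __future__ import with_statement', 'main()'], the activation
+# boilerplate is inserted after the __future__ import, since inserting it
+# any earlier would be a SyntaxError.
+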
+def fixup_pth_and_egg_link(home_dir, sys_path=None):
+    """Makes .pth and .egg-link files use relative paths"""
+    home_dir = os.path.normcase(os.path.abspath(home_dir))
+    if sys_path is None:
+        sys_path = sys.path
+    for path in sys_path:
+        if not path:
+            path = '.'
+        if not os.path.isdir(path):
+            continue
+        path = os.path.normcase(os.path.abspath(path))
+        if not path.startswith(home_dir):
+            logger.debug('Skipping system (non-environment) directory %s' % path)
+            continue
+        for filename in os.listdir(path):
+            filename = os.path.join(path, filename)
+            if filename.endswith('.pth'):
+                if not os.access(filename, os.W_OK):
+                    logger.warn('Cannot write .pth file %s, skipping' % filename)
+                else:
+                    fixup_pth_file(filename)
+            if filename.endswith('.egg-link'):
+                if not os.access(filename, os.W_OK):
+                    logger.warn('Cannot write .egg-link file %s, skipping' % filename)
+                else:
+                    fixup_egg_link(filename)
+
+def fixup_pth_file(filename):
+    lines = []
+    f = open(filename)
+    prev_lines = f.readlines()
+    f.close()
+    for line in prev_lines:
+        line = line.strip()
+        if (not line or line.startswith('#') or line.startswith('import ')
+            or os.path.abspath(line) != line):
+            lines.append(line)
+        else:
+            new_value = make_relative_path(filename, line)
+            if line != new_value:
+                logger.debug('Rewriting path %s as %s (in %s)' % (line, new_value, filename))
+            lines.append(new_value)
+    if lines == prev_lines:
+        logger.info('No changes to .pth file %s' % filename)
+        return
+    logger.notify('Making paths in .pth file %s relative' % filename)
+    f = open(filename, 'w')
+    f.write('\n'.join(lines) + '\n')
+    f.close()
+
+def fixup_egg_link(filename):
+    f = open(filename)
+    link = f.readline().strip()
+    f.close()
+    if os.path.abspath(link) != link:
+        logger.debug('Link in %s already relative' % filename)
+        return
+    new_link = make_relative_path(filename, link)
+    logger.notify('Rewriting link %s in %s as %s' % (link, filename, new_link))
+    f = open(filename, 'w')
+    f.write(new_link)
+    f.close()
+
+def make_relative_path(source, dest, dest_is_directory=True):
+    """
+    Make a filename relative, where the filename is dest, and it is
+    being referred to from the filename source.
+
+        >>> make_relative_path('/usr/share/something/a-file.pth',
+        ...                    '/usr/share/another-place/src/Directory')
+        '../another-place/src/Directory'
+        >>> make_relative_path('/usr/share/something/a-file.pth',
+        ...                    '/home/user/src/Directory')
+        '../../../home/user/src/Directory'
+        >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+        './'
+    """
+    source = os.path.dirname(source)
+    if not dest_is_directory:
+        dest_filename = os.path.basename(dest)
+        dest = os.path.dirname(dest)
+    dest = os.path.normpath(os.path.abspath(dest))
+    source = os.path.normpath(os.path.abspath(source))
+    dest_parts = dest.strip(os.path.sep).split(os.path.sep)
+    source_parts = source.strip(os.path.sep).split(os.path.sep)
+    while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
+        dest_parts.pop(0)
+        source_parts.pop(0)
+    full_parts = ['..']*len(source_parts) + dest_parts
+    if not dest_is_directory:
+        full_parts.append(dest_filename)
+    if not full_parts:
+        # Special case for the current directory (otherwise it'd be '')
+        return './'
+    return os.path.sep.join(full_parts)
+
+
+
+############################################################
+## Bootstrap script creation:
+
+def create_bootstrap_script(extra_text, python_version=''):
+    """
+    Creates a bootstrap script, which is like this script but with
+    extend_parser, adjust_options, and after_install hooks.
+
+    This returns a string that (written to disk of course) can be used
+    as a bootstrap script with your own customizations.  The script
+    will be the standard virtualenv.py script, with your extra text
+    added (your extra text should be Python code).
+
+    If you include these functions, they will be called:
+
+    ``extend_parser(optparse_parser)``:
+        You can add or remove options from the parser here.
+
+    ``adjust_options(options, args)``:
+        You can change options here, or change the args (if you accept
+        different kinds of arguments, be sure you modify ``args`` so it is
+        only ``[DEST_DIR]``).
+
+    ``after_install(options, home_dir)``:
+
+        After everything is installed, this function is called.  This
+        is probably the function you are most likely to use.  An
+        example would be::
+
+            def after_install(options, home_dir):
+                subprocess.call([join(home_dir, 'bin', 'easy_install'),
+                                 'MyPackage'])
+                subprocess.call([join(home_dir, 'bin', 'my-package-script'),
+                                 'setup', home_dir])
+
+        This example immediately installs a package, and runs a setup
+        script from that package.
+
+    If you provide something like ``python_version='2.5'`` then the
+    script will start with ``#!/usr/bin/env python2.5`` instead of
+    ``#!/usr/bin/env python``.  You can use this when the script must
+    be run with a particular Python version.
+    """
+    filename = __file__
+    if filename.endswith('.pyc'):
+        filename = filename[:-1]
+    f = codecs.open(filename, 'r', encoding='utf-8')
+    content = f.read()
+    f.close()
+    py_exe = 'python%s' % python_version
+    content = (('#!/usr/bin/env %s\n' % py_exe)
+               + '## WARNING: This file is generated\n'
+               + content)
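+    # The marker is written as two adjacent string literals so that this
+    # line is not itself matched and replaced along with the real marker
+    # below.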
+    return content.replace('##EXT' 'END##', extra_text)
+
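+# Usage sketch (output path hypothetical; EXTRA_TEXT holds your hook
+# functions as a string):
+#
+#   script = create_bootstrap_script(EXTRA_TEXT)
+#   with open('my-bootstrap.py', 'w') as f:
+#       f.write(script)
+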
+##EXTEND##
+
+def convert(s):
+    b = base64.b64decode(s.encode('ascii'))
+    return zlib.decompress(b).decode('utf-8')
+
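+# Each ##file section below is a support file (site.py, the activate
+# scripts, the distutils shims) stored as a zlib-compressed, base64-encoded
+# blob; convert() above inflates it back to text when this module loads.
+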
+##file site.py
+SITE_PY = convert("""
+eJzFPf1z2zaWv/OvwMqToZTKdOJ0e3tO3RsncVrfuYm3yc7m1vXoKAmyWFMkS5C2tTd3f/u9DwAE
++CHb2+6cphNLJPDw8PC+8PAeOhqNTopCZkuxyZd1KoWScblYiyKu1kqs8lJU66Rc7hdxWW3h6eIm
+vpZKVLlQWxVhqygInv/GT/BcfF4nyqAA3+K6yjdxlSziNN2KZFPkZSWXYlmXSXYtkiypkjhN/g4t
+8iwSz387BsFZJmDmaSJLcStLBXCVyFfiYlut80yM6wLn/DL6Y/xqMhVqUSZFBQ1KjTNQZB1XQSbl
+EtCElrUCUiaV3FeFXCSrZGEb3uV1uhRFGi+k+K//4qlR0zAMVL6Rd2tZSpEBMgBTAqwC8YCvSSkW
++VJGQryRixgH4OcNsQKGNsU1U0jGLBdpnl3DnDK5kErF5VaM53VFgAhlscwBpwQwqJI0De7y8kZN
+YElpPe7gkYiZPfzJMHvAPHH8LucAjh+z4C9Zcj9l2MA9CK5aM9uUcpXcixjBwk95Lxcz/WycrMQy
+Wa2ABlk1wSYBI6BEmswPClqOb/UKfXdAWFmujGEMiShzY35JPaLgrBJxqoBt6wJppAjzd3KexBlQ
+I7uF4QAikDToG2eZqMqOQ7MTOQAocR0rkJKNEuNNnGTArD/GC0L7r0m2zO/UhCgAq6XEL7Wq3PmP
+ewgArR0CTANcLLOadZYmNzLdTgCBz4B9KVWdVigQy6SUiyovE6kIAKC2FfIekJ6KuJSahMyZRm6n
+RH+iSZLhwqKAocDjSyTJKrmuS5IwsUqAc4Er3n/8Sbw7fXN28kHzmAHGMnu9AZwBCi20gxMMIA5q
+VR6kOQh0FJzjHxEvlyhk1zg+4NU0OHhwpYMxzL2I2n2cBQey68XVw8AcK1AmNFZA/f4bukzVGujz
+Pw+sdxCcDFGFJs7f7tY5yGQWb6RYx8xfyBnBtxrOd1FRrV8DNyiEUwGpFC4OIpggPCCJS7NxnklR
+AIulSSYnAVBoTm39VQRW+JBn+7TWLU4ACGWQwUvn2YRGzCRMtAvrNeoL03hLM9NNArvOm7wkxQH8
+ny1IF6VxdkM4KmIo/jaX10mWIULIC0G4F9LA6iYBTlxG4pxakV4wjUTI2otbokjUwEvIdMCT8j7e
+FKmcsviibt2tRmgwWQmz1ilzHLSsSL3SqjVT7eW9w+hLi+sIzWpdSgBezz2hW+X5VMxBZxM2Rbxh
+8arucuKcoEeeqBPyBLWEvvgdKHqiVL2R9iXyCmgWYqhgladpfgckOwoCIfawkTHKPnPCW3gH/wJc
+/DeV1WIdBM5IFrAGhcgPgUIgYBJkprlaI+Fxm2bltpJJMtYUebmUJQ31OGIfMOKPbIxzDT7klTZq
+PF1c5XyTVKiS5tpkJmzxsrBi/fia5w3TAMutiGamaUOnDU4vLdbxXBqXZC5XKAl6kV7bZYcxg54x
+yRZXYsNWBt4BWWTCFqRfsaDSWVWSnACAwcIXZ0lRp9RIIYOJGAbaFAR/E6NJz7WzBOzNZjlAhcTm
+ewH2B3D7O4jR3ToB+iwAAmgY1FKwfPOkKtFBaPRR4Bt905/HB049W2nbxEOu4iTVVj7OgjN6eFqW
+JL4LWWCvqSaGghlmFbp21xnQEcV8NBoFgXGHtsp8zVVQldsjYAVhxpnN5nWChm82Q1Ovf6iARxHO
+wF43287CAw1hOn0AKjldVmW+wdd2bp9AmcBY2CPYExekZSQ7yB4nvkbyuSq9ME3RdjvsLFAPBRc/
+nb4/+3L6SRyLy0alTdv67ArGPM1iYGuyCMBUrWEbXQYtUfElqPvEezDvxBRgz6g3ia+Mqxp4F1D/
+XNb0Gqax8F4Gpx9O3pyfzv7y6fSn2aezz6eAINgZGezRlNE81uAwqgiEA7hyqSJtX4NOD3rw5uST
+fRDMEjX75mtgN3gyvpYVMHE5hhlPRbiJ7xUwaDilphPEsdMALHg4mYjvxOHz568OCVqxLbYADMyu
+0xQfzrRFnyXZKg8n1PgXdumPWUlp/+3y6OsrcXwswl/i2zgMwIdqmjJL/Eji9HlbSOhawZ9xriZB
+sJQrEL0biQI6fk5+8YQ7wJJAy1zb6V/yJDPvmSvdIUh/jKkH4DCbLdJYKWw8m4VABOrQ84EOETvX
+KHVj6Fhs3a4TjQp+SgkLm2GXKf7Tg2I8p36IBqPodjGNQFw3i1hJbkXTh36zGeqs2WysBwRhJokB
+h4vVUChME9RZZQJ+LXEe6rC5ylP8ifBRC5AA4tYKtSQukt46RbdxWks1diYFRByPW2RERZso4kdw
+UcZgiZulm0za1DQ8A82AfGkOWrRsUQ4/e+DvgLoymzjc6PHei2mGmP477zQIB3A5Q1T3SrWgsHYU
+F6cX4tWLw310Z2DPubTU8ZqjhU6yWtqHK1gtIw+MMPcy8uLSZYV6Fp8e7Ya5iezKdFlhpZe4lJv8
+Vi4BW2RgZ5XFT/QGduYwj0UMqwh6nfwBVqHGb4xxH8qzB2lB3wGotyEoZv3N0u9xMEBmChQRb6yJ
+1HrXz6awKPPbBJ2N+Va/BFsJyhItpnFsAmfhPCZDkwgaArzgDCl1J0NQh2XNDivhjSDRXiwbxRoR
+uHPU1Ff09SbL77IZ74SPUemOJ5Z1UbA082KDZgn2xHuwQoBkDhu7hmgMBVx+gbK1D8jD9GG6QFna
+WwAgMPSKtmsOLLPVoynyrhGHRRiT14KEt5ToL9yaIWirZYjhQKK3kX1gtARCgslZBWdVg2YylDXT
+DAZ2SOJz3XnEW1AfQIuKEZjNsYbGjQz9Lo9AOYtzVyk5/dAif/nyhdlGrSm+gojNcdLoQqzIWEbF
+FgxrAjrBeGQcrSE2uAPnFsDUSrOm2P8k8oK9MVjPCy3b4AfA7q6qiqODg7u7u0hHF/Ly+kCtDv74
+p2+++dML1onLJfEPTMeRFh1qiw7oHXq00bfGAn1nVq7Fj0nmcyPBGkvyysgVRfy+r5NlLo72J1Z/
+Ihc3Zhr/Na4MKJCZGZSpDLQdNRg9U/vPoldqJJ6RdbZtxxP2S7RJtVbMt7rQo8rBEwC/ZZHXaKob
+TlDiK7BusENfynl9HdrBPRtpfsBUUU7Hlgf2X14hBj5nGL4ypniGWoLYAi2+Q/qfmG1i8o60hkDy
+oonq7J63/VrMEHf5eHm3vqYjNGaGiULuQInwmzxaAG3jruTgR7u2aPcc19Z8PENgLH1gmFc7lmMU
+HMIF12LqSp3D1ejxgjTdsWoGBeOqRlDQ4CTOmdoaHNnIEEGid2M2+7ywugXQqRU5NPEBswrQwh2n
+Y+3arOB4QsgDx+IlPZHgIh913r3gpa3TlAI6LR71qMKAvYVGO50DX44NgKkYlX8ZcUuzTfnYWhRe
+gx5gOceAkMFWHWbCN64PONob9bBTx+oP9WYa94HARRpzLOpR0AnlYx6hVCBNxdjvOcTilrjdwXZa
+HGIqs0wk0mpAuNrKo1eodhqmVZKh7nUWKVqkOXjFVisSIzXvfWeB9kH4uM+YaQnUZGjI4TQ6Jm/P
+E8BQt8Pw2XWNgQY3DoMYbRJF1g3JtIZ/wK2g+AYFo4CWBM2CeayU+RP7HWTOzld/GWAPS2hkCLfp
+kBvSsRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bUclG6CESmQM8eCkJoB3Omlt8HBJxGe
+gJCEIuT7SslCfCVGsHxtUX2c7v5dudQEIcZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8B5fH
+DfS7bG6Y1gZdwFn3FbdFCjQyxWFGExfVK0MYN5j8h2OnRUMsM4hhKG8g70jHjDQJ7HJr0LDgBoy3
+5u2x9GM3YoF9x2GuDuXmHvZ/YZmoRa5Cipm0YxfuR3NFlzYW2/NkPoI/3gKMJlceJJnq+AVGWf6B
+QUIPetgH3ZsshkWWcXmXZCEpME2/Y39pOnhYUnpG7uATbacOYKIY8Tx4X4KA0NHnAYgTagLYlctQ
+abe/C3bnFEcWLncfeW7z5dGrqy5xp0MRHvvpX6rT+6qMFa5WyovGQoGr1TXgqHRhcnG21YeX+nAb
+twllrmAXKT5++iKQEBzXvYu3T5t6w/CIzYNz8j4GddBrD5KrNTtiF0AEtSIyykH4dI58PLJPndyO
+iT0ByJMYZseiGEiaT/4ROLsWCsbYX24zjKO1VQZ+4PU3X896IqMukt98PXpglBYx+sR+3PIE7cic
+VLBrtqWMU3I1nD4UVMwa1rFtignrc9r+aR676vE5NVo29t3fAj8GCobUJfgIL6YN2bpTxY/vTg3C
+03ZqB7DObtV89mgRYG+fz3+BHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8AdwQ
+cjRh0p2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+HFU6ve+B7Bge/r7p8IiBvTqMeMmpbbIZ4
+wQclhz1K9gnzfvqMf9dZP27mw4L1/zHLF/+cST5hKgaaNh4+rH5iuXbXAHuEeRpwO3e4hd2h+axy
+ZZw7VklKPEfd9VzcUboCxVbxpAigLNnv64GDUqoPvd/WZclH16QCC1nu43HsVGCmlvH8ek3Mnjj4
+ICvExDZbUKzayevJ+4Qv1NFnO5Ow2Tf0c+c6NzErmd0mJfQFhTsOf/j442nYb0IwjgudHm9FHu83
+INwnMG6oiRM+pQ9T6Cld/nH10d66+AQ1GQEmIqzJ1iVsJxBs4gj9a/BARMg7sOVjdtyhL9ZycTOT
+lDqAbIpdnaD4W3yNmNiMAj//S8UrSmKDmSzSGmnFjjdmH67qbEHnI5UE/0qnCmPqECUEcPhvlcbX
+Ykydlxh60txI0anbuNTeZ1HmmJwq6mR5cJ0shfy1jlPc1svVCnDBwyv9KuLhKQIl3nFOAyctKrmo
+y6TaAglileuzP0p/cBrOtzzRsYckH/MwATEh4kh8wmnjeybc0pDLBAf8Ew+cJO67sYOTrBDRc3if
+5TMcdUY5vlNGqnsuT4+D9gg5ABgBUJj/aKIjd/4bSa/cA0Zac5eoqCU9UrqRhpycMYQynmCkg3/T
+T58RXd4awPJ6GMvr3Vhet7G87sXy2sfyejeWrkjgwtqglZGEvsBV+1ijN9/GjTnxMKfxYs3tMPcT
+czwBoijMBtvIFKdAe5EtPt8jIKS2nQNnetjkzyScVFrmHALXIJH78RBLb+ZN8rrTmbJxdGeeinFn
+h3KI/L4HUUSpYnPqzvK2jKs48uTiOs3nILYW3WkDYCra6UQcK81uZ3OO7rYs1ejiPz//8PEDNkdQ
+I5PeQN1wEdGw4FTGz+PyWnWlqdn8FcCO1NJPxKFuGuDeIyNrPMoe//OOMjyQccQdZSjkogAPgLK6
+bDM39ykMW891kpR+zkzOh03HYpRVo2ZSA0Q6ubh4d/L5ZEQhv9H/jlyBMbT1pcPFx7SwDbr+m9vc
+Uhz7gFDr2FZj/Nw5ebRuOOJhG2vAdjzf1oPDxxjs3jCBP8t/KqVgSYBQkQ7+PoVQj945/Kb9UIc+
+hhE7yX/uyRo7K/adI3uOi+KIft+xQ3sA/7AT9xgzIIB2ocZmZ9DslVtK35rXHRR1gD7S1/vNe832
+1qu9k/EpaifR4wA6lLXNht0/75yGjZ6S1ZvT788+nJ+9uTj5/IPjAqIr9/HTwaE4/fGLoPwQNGDs
+E8WYGlFhJhIYFrfQSSxz+K/GyM+yrjhIDL3enZ/rk5oNlrpg7jPanAiecxqThcZBM45C24c6/wgx
+SvUGyakponQdqjnC/dKG61lUrvOjqVRpjs5qrbdeulbM1JTRuXYE0geNXVIwCE4xg1eUxV6ZXWHJ
+J4C6zqoHKW2jbWJISkHBTrqAc/5lTle8QCl1hidNZ63oL0MX1/AqUkWawE7udWhlSXfD9JiGcfRD
+e8DNePVpQKc7jKwb8qwHsUCr9Trkuen+k4bRfq0Bw4bB3sG8M0npIZSBjcltIsRGfJITynv4apde
+r4GCBcODvgoX0TBdArOPYXMt1glsIIAn12B9cZ8AEFor4R8IHDnRAZljdkb4drPc/3OoCeK3/vnn
+nuZVme7/TRSwCxKcShT2ENNt/A42PpGMxOnH95OQkaPUXPHnGssDwCGhAKgj7ZS/xCfos7GS6Urn
+l/j6AF9oP4Fet7qXsih1937XOEQJeKbG5DU8U4Z+IaZ7WdhTnMqkBRorHyxmWEHopiGYz574tJZp
+qvPdz96dn4LviMUYKEF87nYKw3G8BI/QdfIdVzi2QOEBO7wukY1LdGEpyWIZec16g9YoctTby8uw
+60SB4W6vThS4jBPloj3GaTMsU04QISvDWphlZdZutUEKu22I4igzzBKzi5ISWH2eAF6mpzFviWCv
+hKUeJgLPp8hJVpmMxTRZgB4FlQsKdQpCgsTFekbivDzjGHheKlMGBQ+LbZlcrys83YDOEZVgYPMf
+T76cn32gsoTDV43X3cOcU9oJTDmJ5BhTBDHaAV/ctD/kqtmsj2f1K4SB2gf+tF9xdsoxD9Dpx4FF
+/NN+xXVox85OkGcACqou2uKBGwCnW5/cNLLAuNp9MH7cFMAGMx8MxSKx7EUnerjz63KibdkyJRT3
+MS+fcICzKmxKmu7spqS1P3qOqwLPuZbj/kbwtk+2zGcOXW86b4aS39xPRwqxJBYw6rb2xzDZYZ2m
+ejoOsw1xC21rtY39OXNipU67RYaiDEQcu50nLpP1K2HdnDnQS6PuABPfanSNJPaq8tHP2Uh7GB4m
+ltidfYrpSGUsZAQwkiF17U8NPhRaBFAglP07diR3Onl+6M3RsQYPz1HrLrCNP4Ai1Lm4VOORl8CJ
+8OVXdhz5FaGFevRIhI6nkskst3li+Llbo1f50p9jrwxQEBPFroyzazlmWFMD8yuf2AMhWNK2Hqkv
+k6s+wyLOwDm9H+Dwrlz0H5wY1FqM0Gl3I7dtdeSTBxv0loLsJJgPvozvQPcXdTXmlRw4h+6tpRuG
++jBEzD6Epvr0fRxiOObXcGB9GsC91NCw0MP7deDsktfGOLLWPraqmkL7QnuwixK2ZpWiYxmnONH4
+otYLaAzucWPyR/apThSyv3vqxJyYkAXKg7sgvbmNdINWOGHE5UpcOZpQOnxTTaPfLeWtTMFogJEd
+Y7XDL7baYRLZcEpvHthvxu5ie7Htx43eNJgdmXIMRIAKMXoDPbsQanDAFf5Z70Ti7Iac47d/PZuK
+tx9+gn/fyI9gQbHmcSr+BqOLt3kJ20ou2qXbFLCAo+L9Yl4rLIwkaHRCwRdPoLd24ZEXT0N0ZYlf
+UmIVpMBk2nLDt50AijxBKmRv3ANTLwG/TUFXywk1DmLfWoz0S6TBcI0L1oUc6JbRutqkaCac4Eiz
+iJej87O3px8+nUbVPTK2+Tlygid+HhZORx8Nl3gMNhX2yaLGJ1eOv/yDTIsed1nvNU29DO41RQjb
+kcLuL/kmjdjuKeISAwai2C7zRYQtgdO5RK+6A/954mwrH7TvnnFFWOOJPjxrnHh8DNQQP7f1zwga
+Uh89J+pJCMVzrBXjx9Go3wJPBUW04c/zm7ulGxDXRT80wTamzazHfnerAtdMZw3PchLhdWyXwdSB
+pkmsNvOFWx/4MRP6IhRQbnS8IVdxnVZCZrCVor093UgBCt4t6WMJYVZhK0Z1bhSdSe/irXJyj2Il
+RjjqiIrq8RyGAoWw9f4xvmEzgLWGouYSaIBOiNK2KXe6qnqxZgnmnRBRryff4C7JXrnJL5rCPChv
+jBeN/wrzRG+RMbqWlZ4/PxhPLl82CQ4UjF54Bb2LAoydyyZ7oDGL58+fj8S/Pez0MCpRmuc34I0B
+7F5n5ZxeDxhsPTm7Wl2H3ryJgB8Xa3kJD64oaG6f1xlFJHd0pQWR9q+BEeLahJYZTfuWOeZYXcnn
+y9yCz6m0wfhLltB1RxhRkqhs9a1RGG0y0kQsCYohjNUiSUKOTsB6bPMaa/Ewuqj5Rd4DxycIZopv
+8WCMd9hrdCwpb9Zyj0XnWIwI8IhSyng0KmamajTAc3ax1WjOzrKkaspIXrhnpvoKgMreYqT5SsR3
+KBlmHi1iOGWdHqs2jnW+k0W9jUq+uHTjjK1Z8uuHcAfWBknLVyuDKTw0i7TIZbkw5hRXLFkklQPG
+tEM43JkubyLrEwU9KI1AvZNVWFqJtm//YNfFxfQjHR/vm5F01lBlL8TimFCctfIKo6gZn6JPlpCW
+b82XCYzygaLZ2hPwxhJ/0LFUrCHw7u1wyxnrTN/HwWkbzSUdAIfugLIK0rKjpyOci8csfGbagVs0
+8EM7c8LtNimrOk5n+tqHGfppM3uervG0ZXA7CzyttwK+fQ6O777O2AfHwSTXID0x49ZUZByLlY5M
+RG5lmV+EVeTo5R2yrwQ+BVJmOTP10CZ2dGnZ1Raa6gRHR8UjqK9M8dKAQ26qZjoFJy7mU0pvMuUO
+A86zn29JV1eI78T41VQctnY+i2KLNzkBss+Woe+KUTeYihMMMHNs34shvjsW45dT8ccd0KOBAY4O
+3RHa+9gWhEEgr66eTMY0mRPZwr4U9of76hxG0PSM4+SqTf4umb4lKv1ri0pcIagTlV+2E5VbYw/u
+WzsfH8lwA4pjlcjl/jOFJNRIN7p5mMEJPyyg37M5Wrp2vKmoocK5OWxG7ho96GhE4zbbQUxRulZf
+XL+LuoYNp71zwKTJtFIV7S1zmMao0WsRFQDM+o7S8Bve7QLvNSlc/2zwiFUXAViwPREEXenJB2ZN
+w0ZQH3QEn6QBHmAUEeJhaqMoXMl6goiEdA8OMdFXrUNsh+N/d+bhEoOho9AOlt98vQtPVzB7izp6
+FnR3pYUnsra8ollu8+kPzHmM0tf1NwmMA6URHXBWzVWV5GYeYfYy30GT2yzmDV4GSSfTaBJT6bpN
+vJXmW7/Qj6HYASWTwVqAJ1Wv8CD5lu62PFGU9IZX1Hx9+HJqKoMZkJ7Aq+jVV/oKSOpmLj/wfeyp
+3rvBS93vMPoXB1hS+b3tq85uhqZ13LoLyh8spOjZJJpZOjSG6eE6kGbNYoF3JjbEZN/aXgDyHryd
+Ofg55vLTHBw22JBGfei6GqOR3iHVNiDAD5uMIcl5VNdGkSLSu4RtSHnuUpxPFgXdq9+CYAgBOX8d
+8xt0BeviyIbYjE3Bk8+xm82Jn+qmt+6M7Qka2+om3DV97r9r7rpFYGdukhk6c/frS10a6L7DVrSP
+Bhze0IR4VIlEo/H7jYlrB6Y6h6Y/Qq8/SH63E850wKw8BMZk7GC8n9hTY2/M/iZeuN8xIWyfL2R2
+y4l7nY3WtDs2o83xj/EUOPkFn9sbBiijaak5kPdLdMPejHNkZ/L6Ws1ivN1xRptsyufq7J7Mtu09
+Xc4nY7U1uy28tAhAGG7Smbducj0wBuhKvmWa06Gc22kEDU1Jw04WskqWbBL01g7ARRwxpf4mEM9p
+xKNUYqBb1WVRwm54pO8i5jydvtTmBqgJ4G1idWNQNz2m+mpaUqyUHGZKkDlO20ryASKwEe+YhtnM
+vgNeedFcs5BMLTPIrN7IMq6aK4b8jIAENl3NCFR0jovrhOcaqWxxiYtYYnnDQQoDZPb7V7Cx9DbV
+O+5VmFht93h2oh465PuUKxscY2S4OLm31wu611ot6Wpr1zu0zRqus1cqwTKYu/JIR+pYGb/V93fx
+HbMcyUf/0uEfkHe38tLPQrfqjL1bi4bzzFUI3Qub8MYAMs599zB2OKB742JrA2zH9/WFZZSOhznQ
+2FJR++S9CqcZbdJEkDBh9IEIkl8U8MQIkgf/kREkfWsmGBqNj9YDvWUCD4SaWD24V1A2jAB9ZkAk
+PMBuXWBoTOXYTbovcpXcj+yF0qwrnUo+Yx6QI7t3kxEIvmpSuRnK3lVwuyJIvnTR4+/PP745OSda
+zC5O3v7HyfeUlIXHJS1b9egQW5bvM7X3vfRvN9ymE2n6Bm+w7bkhlmuYNITO+04OQg+E/nq1vgVt
+KzL39VCHTt1PtxMgvnvaLahDKrsXcscv0zUmbvpMK0870E85qdb8cjITzCNzUsfi0JzEmffN4YmW
+0U5seWjhnPTWrjrR/qq+BXQg7j2xSda0Anhmgvxlj0xMxYwNzLOD0v7ffFBmOFYbmht0QAoX0rnJ
+kS5xZFCV//8TKUHZxbi3Y0dxau/mpnZ8PKTspfN49ruQkSGIV+436s7PFfalTAeoEASs8PQ9hYyI
+0X/6QNWmHzxT4nKfCov3Udlc2V+4Ztq5/WuCSQaVve9LcYISH7NC41WduokDtk+nAzl9dBqVr5xK
+FtB8B0DnRjwVsDf6S6wQ51sRwsZRu2SYHEt01Jf1Ocij3XSwN7R6IfaHyk7dskshXg43XLYqO3WP
+Q+6hHuihalPc51hgzNIcqicV3xFkPs4UdMGX53zgGbre9sPX28uXR/ZwAfkdXzuKhLLJRo5hv3Sy
+MXdeKul0J2Ypp5Suh3s1JySsW1w5UNknGNrbdEpSBvY/Js+BIY289/0hM9PDu3p/1MbUst4RTEmM
+n6kJTcsp4tG42yeT7nQbtdUFwgVJjwDSUYEAC8F0dKOTILrlLO/xC70bnNd0Ha97whQ6UkHJYj5H
+cA/j+zX4tbtTIfGjujOKpj83aHOgXnIQbvYduNXEC4UMm4T21Bs+GHABuCa7v//LR/TvpjHa7oe7
+/Grb6lVvHSD7spj5iplBLRKZxxEYGdCbY9LWWC5hBB2voWno6DJUMzfkC3T8KJsWL9umDQY5szPt
+AVijEPwfucjncQ==
+""")
+
+##file activate.sh
+ACTIVATE_SH = convert("""
+eJytVVFvokAQfudXTLEPtTlLeo9tvMSmJpq02hSvl7u2wRUG2QR2DSxSe7n/frOACEVNLlceRHa+
+nfl25pvZDswCnoDPQ4QoTRQsENIEPci4CsBMZBq7CAsuLOYqvmYKTTj3YxnBgiXBudGBjUzBZUJI
+BXEqgCvweIyuCjeG4eF2F5x14bcB9KQiQQWrjSddI1/oQIx6SYYeoFjzWIoIhYI1izlbhJjkKO7D
+M/QEmKfO9O7WeRo/zr4P7pyHwWxkwitcgwpQ5Ej96OX+PmiFwLeVjFUOrNYKaq1Nud3nR2n8nI2m
+k9H0friPTGVsUdptaxGrTEfpNVFEskxpXtUkkCkl1UNF9cgLBkx48J4EXyALuBtAwNYIjF5kcmUU
+abMKmMq1ULoiRbgsDEkTSsKSGFCJ6Z8vY/2xYiSacmtyAfCDdCNTVZoVF8vSTQOoEwSnOrngBkws
+MYGMBMg8/bMBLSYKS7pYEXP0PqT+ZmBT0Xuy+Pplj5yn4aM9nk72JD8/Wi+Gr98sD9eWSMOwkapD
+BbUv91XSvmyVkICt2tmXR4tWmrcUCsjWOpw87YidEC8i0gdTSOFhouJUNxR+4NYBG0MftoCTD9F7
+2rTtxG3oPwY1b2HncYwhrlmj6Wq924xtGDWqfdNxap+OYxplEurnMVo9RWks+rH8qKEtx7kZT5zJ
+4H7oOFclrN6uFe+d+nW2aIUsSgs/42EIPuOhXq+jEo3S6tX6w2ilNkDnIpHCWdEQhFgwj9pkk7FN
+l/y5eQvRSIQ5+TrL05lewxWpt/Lbhes5cJF3mLET1MGhcKCF+40tNWnUulxrpojwDo2sObdje3Bz
+N3QeHqf3D7OjEXMVV8LN3ZlvuzoWHqiUcNKHtwNd0IbvPGKYYM31nPKCgkUILw3KL+Y8l7aO1ArS
+Ad37nIU0fCj5NE5gQCuC5sOSu+UdI2NeXg/lFkQIlFpdWVaWZRfvqGiirC9o6liJ9FXGYrSY9mI1
+D/Ncozgn13vJvsznr7DnkJWXsyMH7e42ljdJ+aqNDF1bFnKWFLdj31xtaJYK6EXFgqmV/ymD/ROG
++n8O9H8f5vsGOWXsL1+1k3g=
+""")
+
+##file activate.fish
+ACTIVATE_FISH = convert("""
+eJydVW2P2jgQ/s6vmAZQoVpA9/WkqqJaTou0u6x2uZVOVWWZZEKsS+yc7UDpr+84bziQbauLxEvs
+eXnsZ56ZIWwTYSAWKUJWGAs7hMJgBEdhEwiMKnSIsBNywUMrDtziPBYmCeBDrFUG7v8HmCTW5n8u
+Fu7NJJim81Bl08EQTqqAkEupLOhCgrAQCY2hTU+DQVxIiqgkRNiEBphFEKy+kd1BaFvwFOUBuIxA
+oy20BKtAKp3xFMo0QNtCK5mhtMEA6BmSpUELKo38TThwLfguRVNaiRgs0llnEoIR29zfstf18/bv
+5T17Wm7vAiiN3ONCzfbfwC3DtWXXDqHfAGX0q6z/bO82j3ebh1VwnbrduwTQbvwcRtesAfMGor/W
+L3fs6Xnz8LRlm9fV8/P61sM0LDNwCZjl9gSpCokJRzpryGQ5t8kNGFUt51QjOZGu0Mj35FlYlXEr
+yC09EVOp4lEXfF84Lz1qbhBsgl59vDedXI3rTV03xipduSgt9kLytI3XmBp3aV6MPoMQGNUU62T6
+uQdeefTy1Hfj10zVHg2pq8fXDoHBiOv94csfXwN49xECqWREy7pwukKfvxdMY2j23vXDPuuxxeE+
+JOdCOhxCE3N44B1ZeSLuZh8Mmkr2wEPAmPfKWHA2uxIRjEopdbQYjDz3BWOf14/scfmwoki1eQvX
+ExBdF60Mqh+Y/QcX4uiH4Amwzx79KOVFtbL63sXJbtcvy8/3q5rupmO5CnE91wBviQAhjUUegYpL
+vVEbpLt2/W+PklRgq5Ku6mp+rpMhhCo/lXthQTxJ2ysO4Ka0ad97S7VT/n6YXus6fzk3fLnBZW5C
+KDC6gSO62QDqgFqLCCtPmjegjnLeAdArtSE8VYGbAJ/aLb+vnQutFhk768E9uRbSxhCMzdgEveYw
+IZ5ZqFKl6+kz7UR4U+buqQZXu9SIujrAfD7f0FXpozB4Q0gwp31H9mVTZGGC4b871/wm7lvyDLu1
+FUyvTj/yvD66k3UPTs08x1AQQaGziOl0S1qRkPG9COtBTSTWM9NzQ4R64B+Px/l3tDzCgxv5C6Ni
+e+QaF9xFWrxx0V/G5uvYQOdiZzvYpQUVQSIsTr1TTghI33GnPbTA7/GCqcE3oE3GZurq4HeQXQD6
+32XS1ITj/qLjN72ob0hc5C9bzw8MhfmL
+""")
+
+##file activate.csh
+ACTIVATE_CSH = convert("""
+eJx9VG1P2zAQ/u5fcYQKNgTNPtN1WxlIQ4KCUEGaxuQ6yYVYSuzKdhqVX7+zk3bpy5YPUXL3PPfc
+ne98DLNCWshliVDV1kGCUFvMoJGugMjq2qQIiVSxSJ1cCofD1BYRnOVGV0CfZ0N2DD91DalQSjsw
+tQLpIJMGU1euvPe7QeJlkKzgWixlhnAt4aoUVsLnLBiy5NtbJWQ5THX1ZciYKKWwkOFaE04dUm6D
+r/zh7pq/3D7Nnid3/HEy+wFHY/gEJydg0aFaQrBFgz1c5DG1IhTs+UZgsBC2GMFBlaeH+8dZXwcW
+VPvCjXdlAvCfQsE7al0+07XjZvrSCUevR5dnkVeKlFYZmUztG4BdzL2u9KyLVabTU0bdfg7a0hgs
+cSmUg6UwUiQl2iHrcbcVGNvPCiLOe7+cRwG13z9qRGgx2z6DHjfm/Op2yqeT+xvOLzs0PTKHDz2V
+tkckFHoQfQRXoGJAj9el0FyJCmEMhzgMS4sB7KPOE2ExoLcSieYwDvR+cP8cg11gKkVJc2wRcm1g
+QhYFlXiTaTfO2ki0fQoiFM4tLuO4aZrhOzqR4dIPcWx17hphMBY+Srwh7RTyN83XOWkcSPh1Pg/k
+TXX/jbJTbMtUmcxZ+/bbqOsy82suFQg/BhdSOTRhMNBHlUarCpU7JzBhmkKmRejKOQzayQe6MWoa
+n1wqWmuh6LZAaHxcdeqIlVLhIBJdO9/kbl0It2oEXQj+eGjJOuvOIR/YGRqvFhttUB2XTvLXYN2H
+37CBdbW2W7j2r2+VsCn0doVWcFG1/4y1VwBjfwAyoZhD
+""")
+
+##file activate.bat
+ACTIVATE_BAT = convert("""
+eJx9UdEKgjAUfW6wfxjiIH+hEDKUFHSKLCMI7kNOEkIf9P9pTJ3OLJ/03HPPPed4Es9XS9qqwqgT
+PbGKKOdXL4aAFS7A4gvAwgijuiKlqOpGlATS2NeMLE+TjJM9RkQ+SmqAXLrBo1LLIeLdiWlD6jZt
+r7VNubWkndkXaxg5GO3UaOOKS6drO3luDDiO5my3iA0YAKGzPRV1ack8cOdhysI0CYzIPzjSiH5X
+0QcvC8Lfaj0emsVKYF2rhL5L3fCkVjV76kShi59NHwDniAHzkgDgqBcwOgTMx+gDQQqXCw==
+""")
+
+##file deactivate.bat
+DEACTIVATE_BAT = convert("""
+eJxzSE3OyFfIT0vj4ipOLVEI8wwKCXX0iXf1C7Pl4spMU0hJTcvMS01RiPf3cYmHyQYE+fsGhCho
+cCkAAUibEkTEVhWLMlUlLk6QGixStlyaeCyJDPHw9/Pw93VFsQguim4ZXAJoIUw5DhX47XUM8UCx
+EchHtwsohN1bILUgw61c/Vy4AJYPYm4=
+""")
+
+##file activate.ps1
+ACTIVATE_PS = convert("""
+eJylWdmO41hyfW+g/0FTU7C7IXeJIqmtB/3AnZRIStxF2kaBm7gv4ipyMF/mB3+Sf8GXVGVl1tLT
+43ECSqR4b5wbETeWE8z/+a///vNCDaN6cYtSf5G1dbNw/IVXNIu6aCvX9xa3qsgWl0IJ/7IYinbh
+2nkOVqs2X0TNjz/8eeFFle826fBhQRaLBkD9uviw+LCy3Sbq7Mb/UNbrH3+YNtLcVaB+Xbipb+eL
+tly0eVsD/M6u6g8//vC+dquobH5VWU75eMFUdvHb4n02RHlXuHYTFfmHbHCLLLNz70NpN+GrBI4p
+1EeSk4FAXaZR88u0vPip8usi7fznt3fvP+OuPnx49/Pil4td+XnzigIAPoqYQH2J8v4z+C+8b98m
+Q25t7k76LIK0cOz0V89/MXXx0+Lf6z5q3PA/F+/FIif9uqnaadFf/PzXSXYBfqIb2NeApecJwPzI
+dlL/149nnvyoc7KqYfzTAT8v/voUmX7e+3n364tffl/oVaDyswKY/7J18e6bve8Wv9RuUfqfLHmK
+/u139Hwx+9ePRep97KKqae30YwmCo2y+0vTz1k+rv7159B3pb1SOGj97Pe8/flfkC1Vn/7xYR4n6
+lypNEGDDV5f7lcjil3S+4++p881Wv6qKyn5GQg1yJwcp4BZ5E+Wt/z1P/umbiHir4J8Xip/eFt6n
+9T/9gU9eY+7zUX97Jlmb136ziKrKT/3OzpvP8VX/+MObSP0lL3LvVZlJ9v1b8357jXyw8rXxYPXN
+11n4UzJ8G8S/vUbuJ6RPj999DbtS5kys//JusXwrNLnvT99cFlBNwXCe+niRz8JF/ezNr9Pze+H6
+18W7d5PPvozW7+387Zto/v4pL8BvbxTzvIW9KCv/Fj0WzVQb/YXbVlPZWTz3/9vCaRtQbPN/Bb+j
+2rUrDxTVD68gfQXu/ZewAFX53U/vf/rD2P3558W7+W79Po1y/xXoX/6RFHyNIoVjgAG4H0RTcAe5
+3bSVv3DSwk2mZYHjFB8zj6fC4sLOFTHJJQrwzFYJgso0ApOoBzFiRzzQKjIQCCbQMIFJGCKqGUyS
+8AkjiF2wTwmMEbcEUvq8Nj+X0f4YcCQmYRiOY7eRbAJDqzm1chOoNstbJ8oTBhZQ2NcfgaB6QjLp
+U4+SWFjQGCZpyqby8V4JkPGs9eH1BscXIrTG24QxXLIgCLYNsIlxSYLA6SjAeg7HAg4/kpiIB8k9
+TCLm0EM4gKIxEj8IUj2dQeqSxEwYVH88qiRlCLjEYGuNIkJB1BA5dHOZdGAoUFk54WOqEojkuf4Q
+Ig3WY+96TDlKLicMC04h0+gDCdYHj0kz2xBDj9ECDU5zJ0tba6RKgXBneewhBG/xJ5m5FX+WSzsn
+wnHvKhcOciw9NunZ0BUF0n0IJAcJMdcLqgQb0zP19dl8t9PzmMBjkuIF7KkvHgqEovUPOsY0PBB1
+HCtUUhch83qEJPjQcNQDsgj0cRqx2ZbnnlrlUjE1EX2wFJyyDa/0GLrmKDEFepdWlsbmVU45Wiwt
+eFM6mfs4kxg8yc4YmKDy67dniLV5FUeO5AKNPZaOQQ++gh+dXE7dbJ1aTDr7S4WPd8sQoQkDyODg
+XnEu/voeKRAXZxB/e2xaJ4LTFLPYEJ15Ltb87I45l+P6OGFA5F5Ix8A4ORV6M1NH1uMuZMnmFtLi
+VpYed+gSq9JDBoHc05J4OhKetrk1p0LYiKipxLMe3tYS7c5V7O1KcPU8BJGdLfcswhoFCSGQqJ8f
+ThyQKy5EWFtHVuNhvTnkeTc8JMpN5li3buURh0+3ZGuzdwM55kon+8urbintjdQJf9U1D0ah+hNh
+i1XNu4fSKbTC5AikGEaj0CYM1dpuli7EoqUt7929f1plxGGNZnixFSFP2qzhlZMonu2bB9OWSqYx
+VuHKWNGJI8kqUhMTRtk0vJ5ycZ60JlodlmN3D9XiEj/cG2lSt+WV3OtMgt1Tf4/Z+1BaCus740kx
+Nvj78+jMd9tq537Xz/mNFyiHb0HdwHytJ3uQUzKkYhK7wjGtx3oKX43YeYoJVtqDSrCnQFzMemCS
+2bPSvP+M4yZFi/iZhAjL4UOeMfa7Ex8HKBqw4umOCPh+imOP6yVTwG2MplB+wtg97olEtykNZ6wg
+FJBNXSTJ3g0CCTEEMdUjjcaBDjhJ9fyINXgQVHhA0bjk9lhhhhOGzcqQSxYdj3iIN2xGEOODx4qj
+Q2xikJudC1ujCVOtiRwhga5nPdhe1gSa649bLJ0wCuLMcEYIeSy25YcDQHJb95nfowv3rQnin0fE
+zIXFkM/EwSGxvCCMgEPNcDp/wph1gMEa8Xd1qAWOwWZ/KhjlqzgisBpDDDXz9Cmov46GYBKHC4zZ
+84HJnXoTxyWNBbXV4LK/r+OEwSN45zBp7Cub3gIYIvYlxon5BzDgtPUYfXAMPbENGrI+YVGSeTQ5
+i8NMB5UCcC+YRGIBhgs0xhAGwSgYwywpbu4vpCSTdEKrsy8osXMUnHQYenQHbOBofLCNNTg3CRRj
+A1nXY2MZcjnXI+oQ2Zk+561H4CqoW61tbPKv65Y7fqc3TDUF9CA3F3gM0e0JQ0TPADJFJXVzphpr
+2FzwAY8apGCju1QGOiUVO5KV6/hKbtgVN6hRVwpRYtu+/OC6w2bCcGzZQ8NCc4WejNEjFxOIgR3o
+QqR1ZK0IaUxZ9nbL7GWJIjxBARUhAMnYrq/S0tVOjzlOSYRqeIZxaSaOBX5HSR3MFekOXVdUPbjX
+nru61fDwI8HRYPUS7a6Inzq9JLjokU6P6OzT4UCH+Nha+JrU4VqEo4rRHQJhVuulAnvFhYz5NWFT
+aS/bKxW6J3e46y4PLagGrCDKcq5B9EmP+s1QMCaxHNeM7deGEV3WPn3CeKjndlygdPyoIcNaL3dd
+bdqPs47frcZ3aNWQ2Tk+rjFR01Ul4XnQQB6CSKA+cZusD0CP3F2Ph0e78baybgioepG12luSpFXi
+bHbI6rGLDsGEodMObDG7uyxfCeU+1OiyXYk8fnGu0SpbpRoEuWdSUlNi5bd9nBxYqZGrq7Qa7zV+
+VLazLcelzzP9+n6+xUtWx9OVJZW3gk92XGGkstTJ/LreFVFF2feLpXGGuQqq6/1QbWPyhJXIXIMs
+7ySVlzMYqoPmnmrobbeauMIxrCr3sM+qs5HpwmmFt7SM3aRNQWpCrmeAXY28EJ9uc966urGKBL9H
+18MtDE5OX97GDOHxam11y5LCAzcwtkUu8wqWI1dWgHyxGZdY8mC3lXzbzncLZ2bIUxTD2yW7l9eY
+gBUo7uj02ZI3ydUViL7oAVFag37JsjYG8o4Csc5R7SeONGF8yZP+7xxi9scnHvHPcogJ44VH/LMc
+Yu6Vn3jEzCFw9Eqq1ENQAW8aqbUwSiAqi+nZ+OkZJKpBL66Bj8z+ATqb/8qDIJUeNRTwrI0YrVmb
+9FArKVEbCWUNSi8ipfVv+STgkpSsUhcBg541eeKLoBpLGaiHTNoK0r4nn3tZqrcIULtq20Df+FVQ
+Sa0MnWxTugMuzD410sQygF4qdntbswiJMqjs014Irz/tm+pd5oygJ0fcdNbMg165Pqi7EkYGAXcB
+dwxioCDA3+BY9+JjuOmJu/xyX2GJtaKSQcOZxyqFzTaa6/ot21sez0BtKjirROKRm2zuai02L0N+
+ULaX8H5P6VwsGPbYOY7sAy5FHBROMrMzFVPYhFHZ7M3ZCZa2hsT4jGow6TGtG8Nje9405uMUjdF4
+PtKQjw6yZOmPUmO8LjFWS4aPCfE011N+l3EdYq09O3iQJ9a01B3KXiMF1WmtZ+l1gmyJ/ibAHZil
+vQzdOl6g9PoSJ4TM4ghTnTndEVMOmsSSu+SCVlGCOLQRaw9oLzamSWP62VuxPZ77mZYdfTRGuNBi
+KyhZL32S2YckO/tU7y4Bf+QKKibQSKCTDWPUwWaE8yCBeL5FjpbQuAlb53mGX1jptLeRotREbx96
+gnicYz0496dYauCjpTCA4VA0cdLJewzRmZeTwuXWD0talJsSF9J1Pe72nkaHSpULgNeK1+o+9yi0
+YpYwXZyvaZatK2eL0U0ZY6ekZkFPdC8JTF4Yo1ytawNfepqUKEhwznp6HO6+2l7L2R9Q3N49JMIe
+Z+ax1mVaWussz98QbNTRPo1xu4W33LJpd9H14dd66ype7UktfEDi3oUTccJ4nODjwBKFxS7lYWiq
+XoHu/b7ZVcK5TbRD0F/2GShg2ywwUl07k4LLqhofKxFBNd1grWY+Zt/cPtacBpV9ys2z1moMLrT3
+W0Elrjtt5y/dvDQYtObYS97pqj0eqmwvD3jCPRqamGthLiF0XkgB6IdHLBBwDGPiIDh7oPaRmTrN
+tYA/yQKFxRiok+jM6ciJq/ZgiOi5+W4DEmufPEubeSuYJaM3/JHEevM08yJAXUQwb9LS2+8FOfds
+FfOe3Bel6EDSjIEIKs4o9tyt67L1ylQlzhe0Q+7ue/bJnWMcD3q6wDSIQi8ThnRM65aqLWesi/ZM
+xhHmQvfKBbWcC194IPjbBLYR9JTPITbzwRcu+OSFHDHNSYCLt29sAHO6Gf0h/2UO9Xwvhrjhczyx
+Ygz6CqP4IwxQj5694Q1Pe2IR+KF/yy+5PvCL/vgwv5mPp9n4kx7fnY/nmV++410qF/ZVCMyv5nAP
+pkeOSce53yJ6ahF4aMJi52by1HcCj9mDT5i+7TF6RoPaLL+cN1hXem2DmX/mdIbeeqwQOLD5lKO/
+6FM4x77w6D5wMx3g0IAfa2D/pgY9a7bFQbinLDPz5dZi9ATIrd0cB5xfC0BfCCZO7TKP0jQ2Meih
+nRXhkA3smTAnDN9IW2vA++lsgNuZ2QP0UhqyjUPrDmgfWP2bWWiKA+YiEK7xou8cY0+d3/bk0oHR
+QLrq4KzDYF/ljQDmNhBHtkVNuoDey6TTeaD3SHO/Bf4d3IwGdqQp6FuhmwFbmbQBssDXVKDBYOpk
+Jy7wxOaSRwr0rDmGbsFdCM+7XU/84JPu3D/gW7QXgzlvbjixn99/8CpWFUQWHFEz/RyXvzNXTTOd
+OXLNNFc957Jn/YikNzEpUdRNxXcC6b76ccTwMGoKj5X7c7TvHFgc3Tf4892+5A+iR+D8OaaE6ACe
+gdgHcyCoPm/xiDCWP+OZRjpzfj5/2u0i4qQfmIEOsTV9Hw6jZ3Agnh6hiwjDtGYxWvt5TiWEuabN
+77YCyRXwO8P8wdzG/8489KwfFBZWI6Vvx76gmlOc03JI1HEfXYZEL4sNFQ3+bqf7e2hdSWQknwKF
+ICJjGyDs3fdmnnxubKXebpQYLjPgEt9GTzKkUgTvOoQa1J7N3nv4sR6uvYFLhkXZ+pbCoU3K9bfq
+gF7W82tNutRRZExad+k4GYYsCfmEbvizS4jsRr3fdzqjEthpEwm7pmN7OgVzRbrktjrFw1lc0vM8
+V7dyTJ71qlsd7v3KhmHzeJB35pqEOk2pEe5uPeCToNkmedmxcKbIj+MZzjFSsvCmimaMQB1uJJKa
++hoWUi7aEFLvIxKxJavqpggXBIk2hr0608dIgnfG5ZEprqmH0b0YSy6jVXTCuIB+WER4d5BPVy9Q
+M4taX0RIlDYxQ2CjBuq78AAcHQf5qoKP8BXHnDnd/+ed5fS+csL4g3eWqECaL+8suy9r8hx7c+4L
+EegEWdqAWN1w1NezP34xsxLkvRRI0DRzKOg0U+BKfQY128YlYsbwSczEg2LqKxRmcgiwHdhc9MQJ
+IwKQHlgBejWeMGDYYxTOQUiJOmIjJbzIzHH6lAMP+y/fR0v1g4wx4St8fcqTt3gz5wc+xXFZZ3qI
+JpXI5iJk7xmNL2tYsDpcqu0375Snd5EKsIvg8u5szTOyZ4v06Ny2TZXRpHUSinh4IFp8Eoi7GINJ
+02lPJnS/9jSxolJwp2slPMIEbjleWw3eec4XaetyEnSSqTPRZ9fVA0cPXMqzrPYQQyrRux3LaAh1
+wujbgcObg1nt4iiJ5IMbc/WNPc280I2T4nTkdwG8H6iS5xO2WfsFsruBwf2QkgZlb6w7om2G65Lr
+r2Gl4dk63F8rCEHoUJ3fW+pU2Srjlmcbp+JXY3DMifEI22HcHAvT7zzXiMTr7VbUR5a2lZtJkk4k
+1heZZFdru8ucCWMTr3Z4eNnjLm7LW7rcN7QjMpxrsCzjxndeyFUX7deIs3PQkgyH8k6luI0uUyLr
+va47TBjM4JmNHFzGPcP6BV6cYgQy8VQYZe5GmzZHMxyBYhGiUdekZQ/qwyxC3WGylQGdUpSf9ZCP
+a7qPdJd31fPRC0TOgzupO7nLuBGr2A02yuUQwt2KQG31sW8Gd9tQiHq+hPDt4OzJuY4pS8XRsepY
+tsd7dVEfJFmc15IYqwHverrpWyS1rFZibDPW1hUUb+85CGUzSBSTK8hpvee/ZxonW51TUXekMy3L
+uy25tMTg4mqbSLQQJ+skiQu2toIfBFYrOWql+EQipgfT15P1aq6FDK3xgSjIGWde0BPftYchDTdM
+i4QdudHFkN0u6fSKiT09QLv2mtSblt5nNzBR6UReePNs+khE4rHcXuoK21igUKHl1c3MXMgPu7y8
+rKQDxR6N/rffXv+lROXet/9Q+l9I4D1U
+""")
+
+##file distutils-init.py
+DISTUTILS_INIT = convert("""
+eJytV1uL4zYUfvevOE0ottuMW9q3gVDa3aUMXXbLMlDKMBiNrSTqOJKRlMxkf33PkXyRbGe7Dw2E
+UXTu37lpxLFV2oIyifAncxmOL0xLIfcG+gv80x9VW6maw7o/CANSWWBwFtqeWMPlGY6qPjV8A0bB
+C4eKSTgZ5LRgFeyErMEeOBhbN+Ipgeizhjtnhkn7DdyjuNLPoCS0l/ayQTG0djwZC08cLXozeMss
+aG5EzQ0IScpnWtHSTXuxByV/QCmxE7y+eS0uxWeoheaVVfqSJHiU7Mhhi6gULbOHorshkrEnKxpT
+0n3A8Y8SMpuwZx6aoix3ouFlmW8gHRSkeSJ2g7hU+kiHLDaQw3bmRDaTGfTnty7gPm0FHbIBg9U9
+oh1kZzAFLaue2R6htPCtAda2nGlDSUJ4PZBgCJBGVcwKTAMz/vJiLD+Oin5Z5QlvDPdulC6EsiyE
+NFzb7McNTKJzbJqzphx92VKRFY1idenzmq3K0emRcbWBD0ryqc4NZGmKOOOX9Pz5x+/l27tP797c
+f/z0d+4NruGNai8uAM0bfsYaw8itFk8ny41jsfpyO+BWlpqfhcG4yxLdi/0tQqoT4a8Vby382mt8
+p7XSo7aWGdPBc+b6utaBmCQ7rQKQoWtAuthQCiold2KfJIPTT8xwg9blPumc+YDZC/wYGdAyHpJk
+vUbHbHWAp5No6pK/WhhLEWrFjUwtPEv1Agf8YmnsuXUQYkeZoHm8ogP16gt2uHoxcEMdf2C6pmbw
+hUMsWGhanboh4IzzmsIpWs134jVPqD/c74bZHdY69UKKSn/+KfVhxLgUlToemayLMYQOqfEC61bh
+cbhwaqoGUzIyZRFHPmau5juaWqwRn3mpWmoEA5nhzS5gog/5jbcFQqOZvmBasZtwYlG93k5GEiyw
+buHhMWLjDarEGpMGB2LFs5nIJkhp/nUmZneFaRth++lieJtHepIvKgx6PJqIlD9X2j6pG1i9x3pZ
+5bHuCPFiirGHeO7McvoXkz786GaKVzC9DSpnOxJdc4xm6NSVq7lNEnKdVlnpu9BNYoKX2Iq3wvgh
+gGEUM66kK6j4NiyoneuPLSwaCWDxczgaolEWpiMyDVDb7dNuLAbriL8ig8mmeju31oNvQdpnvEPC
+1vAXbWacGRVrGt/uXN/gU0CDDwgooKRrHfTBb1/s9lYZ8ZqOBU0yLvpuP6+K9hLFsvIjeNhBi0KL
+MlOuWRn3FRwx5oHXjl0YImUx0+gLzjGchrgzca026ETmYJzPD+IpuKzNi8AFn048Thd63OdD86M6
+84zE8yQm0VqXdbbgvub2pKVnS76icBGdeTHHXTKspUmr4NYo/furFLKiMdQzFjHJNcdAnMhltBJK
+0/IKX3DVFqvPJ2dLE7bDBkH0l/PJ29074+F0CsGYOxsb7U3myTUncYfXqnLLfa6sJybX4g+hmcjO
+kMRBfA1JellfRRKJcyRpxdS4rIl6FdmQCWjo/o9Qz7yKffoP4JHjOvABcRn4CZIT2RH4jnxmfpVG
+qgLaAvQBNfuO6X0/Ux02nb4FKx3vgP+XnkX0QW9pLy/NsXgdN24dD3LxO2Nwil7Zlc1dqtP3d7/h
+kzp1/+7hGBuY4pk0XD/0Ao/oTe/XGrfyM773aB7iUhgkpy+dwAMalxMP0DrBcsVw/6p25+/hobP9
+GBknrWExDhLJ1bwt1NcCNblaFbMKCyvmX0PeRaQ=
+""")
+
+##file distutils.cfg
+DISTUTILS_CFG = convert("""
+eJxNj00KwkAMhfc9xYNuxe4Ft57AjYiUtDO1wXSmNJnK3N5pdSEEAu8nH6lxHVlRhtDHMPATA4uH
+xJ4EFmGbvfJiicSHFRzUSISMY6hq3GLCRLnIvSTnEefN0FIjw5tF0Hkk9Q5dRunBsVoyFi24aaLg
+9FDOlL0FPGluf4QjcInLlxd6f6rqkgPu/5nHLg0cXCscXoozRrP51DRT3j9QNl99AP53T2Q=
+""")
+
+##file activate_this.py
+ACTIVATE_THIS = convert("""
+eJyNU01v2zAMvetXEB4K21jnDOstQA4dMGCHbeihlyEIDMWmE62yJEiKE//7kXKdpEWLzYBt8evx
+kRSzLPs6wiEoswM8YdMpjUXcq1Dz6RZa1cSiTkJdr86GsoTRHuCotBayiWqQEYGtMCgfD1KjGYBe
+5a3p0cRKiEe2NtLAFikftnDco0ko/SFEVgEZ8aRCZDIPY9xbA8pE9M4jfW/B2CjiHq9zbJVZuOQq
+siwTIvpxKYCembPAU4Muwi/Z4zfvrZ/MXipKeB8C+qisSZYiWfjJfs+0/MFMdWn1hJcO5U7G/SLa
+xVx8zU6VG/PXLXvfsyyzUqjeWR8hjGE+2iCE1W1tQ82hsCJN9dzKaoexyB/uH79TnjwvxcW0ntSb
+yZ8jq1Z5Q1UXsyy3gf9nbjTEj7NzQMfCJa/YSmrQ+2D/BqfiOi6sclrGzvoeVivIj8rcfcmnIQRF
+7XCyeZI7DFe5/lhlCs5PRf5QW66VXT/NrlQ46oD/D6InkOmi3IQcbhKxAX2g4a+Xd5s3UtCtG2py
+m8eg6WYWqR6SL5OjKMGfSrYt/6kxxQtOpeAgj1LXBNmpE2ElmCSIy5H0zFd8gJ924HWijWhb2hRC
+6wNEm1QdDZtuSZcEprIUBo/XRNcbQe1OUbQ/r3hPTaPJJDNtFLu8KHV5XoNr3Eo6h6YtOKw8e8yw
+VF5PnJ+ts3a9/Mz38RpG/AUSzYUW
+""")
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+FAT_MAGIC = 0xcafebabe
+BIG_ENDIAN = '>'
+LITTLE_ENDIAN = '<'
+LC_LOAD_DYLIB = 0xc
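+# Python 3 removed sys.maxint in favor of sys.maxsize; the and/or chain below
+# is the pre-2.5 conditional-expression idiom ('majver' is set earlier in this
+# file to the running interpreter's major version).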
+maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint')
+
+
+class fileview(object):
+    """
+    A proxy for file-like objects that exposes a given view of a file.
+    Modified from macholib.
+    """
+
+    def __init__(self, fileobj, start=0, size=maxint):
+        if isinstance(fileobj, fileview):
+            self._fileobj = fileobj._fileobj
+        else:
+            self._fileobj = fileobj
+        self._start = start
+        self._end = start + size
+        self._pos = 0
+
+    def __repr__(self):
+        return '<fileview [%d, %d] %r>' % (
+            self._start, self._end, self._fileobj)
+
+    def tell(self):
+        return self._pos
+
+    def _checkwindow(self, seekto, op):
+        if not (self._start <= seekto <= self._end):
+            raise IOError("%s to offset %d is outside window [%d, %d]" % (
+                op, seekto, self._start, self._end))
+
+    def seek(self, offset, whence=0):
+        seekto = offset
+        if whence == os.SEEK_SET:
+            seekto += self._start
+        elif whence == os.SEEK_CUR:
+            seekto += self._start + self._pos
+        elif whence == os.SEEK_END:
+            seekto += self._end
+        else:
+            raise IOError("Invalid whence argument to seek: %r" % (whence,))
+        self._checkwindow(seekto, 'seek')
+        self._fileobj.seek(seekto)
+        self._pos = seekto - self._start
+
+    def write(self, bytes):
+        here = self._start + self._pos
+        self._checkwindow(here, 'write')
+        self._checkwindow(here + len(bytes), 'write')
+        self._fileobj.seek(here, os.SEEK_SET)
+        self._fileobj.write(bytes)
+        self._pos += len(bytes)
+
+    def read(self, size=maxint):
+        assert size >= 0
+        here = self._start + self._pos
+        self._checkwindow(here, 'read')
+        size = min(size, self._end - here)
+        self._fileobj.seek(here, os.SEEK_SET)
+        bytes = self._fileobj.read(size)
+        self._pos += len(bytes)
+        return bytes
+
+
+def read_data(file, endian, num=1):
+    """
+    Read a given number of 32-bit unsigned integers from the given file
+    with the given endianness.
+    """
+    res = struct.unpack(endian + 'L' * num, file.read(num * 4))
+    if len(res) == 1:
+        return res[0]
+    return res
+
+
+def mach_o_change(path, what, value):
+    """
+    Replace a given name (what) in any LC_LOAD_DYLIB command found in
+    the given binary with a new name (value), provided it's shorter.
+    """
+
+    def do_macho(file, bits, endian):
+        # Read Mach-O header (the magic number is assumed read by the caller)
+        cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
+        # 64-bits header has one more field.
+        if bits == 64:
+            read_data(file, endian)
+        # The header is followed by ncmds commands
+        for n in range(ncmds):
+            where = file.tell()
+            # Read command header
+            cmd, cmdsize = read_data(file, endian, 2)
+            if cmd == LC_LOAD_DYLIB:
+                # The first data field in LC_LOAD_DYLIB commands is the
+                # offset of the name, starting from the beginning of the
+                # command.
+                name_offset = read_data(file, endian)
+                file.seek(where + name_offset, os.SEEK_SET)
+                # Read the NUL terminated string
+                load = file.read(cmdsize - name_offset).decode()
+                load = load[:load.index('\0')]
+                # If the string is what is being replaced, overwrite it.
+                if load == what:
+                    file.seek(where + name_offset, os.SEEK_SET)
+                    file.write(value.encode() + '\0'.encode())
+            # Seek to the next command
+            file.seek(where + cmdsize, os.SEEK_SET)
+
+    def do_file(file, offset=0, size=maxint):
+        file = fileview(file, offset, size)
+        # Read magic number
+        magic = read_data(file, BIG_ENDIAN)
+        if magic == FAT_MAGIC:
+            # Fat binaries contain nfat_arch Mach-O binaries
+            nfat_arch = read_data(file, BIG_ENDIAN)
+            for n in range(nfat_arch):
+                # Read arch header
+                cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
+                do_file(file, offset, size)
+        elif magic == MH_MAGIC:
+            do_macho(file, 32, BIG_ENDIAN)
+        elif magic == MH_CIGAM:
+            do_macho(file, 32, LITTLE_ENDIAN)
+        elif magic == MH_MAGIC_64:
+            do_macho(file, 64, BIG_ENDIAN)
+        elif magic == MH_CIGAM_64:
+            do_macho(file, 64, LITTLE_ENDIAN)
+
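+    # The name is overwritten in place inside the load command, so the
+    # replacement must fit in the space the original occupied.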
+    assert(len(what) >= len(value))
+    do_file(open(path, 'r+b'))
+
+
+if __name__ == '__main__':
+    main()
+
+## TODO:
+## Copy python.exe.manifest
+## Monkeypatch distutils.sysconfig
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.bat b/bootstrap/virtualenv/virtualenv_embedded/activate.bat
new file mode 100644
index 0000000..4c2003e
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.bat
@@ -0,0 +1,26 @@
+@echo off
+set "VIRTUAL_ENV=__VIRTUAL_ENV__"
+
+if defined _OLD_VIRTUAL_PROMPT (
+    set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+) else (
+    if not defined PROMPT (
+        set "PROMPT=$P$G"
+    )
+    set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
+)
+set "PROMPT=__VIRTUAL_WINPROMPT__ %PROMPT%"
+
+if not defined _OLD_VIRTUAL_PYTHONHOME (
+    set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
+)
+set PYTHONHOME=
+
+if defined _OLD_VIRTUAL_PATH (
+    set "PATH=%_OLD_VIRTUAL_PATH%"
+) else (
+    set "_OLD_VIRTUAL_PATH=%PATH%"
+)
+set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"
+
+:END
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.csh b/bootstrap/virtualenv/virtualenv_embedded/activate.csh
new file mode 100644
index 0000000..9db7744
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.csh
@@ -0,0 +1,42 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+
+
+
+if ("__VIRTUAL_PROMPT__" != "") then
+    set env_name = "__VIRTUAL_PROMPT__"
+else
+    if (`basename "$VIRTUAL_ENV"` == "__") then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+    else
+        set env_name = `basename "$VIRTUAL_ENV"`
+    endif
+endif
+
+# Could be in a non-interactive environment,
+# in which case, $prompt is undefined and we wouldn't
+# care about the prompt anyway.
+if ( $?prompt ) then
+    set _OLD_VIRTUAL_PROMPT="$prompt"
+    set prompt = "[$env_name] $prompt"
+endif
+
+unset env_name
+
+alias pydoc python -m pydoc
+
+rehash
+
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.fish b/bootstrap/virtualenv/virtualenv_embedded/activate.fish
new file mode 100644
index 0000000..eaa241d
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.fish
@@ -0,0 +1,74 @@
+# This file must be used with "source bin/activate.fish" *from fish* (http://fishshell.com)
+# you cannot run it directly
+
+function deactivate  -d "Exit virtualenv and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH" 
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+    
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        # set an empty local fish_function_path, so fish_prompt doesn't automatically reload
+        set -l fish_function_path
+        # erase the virtualenv's fish_prompt function, and restore the original
+        functions -e fish_prompt
+        functions -c _old_fish_prompt fish_prompt
+        functions -e _old_fish_prompt
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+    end
+    
+    set -e VIRTUAL_ENV
+    if test "$argv[1]" != "nondestructive"
+        # Self destruct!
+        functions -e deactivate
+    end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__BIN_NAME__" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+    
+    # copy the current fish_prompt function as the function _old_fish_prompt
+    functions -c fish_prompt _old_fish_prompt
+    
+    # with the original prompt function copied, we can override with our own.
+    function fish_prompt
+        # Prompt override?
+        if test -n "__VIRTUAL_PROMPT__"
+            printf "%s%s" "__VIRTUAL_PROMPT__" (set_color normal)
+            _old_fish_prompt
+            return
+        end
+        # ...Otherwise, prepend env
+        set -l _checkbase (basename "$VIRTUAL_ENV")
+        if test $_checkbase = "__"
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) 
+            _old_fish_prompt
+        else
+            printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
+            _old_fish_prompt
+        end
+    end 
+    
+    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.ps1 b/bootstrap/virtualenv/virtualenv_embedded/activate.ps1
new file mode 100644
index 0000000..0f4adf1
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.ps1
@@ -0,0 +1,150 @@
+# This file must be dot sourced from PoSh; you cannot run it
+# directly. Do this: . ./activate.ps1
+
+# FIXME: clean up unused vars.
+$script:THIS_PATH = $myinvocation.mycommand.path
+$script:BASE_DIR = split-path (resolve-path "$THIS_PATH/..") -Parent
+$script:DIR_NAME = split-path $BASE_DIR -Leaf
+
+function global:deactivate ( [switch] $NonDestructive ){
+
+    if ( test-path variable:_OLD_VIRTUAL_PATH ) {
+        $env:PATH = $variable:_OLD_VIRTUAL_PATH
+        remove-variable "_OLD_VIRTUAL_PATH" -scope global
+    }
+
+    if ( test-path function:_old_virtual_prompt ) {
+        $function:prompt = $function:_old_virtual_prompt
+        remove-item function:\_old_virtual_prompt
+    }
+
+    if ($env:VIRTUAL_ENV) {
+        $old_env = split-path $env:VIRTUAL_ENV -leaf
+        remove-item env:VIRTUAL_ENV -erroraction silentlycontinue
+    }
+
+    if ( !$NonDestructive ) {
+        # Self destruct!
+        remove-item function:deactivate
+    }
+}
+
+# unset irrelevant variables
+deactivate -nondestructive
+
+$VIRTUAL_ENV = $BASE_DIR
+$env:VIRTUAL_ENV = $VIRTUAL_ENV
+
+$global:_OLD_VIRTUAL_PATH = $env:PATH
+$env:PATH = "$env:VIRTUAL_ENV/Scripts;" + $env:PATH
+if (! $env:VIRTUAL_ENV_DISABLE_PROMPT) {
+    function global:_old_virtual_prompt { "" }
+    $function:_old_virtual_prompt = $function:prompt
+    function global:prompt {
+        # Add a prefix to the current prompt, but don't discard it.
+        write-host "($(split-path $env:VIRTUAL_ENV -leaf)) " -nonewline
+        & $function:_old_virtual_prompt
+    }
+}
+
+# SIG # Begin signature block
+# MIISeAYJKoZIhvcNAQcCoIISaTCCEmUCAQExCzAJBgUrDgMCGgUAMGkGCisGAQQB
+# gjcCAQSgWzBZMDQGCisGAQQBgjcCAR4wJgIDAQAABBAfzDtgWUsITrck0sYpfvNR
+# AgEAAgEAAgEAAgEAAgEAMCEwCQYFKw4DAhoFAAQUS5reBwSg3zOUwhXf2jPChZzf
+# yPmggg6tMIIGcDCCBFigAwIBAgIBJDANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQG
+# EwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERp
+# Z2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2Vy
+# dGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDcxMDI0MjIwMTQ2WhcNMTcxMDI0MjIw
+# MTQ2WjCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0ZC4xKzAp
+# BgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxODA2BgNV
+# BAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUgT2JqZWN0
+# IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiOLIjUemqAbPJ1J
+# 0D8MlzgWKbr4fYlbRVjvhHDtfhFN6RQxq0PjTQxRgWzwFQNKJCdU5ftKoM5N4YSj
+# Id6ZNavcSa6/McVnhDAQm+8H3HWoD030NVOxbjgD/Ih3HaV3/z9159nnvyxQEckR
+# ZfpJB2Kfk6aHqW3JnSvRe+XVZSufDVCe/vtxGSEwKCaNrsLc9pboUoYIC3oyzWoU
+# TZ65+c0H4paR8c8eK/mC914mBo6N0dQ512/bkSdaeY9YaQpGtW/h/W/FkbQRT3sC
+# pttLVlIjnkuY4r9+zvqhToPjxcfDYEf+XD8VGkAqle8Aa8hQ+M1qGdQjAye8OzbV
+# uUOw7wIDAQABo4IB6TCCAeUwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+# AQYwHQYDVR0OBBYEFNBOD0CZbLhLGW87KLjg44gHNKq3MB8GA1UdIwQYMBaAFE4L
+# 7xqkQFulF2mHMMo0aEPQQa7yMD0GCCsGAQUFBwEBBDEwLzAtBggrBgEFBQcwAoYh
+# aHR0cDovL3d3dy5zdGFydHNzbC5jb20vc2ZzY2EuY3J0MFsGA1UdHwRUMFIwJ6Al
+# oCOGIWh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3Nmc2NhLmNybDAnoCWgI4YhaHR0
+# cDovL2NybC5zdGFydHNzbC5jb20vc2ZzY2EuY3JsMIGABgNVHSAEeTB3MHUGCysG
+# AQQBgbU3AQIBMGYwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L2ludGVybWVkaWF0ZS5wZGYwEQYJYIZIAYb4QgEBBAQDAgABMFAGCWCGSAGG+EIB
+# DQRDFkFTdGFydENvbSBDbGFzcyAyIFByaW1hcnkgSW50ZXJtZWRpYXRlIE9iamVj
+# dCBTaWduaW5nIENlcnRpZmljYXRlczANBgkqhkiG9w0BAQUFAAOCAgEAcnMLA3Va
+# N4OIE9l4QT5OEtZy5PByBit3oHiqQpgVEQo7DHRsjXD5H/IyTivpMikaaeRxIv95
+# baRd4hoUcMwDj4JIjC3WA9FoNFV31SMljEZa66G8RQECdMSSufgfDYu1XQ+cUKxh
+# D3EtLGGcFGjjML7EQv2Iol741rEsycXwIXcryxeiMbU2TPi7X3elbwQMc4JFlJ4B
+# y9FhBzuZB1DV2sN2irGVbC3G/1+S2doPDjL1CaElwRa/T0qkq2vvPxUgryAoCppU
+# FKViw5yoGYC+z1GaesWWiP1eFKAL0wI7IgSvLzU3y1Vp7vsYaxOVBqZtebFTWRHt
+# XjCsFrrQBngt0d33QbQRI5mwgzEp7XJ9xu5d6RVWM4TPRUsd+DDZpBHm9mszvi9g
+# VFb2ZG7qRRXCSqys4+u/NLBPbXi/m/lU00cODQTlC/euwjk9HQtRrXQ/zqsBJS6U
+# J+eLGw1qOfj+HVBl/ZQpfoLk7IoWlRQvRL1s7oirEaqPZUIWY/grXq9r6jDKAp3L
+# ZdKQpPOnnogtqlU4f7/kLjEJhrrc98mrOWmVMK/BuFRAfQ5oDUMnVmCzAzLMjKfG
+# cVW/iMew41yfhgKbwpfzm3LBr1Zv+pEBgcgW6onRLSAn3XHM0eNtz+AkxH6rRf6B
+# 2mYhLEEGLapH8R1AMAo4BbVFOZR5kXcMCwowggg1MIIHHaADAgECAgIEuDANBgkq
+# hkiG9w0BAQUFADCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0
+# ZC4xKzApBgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcx
+# ODA2BgNVBAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUg
+# T2JqZWN0IENBMB4XDTExMTIwMzE1MzQxOVoXDTEzMTIwMzE0NTgwN1owgYwxIDAe
+# BgNVBA0TFzU4MTc5Ni1HaDd4Zkp4a3hRU0lPNEUwMQswCQYDVQQGEwJERTEPMA0G
+# A1UECBMGQmVybGluMQ8wDQYDVQQHEwZCZXJsaW4xFjAUBgNVBAMTDUphbm5pcyBM
+# ZWlkZWwxITAfBgkqhkiG9w0BCQEWEmphbm5pc0BsZWlkZWwuaW5mbzCCAiIwDQYJ
+# KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMcPeABYdN7nPq/AkZ/EkyUBGx/l2Yui
+# Lfm8ZdLG0ulMb/kQL3fRY7sUjYPyn9S6PhqqlFnNoGHJvbbReCdUC9SIQYmOEjEA
+# raHfb7MZU10NjO4U2DdGucj2zuO5tYxKizizOJF0e4yRQZVxpUGdvkW/+GLjCNK5
+# L7mIv3Z1dagxDKHYZT74HXiS4VFUwHF1k36CwfM2vsetdm46bdgSwV+BCMmZICYT
+# IJAS9UQHD7kP4rik3bFWjUx08NtYYFAVOd/HwBnemUmJe4j3IhZHr0k1+eDG8hDH
+# KVvPgLJIoEjC4iMFk5GWsg5z2ngk0LLu3JZMtckHsnnmBPHQK8a3opUNd8hdMNJx
+# gOwKjQt2JZSGUdIEFCKVDqj0FmdnDMPfwy+FNRtpBMl1sz78dUFhSrnM0D8NXrqa
+# 4rG+2FoOXlmm1rb6AFtpjAKksHRpYcPk2DPGWp/1sWB+dUQkS3gOmwFzyqeTuXpT
+# 0juqd3iAxOGx1VRFQ1VHLLf3AzV4wljBau26I+tu7iXxesVucSdsdQu293jwc2kN
+# xK2JyHCoZH+RyytrwS0qw8t7rMOukU9gwP8mn3X6mgWlVUODMcHTULjSiCEtvyZ/
+# aafcwjUbt4ReEcnmuZtWIha86MTCX7U7e+cnpWG4sIHPnvVTaz9rm8RyBkIxtFCB
+# nQ3FnoQgyxeJAgMBAAGjggOdMIIDmTAJBgNVHRMEAjAAMA4GA1UdDwEB/wQEAwIH
+# gDAuBgNVHSUBAf8EJDAiBggrBgEFBQcDAwYKKwYBBAGCNwIBFQYKKwYBBAGCNwoD
+# DTAdBgNVHQ4EFgQUWyCgrIWo8Ifvvm1/YTQIeMU9nc8wHwYDVR0jBBgwFoAU0E4P
+# QJlsuEsZbzsouODjiAc0qrcwggIhBgNVHSAEggIYMIICFDCCAhAGCysGAQQBgbU3
+# AQICMIIB/zAuBggrBgEFBQcCARYiaHR0cDovL3d3dy5zdGFydHNzbC5jb20vcG9s
+# aWN5LnBkZjA0BggrBgEFBQcCARYoaHR0cDovL3d3dy5zdGFydHNzbC5jb20vaW50
+# ZXJtZWRpYXRlLnBkZjCB9wYIKwYBBQUHAgIwgeowJxYgU3RhcnRDb20gQ2VydGlm
+# aWNhdGlvbiBBdXRob3JpdHkwAwIBARqBvlRoaXMgY2VydGlmaWNhdGUgd2FzIGlz
+# c3VlZCBhY2NvcmRpbmcgdG8gdGhlIENsYXNzIDIgVmFsaWRhdGlvbiByZXF1aXJl
+# bWVudHMgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeSwgcmVsaWFuY2Ugb25seSBm
+# b3IgdGhlIGludGVuZGVkIHB1cnBvc2UgaW4gY29tcGxpYW5jZSBvZiB0aGUgcmVs
+# eWluZyBwYXJ0eSBvYmxpZ2F0aW9ucy4wgZwGCCsGAQUFBwICMIGPMCcWIFN0YXJ0
+# Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MAMCAQIaZExpYWJpbGl0eSBhbmQg
+# d2FycmFudGllcyBhcmUgbGltaXRlZCEgU2VlIHNlY3Rpb24gIkxlZ2FsIGFuZCBM
+# aW1pdGF0aW9ucyIgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeS4wNgYDVR0fBC8w
+# LTAroCmgJ4YlaHR0cDovL2NybC5zdGFydHNzbC5jb20vY3J0YzItY3JsLmNybDCB
+# iQYIKwYBBQUHAQEEfTB7MDcGCCsGAQUFBzABhitodHRwOi8vb2NzcC5zdGFydHNz
+# bC5jb20vc3ViL2NsYXNzMi9jb2RlL2NhMEAGCCsGAQUFBzAChjRodHRwOi8vYWlh
+# LnN0YXJ0c3NsLmNvbS9jZXJ0cy9zdWIuY2xhc3MyLmNvZGUuY2EuY3J0MCMGA1Ud
+# EgQcMBqGGGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOC
+# AQEAhrzEV6zwoEtKjnFRhCsjwiPykVpo5Eiye77Ve801rQDiRKgSCCiW6g3HqedL
+# OtaSs65Sj2pm3Viea4KR0TECLcbCTgsdaHqw2x1yXwWBQWZEaV6EB05lIwfr94P1
+# SFpV43zkuc+bbmA3+CRK45LOcCNH5Tqq7VGTCAK5iM7tvHwFlbQRl+I6VEL2mjpF
+# NsuRjDOVrv/9qw/a22YJ9R7Y1D0vUSs3IqZx2KMUaYDP7H2mSRxJO2nADQZBtriF
+# gTyfD3lYV12MlIi5CQwe3QC6DrrfSMP33i5Wa/OFJiQ27WPxmScYVhiqozpImFT4
+# PU9goiBv9RKXdgTmZE1PN0NQ5jGCAzUwggMxAgEBMIGTMIGMMQswCQYDVQQGEwJJ
+# TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0
+# YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg
+# MiBQcmltYXJ5IEludGVybWVkaWF0ZSBPYmplY3QgQ0ECAgS4MAkGBSsOAwIaBQCg
+# eDAYBgorBgEEAYI3AgEMMQowCKACgAChAoAAMBkGCSqGSIb3DQEJAzEMBgorBgEE
+# AYI3AgEEMBwGCisGAQQBgjcCAQsxDjAMBgorBgEEAYI3AgEVMCMGCSqGSIb3DQEJ
+# BDEWBBRVGw0FDSiaIi38dWteRUAg/9Pr6DANBgkqhkiG9w0BAQEFAASCAgCInvOZ
+# FdaNFzbf6trmFDZKMojyx3UjKMCqNjHVBbuKY0qXwFC/ElYDV1ShJ2CBZbdurydO
+# OQ6cIQ0KREOCwmX/xB49IlLHHUxNhEkVv7HGU3EKAFf9IBt9Yr7jikiR9cjIsfHK
+# 4cjkoKJL7g28yEpLLkHt1eo37f1Ga9lDWEa5Zq3U5yX+IwXhrUBm1h8Xr033FhTR
+# VEpuSz6LHtbrL/zgJnCzJ2ahjtJoYevdcWiNXffosJHFaSfYDDbiNsPRDH/1avmb
+# 5j/7BhP8BcBaR6Fp8tFbNGIcWHHGcjqLMnTc4w13b7b4pDhypqElBa4+lCmwdvv9
+# GydYtRgPz8GHeoBoKj30YBlMzRIfFYaIFGIC4Ai3UEXkuH9TxYohVbGm/W0Kl4Lb
+# RJ1FwiVcLcTOJdgNId2vQvKc+jtNrjcg5SP9h2v/C4aTx8tyc6tE3TOPh2f9b8DL
+# S+SbVArJpuJqrPTxDDoO1QNjTgLcdVYeZDE+r/NjaGZ6cMSd8db3EaG3ijD/0bud
+# SItbm/OlNVbQOFRR76D+ZNgPcU5iNZ3bmvQQIg6aSB9MHUpIE/SeCkNl9YeVk1/1
+# GFULgNMRmIYP4KLvu9ylh5Gu3hvD5VNhH6+FlXANwFy07uXks5uF8mfZVxVCnodG
+# xkNCx+6PsrA5Z7WP4pXcmYnMn97npP/Q9EHJWw==
+# SIG # End signature block
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate.sh b/bootstrap/virtualenv/virtualenv_embedded/activate.sh
new file mode 100644
index 0000000..e50c782
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate.sh
@@ -0,0 +1,80 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    unset pydoc
+
+    # reset old environment variables
+    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+        PATH="$_OLD_VIRTUAL_PATH"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
+        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+        hash -r 2>/dev/null
+    fi
+
+    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+        PS1="$_OLD_VIRTUAL_PS1"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="__VIRTUAL_ENV__"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "$PYTHONHOME" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+    unset PYTHONHOME
+fi
+
+if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
+    _OLD_VIRTUAL_PS1="$PS1"
+    if [ "x__VIRTUAL_PROMPT__" != x ] ; then
+        PS1="__VIRTUAL_PROMPT__$PS1"
+    else
+        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+        else
+            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+        fi
+    fi
+    export PS1
+fi
+
+alias pydoc="python -m pydoc"
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+    hash -r 2>/dev/null
+fi
diff --git a/bootstrap/virtualenv/virtualenv_embedded/activate_this.py b/bootstrap/virtualenv/virtualenv_embedded/activate_this.py
new file mode 100644
index 0000000..f18193b
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/activate_this.py
@@ -0,0 +1,34 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv bin/python
+"""
+
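+# Note: execfile() only exists on Python 2; on Python 3 the equivalent is
+#   exec(open(this_file).read(), dict(__file__=this_file))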
+try:
+    __file__
+except NameError:
+    raise AssertionError(
+        "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+old_os_path = os.environ.get('PATH', '')
+os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+if sys.platform == 'win32':
+    site_packages = os.path.join(base, 'Lib', 'site-packages')
+else:
+    site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+    if item not in prev_sys_path:
+        new_sys_path.append(item)
+        sys.path.remove(item)
+sys.path[:0] = new_sys_path
diff --git a/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat b/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat
new file mode 100644
index 0000000..fd4db26
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/deactivate.bat
@@ -0,0 +1,20 @@
+@echo off
+
+set VIRTUAL_ENV=
+
+if defined _OLD_VIRTUAL_PROMPT (
+    set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+    set _OLD_VIRTUAL_PROMPT=
+)
+
+if defined _OLD_VIRTUAL_PYTHONHOME (
+    set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
+    set _OLD_VIRTUAL_PYTHONHOME=
+)
+
+if defined _OLD_VIRTUAL_PATH (
+    set "PATH=%_OLD_VIRTUAL_PATH%"
+    set _OLD_VIRTUAL_PATH=
+)
+
+:END
diff --git a/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py b/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py
new file mode 100644
index 0000000..29fc1da
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/distutils-init.py
@@ -0,0 +1,101 @@
+import os
+import sys
+import warnings
+import imp
+import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
+              # Important! To work on pypy, this must be a module that resides in the
+              # lib-python/modified-x.y.z directory
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+    warnings.warn(
+        "The virtualenv distutils package at %s appears to be in the same "
+        "location as the system distutils?" % distutils_path)
+else:
+    __path__.insert(0, distutils_path)
+    real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
+    # Copy the relevant attributes
+    try:
+        __revision__ = real_distutils.__revision__
+    except AttributeError:
+        pass
+    __version__ = real_distutils.__version__
+
+from distutils import dist, sysconfig
+
+try:
+    basestring
+except NameError:
+    basestring = str
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+    from distutils.command.build_ext import build_ext as old_build_ext
+    class build_ext(old_build_ext):
+        def finalize_options (self):
+            if self.library_dirs is None:
+                self.library_dirs = []
+            elif isinstance(self.library_dirs, basestring):
+                self.library_dirs = self.library_dirs.split(os.pathsep)
+            
+            self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+            old_build_ext.finalize_options(self)
+            
+    from distutils.command import build_ext as build_ext_module 
+    build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
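+## find_config_files is wrapped so that the per-user pydistutils.cfg is looked
+## up under sys.prefix (i.e. inside the virtualenv) instead of the home
+## directory, letting each environment carry its own per-user config.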
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+    found = old_find_config_files(self)
+    system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+    #if os.path.exists(system_distutils):
+    #    found.insert(0, system_distutils)
+    # What to call the per-user config file
+    if os.name == 'posix':
+        user_filename = ".pydistutils.cfg"
+    else:
+        user_filename = "pydistutils.cfg"
+    user_filename = os.path.join(sys.prefix, user_filename)
+    if os.path.isfile(user_filename):
+        for item in list(found):
+            if item.endswith('pydistutils.cfg'):
+                found.remove(item)
+        found.append(user_filename)
+    return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
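+## Each wrapper below redirects the default prefix to sys.real_prefix (the
+## base interpreter), so headers and the standard library resolve outside the
+## virtualenv, where they actually live.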
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+    if prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    if standard_lib and prefix is None:
+        prefix = sys.real_prefix
+    return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+    real_vars = old_get_config_vars(*args)
+    if sys.platform == 'win32':
+        lib_dir = os.path.join(sys.real_prefix, "libs")
+        if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+            real_vars['LIBDIR'] = lib_dir # asked for all
+        elif isinstance(real_vars, list) and 'LIBDIR' in args:
+            real_vars = real_vars + [lib_dir] # asked for list
+    return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
diff --git a/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg b/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg
new file mode 100644
index 0000000..1af230e
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/distutils.cfg
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install.  For instance:
+#
+#   [easy_install]
+#   find_links = http://mylocalsite
diff --git a/bootstrap/virtualenv/virtualenv_embedded/site.py b/bootstrap/virtualenv/virtualenv_embedded/site.py
new file mode 100644
index 0000000..7969769
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_embedded/site.py
@@ -0,0 +1,758 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code.  Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path.  On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages.  On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely).  The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python2.X/site-packages/bar
+  /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+try:
+    import __builtin__ as builtins
+except ImportError:
+    import builtins
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
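+# sys.maxsize exists on Python 2.6+ and 3; older Pythons only have
+# sys.maxint.  A value above 2**32 indicates a 64-bit interpreter.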
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+    ModuleType = type(os)
+
+def makepath(*paths):
+    dir = os.path.join(*paths)
+    if _is_jython and (dir == '__classpath__' or
+                       dir.startswith('__pyclasspath__')):
+        return dir, dir
+    dir = os.path.abspath(dir)
+    return dir, os.path.normcase(dir)
+
+def abs__file__():
+    """Set all module' __file__ attribute to an absolute path"""
+    for m in sys.modules.values():
+        if ((_is_jython and not isinstance(m, ModuleType)) or
+            hasattr(m, '__loader__')):
+            # only modules need the abspath in Jython. and don't mess
+            # with a PEP 302-supplied __file__
+            continue
+        f = getattr(m, '__file__', None)
+        if f is None:
+            continue
+        m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+    """ Remove duplicate entries from sys.path along with making them
+    absolute"""
+    # This ensures that the initial path provided by the interpreter contains
+    # only absolute pathnames, even if we're running from the build directory.
+    L = []
+    known_paths = set()
+    for dir in sys.path:
+        # Filter out duplicate paths (on case-insensitive file systems also
+        # if they only differ in case); turn relative paths into absolute
+        # paths.
+        dir, dircase = makepath(dir)
+        if not dircase in known_paths:
+            L.append(dir)
+            known_paths.add(dircase)
+    sys.path[:] = L
+    return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python.  See http://www.python.org/sf/586680
+def addbuilddir():
+    """Append ./build/lib.<platform> in case we're running in the build dir
+    (especially for Guido :-)"""
+    from distutils.util import get_platform
+    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+    if hasattr(sys, 'gettotalrefcount'):
+        s += '-pydebug'
+    s = os.path.join(os.path.dirname(sys.path[-1]), s)
+    sys.path.append(s)
+
+def _init_pathinfo():
+    """Return a set containing all existing directory entries from sys.path"""
+    d = set()
+    for dir in sys.path:
+        try:
+            if os.path.isdir(dir):
+                dir, dircase = makepath(dir)
+                d.add(dircase)
+        except TypeError:
+            continue
+    return d
+
+def addpackage(sitedir, name, known_paths):
+    """Add a new path to known_paths by combining sitedir and 'name' or execute
+    sitedir if it starts with 'import'"""
+    if known_paths is None:
+        _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    fullname = os.path.join(sitedir, name)
+    try:
+        f = open(fullname, "rU")
+    except IOError:
+        return
+    try:
+        for line in f:
+            if line.startswith("#"):
+                continue
+            if line.startswith("import"):
+                exec(line)
+                continue
+            line = line.rstrip()
+            dir, dircase = makepath(sitedir, line)
+            if not dircase in known_paths and os.path.exists(dir):
+                sys.path.append(dir)
+                known_paths.add(dircase)
+    finally:
+        f.close()
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+    'sitedir'"""
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    sitedir, sitedircase = makepath(sitedir)
+    if not sitedircase in known_paths:
+        sys.path.append(sitedir)        # Add path component
+    try:
+        names = os.listdir(sitedir)
+    except os.error:
+        return
+    names.sort()
+    for name in names:
+        if name.endswith(os.extsep + "pth"):
+            addpackage(sitedir, name, known_paths)
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+    """Add site-packages (and possibly site-python) to sys.path"""
+    prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+    if exec_prefix != sys_prefix:
+        prefixes.append(os.path.join(exec_prefix, "local"))
+
+    for prefix in prefixes:
+        if prefix:
+            if sys.platform in ('os2emx', 'riscos') or _is_jython:
+                sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+            elif _is_pypy:
+                sitedirs = [os.path.join(prefix, 'site-packages')]
+            elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+                if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+                    sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+                                os.path.join(prefix, "Extras", "lib", "python")]
+
+                else: # any other Python distros on OSX work this way
+                    sitedirs = [os.path.join(prefix, "lib",
+                                             "python" + sys.version[:3], "site-packages")]
+
+            elif os.sep == '/':
+                sitedirs = [os.path.join(prefix,
+                                         "lib",
+                                         "python" + sys.version[:3],
+                                         "site-packages"),
+                            os.path.join(prefix, "lib", "site-python"),
+                            os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+                lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+                if (os.path.exists(lib64_dir) and
+                    os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+                    if _is_64bit:
+                        sitedirs.insert(0, lib64_dir)
+                    else:
+                        sitedirs.append(lib64_dir)
+                try:
+                    # sys.getobjects only available in --with-pydebug build
+                    sys.getobjects
+                    sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+                except AttributeError:
+                    pass
+                # Debian-specific dist-packages directories:
+                sitedirs.append(os.path.join(prefix, "local/lib",
+                                             "python" + sys.version[:3],
+                                             "dist-packages"))
+                if sys.version[0] == '2':
+                    sitedirs.append(os.path.join(prefix, "lib",
+                                                 "python" + sys.version[:3],
+                                                 "dist-packages"))
+                else:
+                    sitedirs.append(os.path.join(prefix, "lib",
+                                                 "python" + sys.version[0],
+                                                 "dist-packages"))
+                sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+            else:
+                sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+            for sitedir in sitedirs:
+                if os.path.isdir(sitedir):
+                    addsitedir(sitedir, known_paths)
+    return None
+
+def check_enableusersite():
+    """Check if user site directory is safe for inclusion
+
+    The function tests for the command line flag (including environment var)
+    and that the process uid/gid equal the effective uid/gid.
+
+    None: Disabled for security reasons
+    False: Disabled by user (command line option)
+    True: Safe and enabled
+    """
+    if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+        return False
+
+    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+        # check process uid == effective uid
+        if os.geteuid() != os.getuid():
+            return None
+    if hasattr(os, "getgid") and hasattr(os, "getegid"):
+        # check process gid == effective gid
+        if os.getegid() != os.getgid():
+            return None
+
+    return True
+
+def addusersitepackages(known_paths):
+    """Add a per user site-package to sys.path
+
+    Each user has their own python directory with site-packages in the
+    home directory.
+
+    USER_BASE is the root directory for all Python versions
+
+    USER_SITE is the user specific site-packages directory
+
+    USER_SITE/.. can be used for data.
+    """
+    global USER_BASE, USER_SITE, ENABLE_USER_SITE
+    env_base = os.environ.get("PYTHONUSERBASE", None)
+
+    def joinuser(*args):
+        return os.path.expanduser(os.path.join(*args))
+
+    #if sys.platform in ('os2emx', 'riscos'):
+    #    # Don't know what to put here
+    #    USER_BASE = ''
+    #    USER_SITE = ''
+    if os.name == "nt":
+        base = os.environ.get("APPDATA") or "~"
+        if env_base:
+            USER_BASE = env_base
+        else:
+            USER_BASE = joinuser(base, "Python")
+        USER_SITE = os.path.join(USER_BASE,
+                                 "Python" + sys.version[0] + sys.version[2],
+                                 "site-packages")
+    else:
+        if env_base:
+            USER_BASE = env_base
+        else:
+            USER_BASE = joinuser("~", ".local")
+        USER_SITE = os.path.join(USER_BASE, "lib",
+                                 "python" + sys.version[:3],
+                                 "site-packages")
+
+    if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+        addsitedir(USER_SITE, known_paths)
+    if ENABLE_USER_SITE:
+        for dist_libdir in ("lib", "local/lib"):
+            user_site = os.path.join(USER_BASE, dist_libdir,
+                                     "python" + sys.version[:3],
+                                     "dist-packages")
+            if os.path.isdir(user_site):
+                addsitedir(user_site, known_paths)
+    return known_paths
+
+
+
+def setBEGINLIBPATH():
+    """The OS/2 EMX port has optional extension modules that do double duty
+    as DLLs (and must use the .DLL file extension) for other extensions.
+    The library search path needs to be amended so these will be found
+    during module import.  Use BEGINLIBPATH so that these are at the start
+    of the library search path.
+
+    """
+    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+    libpath = os.environ['BEGINLIBPATH'].split(';')
+    if libpath[-1]:
+        libpath.append(dllpath)
+    else:
+        libpath[-1] = dllpath
+    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+    """Define new built-ins 'quit' and 'exit'.
+    These are objects that print a hint on how to exit when displayed, and
+    raise SystemExit when called.
+
+    """
+    if os.sep == ':':
+        eof = 'Cmd-Q'
+    elif os.sep == '\\':
+        eof = 'Ctrl-Z plus Return'
+    else:
+        eof = 'Ctrl-D (i.e. EOF)'
+
+    class Quitter(object):
+        def __init__(self, name):
+            self.name = name
+        def __repr__(self):
+            return 'Use %s() or %s to exit' % (self.name, eof)
+        def __call__(self, code=None):
+            # Shells like IDLE catch the SystemExit, but listen when their
+            # stdin wrapper is closed.
+            try:
+                sys.stdin.close()
+            except:
+                pass
+            raise SystemExit(code)
+    builtins.quit = Quitter('quit')
+    builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+    """interactive prompt objects for printing the license text, a list of
+    contributors and the copyright notice."""
+
+    MAXLINES = 23
+
+    def __init__(self, name, data, files=(), dirs=()):
+        self.__name = name
+        self.__data = data
+        self.__files = files
+        self.__dirs = dirs
+        self.__lines = None
+
+    def __setup(self):
+        if self.__lines:
+            return
+        data = None
+        for dir in self.__dirs:
+            for filename in self.__files:
+                filename = os.path.join(dir, filename)
+                try:
+                    fp = open(filename, "rU")
+                    data = fp.read()
+                    fp.close()
+                    break
+                except IOError:
+                    pass
+            if data:
+                break
+        if not data:
+            data = self.__data
+        self.__lines = data.split('\n')
+        self.__linecnt = len(self.__lines)
+
+    def __repr__(self):
+        self.__setup()
+        if len(self.__lines) <= self.MAXLINES:
+            return "\n".join(self.__lines)
+        else:
+            return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+    def __call__(self):
+        self.__setup()
+        prompt = 'Hit Return for more, or q (and Return) to quit: '
+        lineno = 0
+        while 1:
+            try:
+                for i in range(lineno, lineno + self.MAXLINES):
+                    print(self.__lines[i])
+            except IndexError:
+                break
+            else:
+                lineno += self.MAXLINES
+                key = None
+                while key is None:
+                    try:
+                        key = raw_input(prompt)
+                    except NameError:
+                        key = input(prompt)
+                    if key not in ('', 'q'):
+                        key = None
+                if key == 'q':
+                    break
+
+def setcopyright():
+    """Set 'copyright' and 'credits' in __builtin__"""
+    builtins.copyright = _Printer("copyright", sys.copyright)
+    if _is_jython:
+        builtins.credits = _Printer(
+            "credits",
+            "Jython is maintained by the Jython developers (www.jython.org).")
+    elif _is_pypy:
+        builtins.credits = _Printer(
+            "credits",
+            "PyPy is maintained by the PyPy developers: http://pypy.org/")
+    else:
+        builtins.credits = _Printer("credits", """\
+    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+    for supporting Python development.  See www.python.org for more information.""")
+    here = os.path.dirname(os.__file__)
+    builtins.license = _Printer(
+        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+        ["LICENSE.txt", "LICENSE"],
+        [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+    """Define the built-in 'help'.
+    This is a wrapper around pydoc.help (with a twist).
+
+    """
+
+    def __repr__(self):
+        return "Type help() for interactive help, " \
+               "or help(object) for help about object."
+    def __call__(self, *args, **kwds):
+        import pydoc
+        return pydoc.help(*args, **kwds)
+
+def sethelper():
+    builtins.help = _Helper()
+
+def aliasmbcs():
+    """On Windows, some default encodings are not provided by Python,
+    while they are always available as "mbcs" in each locale. Make
+    them usable by aliasing to "mbcs" in such a case."""
+    if sys.platform == 'win32':
+        import locale, codecs
+        enc = locale.getdefaultlocale()[1]
+        if enc.startswith('cp'):            # "cp***" ?
+            try:
+                codecs.lookup(enc)
+            except LookupError:
+                import encodings
+                encodings._cache[enc] = encodings._unknown
+                encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+    """Set the string encoding used by the Unicode implementation.  The
+    default is 'ascii', but if you're willing to experiment, you can
+    change this."""
+    encoding = "ascii" # Default value set by _PyUnicode_Init()
+    if 0:
+        # Enable to support locale aware default string encodings.
+        import locale
+        loc = locale.getdefaultlocale()
+        if loc[1]:
+            encoding = loc[1]
+    if 0:
+        # Enable to switch off string to Unicode coercion and implicit
+        # Unicode to string conversion.
+        encoding = "undefined"
+    if encoding != "ascii":
+        # On Non-Unicode builds this will raise an AttributeError...
+        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+    """Run custom site specific code, if available."""
+    try:
+        import sitecustomize
+    except ImportError:
+        pass
+
+def virtual_install_main_packages():
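+    # orig-prefix.txt is written next to this site.py when the environment is
+    # created and records the base interpreter's sys.prefix; the rest of this
+    # function re-adds the base stdlib directories to sys.path.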
+    f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+    sys.real_prefix = f.read().strip()
+    f.close()
+    pos = 2
+    hardcoded_relative_dirs = []
+    if sys.path[0] == '':
+        pos += 1
+    if _is_jython:
+        paths = [os.path.join(sys.real_prefix, 'Lib')]
+    elif _is_pypy:
+        if sys.version_info > (3, 2):
+            cpyver = '%d' % sys.version_info[0]
+        elif sys.pypy_version_info >= (1, 5):
+            cpyver = '%d.%d' % sys.version_info[:2]
+        else:
+            cpyver = '%d.%d.%d' % sys.version_info[:3]
+        paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+                 os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+        if sys.pypy_version_info < (1, 9):
+            paths.insert(1, os.path.join(sys.real_prefix,
+                                         'lib-python', 'modified-%s' % cpyver))
+        hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+        #
+        # This is hardcoded in the Python executable, but relative to sys.prefix:
+        for path in paths[:]:
+            plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+            if os.path.exists(plat_path):
+                paths.append(plat_path)
+    elif sys.platform == 'win32':
+        paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+    else:
+        paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+        hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+        lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+        if os.path.exists(lib64_path):
+            if _is_64bit:
+                paths.insert(0, lib64_path)
+            else:
+                paths.append(lib64_path)
+        # This is hardcoded in the Python executable, but relative to
+        # sys.prefix.  Debian change: we need to add the multiarch triplet
+        # here, which is where the real stuff lives.  As per PEP 421, in
+        # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+        # it lives in sys.
+        try:
+            arch = getattr(sys, 'implementation', sys)._multiarch
+        except AttributeError:
+            # This is a non-multiarch aware Python.  Fallback to the old way.
+            arch = sys.platform
+        plat_path = os.path.join(sys.real_prefix, 'lib',
+                                 'python'+sys.version[:3],
+                                 'plat-%s' % arch)
+        if os.path.exists(plat_path):
+            paths.append(plat_path)
+    # This is hardcoded in the Python executable, but
+    # relative to sys.prefix, so we have to fix up:
+    for path in list(paths):
+        tk_dir = os.path.join(path, 'lib-tk')
+        if os.path.exists(tk_dir):
+            paths.append(tk_dir)
+
+    # These are hardcoded in the Apple's Python executable,
+    # but relative to sys.prefix, so we have to fix them up:
+    if sys.platform == 'darwin':
+        hardcoded_paths = [os.path.join(relative_dir, module)
+                           for relative_dir in hardcoded_relative_dirs
+                           for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+        for path in hardcoded_paths:
+            if os.path.exists(path):
+                paths.append(path)
+
+    sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+    """
+    Force easy_installed eggs in the global environment to get placed
+    in sys.path after all packages inside the virtualenv.  This
+    maintains the "least surprise" result that packages in the
+    virtualenv always mask global packages, never the other way
+    around.
+
+    """
+    egginsert = getattr(sys, '__egginsert', 0)
+    for i, path in enumerate(sys.path):
+        if i > egginsert and path.startswith(sys.prefix):
+            egginsert = i
+    sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+    force_global_eggs_after_local_site_packages()
+    return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+    """Adjust the special classpath sys.path entries for Jython. These
+    entries should follow the base virtualenv lib directories.
+    """
+    paths = []
+    classpaths = []
+    for path in sys.path:
+        if path == '__classpath__' or path.startswith('__pyclasspath__'):
+            classpaths.append(path)
+        else:
+            paths.append(path)
+    sys.path = paths
+    sys.path.extend(classpaths)
+
+def execusercustomize():
+    """Run custom user specific code, if available."""
+    try:
+        import usercustomize
+    except ImportError:
+        pass
+
+
+def main():
+    global ENABLE_USER_SITE
+    virtual_install_main_packages()
+    abs__file__()
+    paths_in_sys = removeduppaths()
+    if (os.name == "posix" and sys.path and
+        os.path.basename(sys.path[-1]) == "Modules"):
+        addbuilddir()
+    if _is_jython:
+        fixclasspath()
+    GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+    if not GLOBAL_SITE_PACKAGES:
+        ENABLE_USER_SITE = False
+    if ENABLE_USER_SITE is None:
+        ENABLE_USER_SITE = check_enableusersite()
+    paths_in_sys = addsitepackages(paths_in_sys)
+    paths_in_sys = addusersitepackages(paths_in_sys)
+    if GLOBAL_SITE_PACKAGES:
+        paths_in_sys = virtual_addsitepackages(paths_in_sys)
+    if sys.platform == 'os2emx':
+        setBEGINLIBPATH()
+    setquit()
+    setcopyright()
+    sethelper()
+    aliasmbcs()
+    setencoding()
+    execsitecustomize()
+    if ENABLE_USER_SITE:
+        execusercustomize()
+    # Remove sys.setdefaultencoding() so that users cannot change the
+    # encoding after initialization.  The test for presence is needed when
+    # this module is run as a script, because this code is executed twice.
+    if hasattr(sys, "setdefaultencoding"):
+        del sys.setdefaultencoding
+
+main()
+
+def _script():
+    help = """\
+    %s [--user-base] [--user-site]
+
+    Without arguments print some useful information
+    With arguments print the value of USER_BASE and/or USER_SITE separated
+    by '%s'.
+
+    Exit codes with --user-base or --user-site:
+      0 - user site directory is enabled
+      1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+          or for security reasons
+     >2 - unknown error
+    """
+    args = sys.argv[1:]
+    if not args:
+        print("sys.path = [")
+        for dir in sys.path:
+            print("    %r," % (dir,))
+        print("]")
+        def exists(path):
+            if os.path.isdir(path):
+                return "exists"
+            else:
+                return "doesn't exist"
+        print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+        print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))
+        print("ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE)
+        sys.exit(0)
+
+    buffer = []
+    if '--user-base' in args:
+        buffer.append(USER_BASE)
+    if '--user-site' in args:
+        buffer.append(USER_SITE)
+
+    if buffer:
+        print(os.pathsep.join(buffer))
+        if ENABLE_USER_SITE:
+            sys.exit(0)
+        elif ENABLE_USER_SITE is False:
+            sys.exit(1)
+        elif ENABLE_USER_SITE is None:
+            sys.exit(2)
+        else:
+            sys.exit(3)
+    else:
+        import textwrap
+        print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+        sys.exit(10)
+
+if __name__ == '__main__':
+    _script()
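
For reference, the exit-code contract documented in _script() above lends
itself to scripted checks. A minimal sketch, assuming a stock interpreter
where `python -m site` routes to this module:

    import subprocess
    import sys

    # Exit codes per _script(): 0 = user site enabled, 1 = disabled by
    # the user, 2 = disabled by the super user or for security reasons.
    proc = subprocess.Popen([sys.executable, '-m', 'site', '--user-site'],
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    print 'USER_SITE=%s enabled=%s' % (out.strip(), proc.returncode == 0)
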
diff --git a/bootstrap/virtualenv/virtualenv_support/__init__.py b/bootstrap/virtualenv/virtualenv_support/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/__init__.py
diff --git a/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl b/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl
new file mode 100644
index 0000000..34c8ff1
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/pip-6.0-py2.py3-none-any.whl
Binary files differ
diff --git a/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl b/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl
new file mode 100644
index 0000000..fa3a6a5
--- /dev/null
+++ b/bootstrap/virtualenv/virtualenv_support/setuptools-8.2.1-py2.py3-none-any.whl
Binary files differ
diff --git a/buildbucket.py b/buildbucket.py
new file mode 100755
index 0000000..f00cc8f
--- /dev/null
+++ b/buildbucket.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tool for interacting with Buildbucket.
+
+Usage:
+  $ depot-tools-auth login https://cr-buildbucket.appspot.com
+  $ buildbucket.py \
+    put \
+    --bucket master.tryserver.chromium.linux \
+    --builder my-builder
+
+  Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
+"""
+
+import argparse
+import json
+import urlparse
+import os
+import sys
+
+from third_party import httplib2
+
+import auth
+
+
+BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
+PUT_BUILD_URL = urlparse.urljoin(
+  BUILDBUCKET_URL,
+  '_ah/api/buildbucket/v1/builds',
+)
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+    '-v',
+    '--verbose',
+    action='store_true',
+  )
+  subparsers = parser.add_subparsers(dest='command')
+  put_parser = subparsers.add_parser('put')
+  put_parser.add_argument(
+    '-b',
+    '--bucket',
+    help=(
+      'The bucket to schedule the build on. Typically the master name, e.g.'
+      ' master.tryserver.chromium.linux.'
+    ),
+    required=True,
+  )
+  put_parser.add_argument(
+    '-c',
+    '--changes',
+    help='A file to load a JSON list of change dicts from.',
+  )
+  put_parser.add_argument(
+    '-n',
+    '--builder-name',
+    help='The builder to schedule the build on.',
+    required=True,
+  )
+  put_parser.add_argument(
+    '-p',
+    '--properties',
+    help='A file to load a JSON dict of properties from.',
+  )
+  args = parser.parse_args()
+  # TODO(smut): When more commands are implemented, refactor this.
+  assert args.command == 'put'
+
+  changes = []
+  if args.changes:
+    try:
+      with open(args.changes) as fp:
+        changes.extend(json.load(fp))
+    except (TypeError, ValueError):
+      sys.stderr.write('%s contained invalid JSON list.\n' % args.changes)
+      raise
+
+  properties = {}
+  if args.properties:
+    try:
+      with open(args.properties) as fp:
+        properties.update(json.load(fp))
+    except (TypeError, ValueError):
+      sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
+      raise
+
+  authenticator = auth.get_authenticator_for_host(
+    BUILDBUCKET_URL,
+    auth.make_auth_config(use_oauth2=True),
+  )
+  http = authenticator.authorize(httplib2.Http())
+  http.force_exception_to_status_code = True
+  response, content = http.request(
+    PUT_BUILD_URL,
+    'PUT',
+    body=json.dumps({
+      'bucket': args.bucket,
+      'parameters_json': json.dumps({
+        'builder_name': args.builder_name,
+        'changes': changes,
+        'properties': properties,
+      }),
+    }),
+    headers={'Content-Type': 'application/json'},
+  )
+
+  if args.verbose:
+    print content
+
+  return response.status != 200
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
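
For clarity, this is the request body the `put` command above assembles;
the bucket and builder names here are hypothetical examples:

    import json

    # Sketch of the PUT body sent to the buildbucket API: the build
    # parameters are themselves JSON-encoded inside 'parameters_json'.
    body = json.dumps({
      'bucket': 'master.tryserver.chromium.linux',
      'parameters_json': json.dumps({
        'builder_name': 'my-builder',
        'changes': [],
        'properties': {},
      }),
    })
    print body
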
diff --git a/checkout.py b/checkout.py
index 5dbb375..8d5ccce 100644
--- a/checkout.py
+++ b/checkout.py
@@ -676,7 +676,7 @@
           else:
             # No need to do anything special with p.is_new or if not
             # p.diff_hunks. git apply manages all that already.
-            cmd = ['apply', '--index', '-p%s' % p.patchlevel]
+            cmd = ['apply', '--index', '-3', '-p%s' % p.patchlevel]
             if verbose:
               cmd.append('--verbose')
             stdout.append(self._check_output_git(cmd, stdin=p.get(True)))
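
The new -3 flag above makes git attempt a three-way merge when a hunk no
longer applies cleanly, instead of rejecting the whole patch. A standalone
sketch of the equivalent invocation (patch text and path supplied by the
caller):

    import subprocess

    def apply_patch(patch_text, cwd):
      # '--index' updates both the index and the working tree; '-3'
      # enables the three-way-merge fallback the change above turns on.
      cmd = ['git', 'apply', '--index', '-3', '-p1']
      proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, cwd=cwd)
      proc.communicate(patch_text)
      return proc.returncode
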
diff --git a/chrome-update.py b/chrome-update.py
index 4f5731c..4fb11e5 100755
--- a/chrome-update.py
+++ b/chrome-update.py
@@ -60,7 +60,7 @@
   return subprocess.call(cmd, cwd=chrome_root, shell=IS_WIN)
 
 
-def Main(args):
+def main(args):
   if len(args) < 3:
     print('Usage: chrome-update.py <path> [options]')
     print('See options from compile.py at')
@@ -84,4 +84,8 @@
 
 
 if __name__ == "__main__":
-  sys.exit(Main(sys.argv))
+  try:
+    sys.exit(main(sys.argv))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/cit b/cit
new file mode 100755
index 0000000..410341f
--- /dev/null
+++ b/cit
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/cit.py" "$@"
diff --git a/cit.bat b/cit.bat
new file mode 100755
index 0000000..a1da0dd
--- /dev/null
+++ b/cit.bat
@@ -0,0 +1,11 @@
+@echo off
+:: Copyright (c) 2015 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+setlocal
+
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+
+:: Defer control.
+%~dp0python "%~dp0\cit.py" %*
diff --git a/cit.py b/cit.py
new file mode 100755
index 0000000..5210cee
--- /dev/null
+++ b/cit.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Wrapper for updating and calling infra.git tools.
+
+This tool does two things:
+* Maintains an infra.git checkout pinned at "deployed" in the home dir
+* Acts as an alias to infra.tools.*
+"""
+
+# TODO(hinoka): Use cipd/glyco instead of git/gclient.
+
+import sys
+import os
+import subprocess
+import re
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+GCLIENT = os.path.join(SCRIPT_DIR, 'gclient.py')
+TARGET_DIR = os.path.expanduser('~/.chrome-infra')
+INFRA_DIR = os.path.join(TARGET_DIR, 'infra')
+
+
+def get_git_rev(target, branch):
+  return subprocess.check_output(
+      ['git', 'log', '--format=%B', '-n1', branch], cwd=target)
+
+
+def need_to_update():
+  """Checks to see if we need to update the ~/.chrome-infra/infra checkout."""
+  try:
+    cmd = [sys.executable, GCLIENT, 'revinfo']
+    subprocess.check_call(
+        cmd, cwd=os.path.join(TARGET_DIR), stdout=subprocess.PIPE)
+  except subprocess.CalledProcessError:
+    return True  # Gclient failed, definitely need to update.
+  except OSError:
+    return True  # Gclient failed, definitely need to update.
+
+  local_rev = get_git_rev(INFRA_DIR, 'HEAD')
+
+  subprocess.check_call(
+      ['git', 'fetch', 'origin'], cwd=INFRA_DIR,
+      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  origin_rev = get_git_rev(INFRA_DIR, 'origin/deployed')
+  return origin_rev != local_rev
+
+
+def ensure_infra():
+  """Ensures that infra.git is present in ~/.chrome-infra."""
+  print 'Fetching infra into %s, may take a couple of minutes...' % TARGET_DIR
+  if not os.path.isdir(TARGET_DIR):
+    os.mkdir(TARGET_DIR)
+  if not os.path.exists(os.path.join(TARGET_DIR, '.gclient')):
+    subprocess.check_call(
+        [sys.executable, os.path.join(SCRIPT_DIR, 'fetch.py'), 'infra'],
+        cwd=TARGET_DIR,
+        stdout=subprocess.PIPE)
+  subprocess.check_call(
+      [sys.executable, GCLIENT, 'sync', '--revision', 'origin/deployed'],
+      cwd=TARGET_DIR,
+      stdout=subprocess.PIPE)
+
+
+def get_available_tools():
+  tools = []
+  starting = os.path.join(TARGET_DIR, 'infra', 'infra', 'tools')
+  for root, _, files in os.walk(starting):
+    if '__main__.py' in files:
+      tools.append(root[len(starting)+1:].replace(os.path.sep, '.'))
+  return tools
+
+
+def run(args):
+  if args:
+    tool_name = args[0]
+    cmd = [
+        sys.executable, os.path.join(TARGET_DIR, 'infra', 'run.py'),
+        'infra.tools.%s' % tool_name]
+    cmd.extend(args[1:])
+    return subprocess.call(cmd)
+
+  tools = get_available_tools()
+  print """usage: cit.py <name of tool> [args for tool]
+
+  Wrapper for maintaining and calling tools in "infra.git/run.py infra.tools.*"
+
+  Available tools are:
+  """
+  for tool in tools:
+    print '  * %s' % tool
+
+
+def main():
+  if need_to_update():
+    ensure_infra()
+  return run(sys.argv[1:])
+
+if __name__ == '__main__':
+  sys.exit(main())
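
To illustrate the aliasing behavior, here is how run() above maps a
command line onto an infra.git invocation; 'my_tool' is a hypothetical
tool name:

    import os
    import sys

    def build_cmd(args, target_dir=os.path.expanduser('~/.chrome-infra')):
      # 'cit.py my_tool --flag' forwards to run.py as infra.tools.my_tool.
      return ([sys.executable, os.path.join(target_dir, 'infra', 'run.py'),
               'infra.tools.%s' % args[0]] + list(args[1:]))

    print build_cmd(['my_tool', '--flag'])
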
diff --git a/clang_format.py b/clang_format.py
index 8320e6d..5bfeb1a 100755
--- a/clang_format.py
+++ b/clang_format.py
@@ -62,8 +62,12 @@
   if any(match in args for match in help_syntax):
     print '\nDepot tools redirects you to the clang-format at:\n    %s\n' % tool
 
-  return subprocess.call([tool] + sys.argv[1:])
+  return subprocess.call([tool] + args)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/commit_queue b/commit_queue
new file mode 100755
index 0000000..a634371
--- /dev/null
+++ b/commit_queue
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
+  "$base_dir"/update_depot_tools "$@"
+fi
+
+PYTHONDONTWRITEBYTECODE=1 exec "$base_dir/ENV/bin/python" "$base_dir/commit_queue.py" "$@"
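
The `#grep#fetch#cleanup#diff#` test above is a substring membership
check: the wrapper skips the depot_tools self-update for read-only
subcommands. A Python rendering of the same check, as a sketch:

    READ_ONLY = ('grep', 'fetch', 'cleanup', 'diff')

    def needs_update(subcommand):
      # Mirrors the bash test: only non-read-only commands trigger
      # update_depot_tools before the real command runs.
      return subcommand not in READ_ONLY

    print needs_update('diff')  # False
    print needs_update('set')   # True
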
diff --git a/commit_queue.bat b/commit_queue.bat
new file mode 100755
index 0000000..76018a6
--- /dev/null
+++ b/commit_queue.bat
@@ -0,0 +1,14 @@
+@echo off
+:: Copyright 2015 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+setlocal
+
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+
+:: Synchronize the root directory before deferring control back to gclient.py.
+call "%~dp0\update_depot_tools.bat" %*
+
+:: Defer control.
+%~dp0\ENV\bin\python "%~dp0\commit_queue.py" %*
diff --git a/commit_queue.py b/commit_queue.py
index 7639065..25ddb38 100755
--- a/commit_queue.py
+++ b/commit_queue.py
@@ -9,6 +9,7 @@
 __version__ = '0.1'
 
 import functools
+import json
 import logging
 import optparse
 import os
@@ -17,9 +18,16 @@
 
 import breakpad  # pylint: disable=W0611
 
+import auth
 import fix_encoding
 import rietveld
 
+THIRD_PARTY_DIR = os.path.join(os.path.dirname(__file__), 'third_party')
+sys.path.insert(0, THIRD_PARTY_DIR)
+
+from cq_client import cq_pb2
+from cq_client import validate_config
+from protobuf26 import text_format
 
 def usage(more):
   def hook(fn):
@@ -36,9 +44,10 @@
 
     def new_parse_args(args=None, values=None):
       options, args = old_parse_args(args, values)
+      auth_config = auth.extract_auth_config_from_options(options)
       if not options.issue:
         parser.error('Require --issue')
-      obj = rietveld.Rietveld(options.server, options.user, None)
+      obj = rietveld.Rietveld(options.server, auth_config, options.user)
       return options, args, obj
 
     parser.parse_args = new_parse_args
@@ -59,6 +68,7 @@
         metavar='S',
         default='http://codereview.chromium.org',
         help='Rietveld server, default: %default')
+    auth.add_auth_options(parser)
 
     # Call the original function with the modified parser.
     return fn(parser, args, *extra_args, **kwargs)
@@ -99,6 +109,60 @@
   return set_commit(obj, options.issue, '0')
 
 
+def CMDbuilders(parser, args):
+  """Prints json-formatted list of builders given a path to cq.cfg file.
+
+  The output is a dictionary in the following format:
+    {
+      'master_name': [
+        'builder_name',
+        'another_builder'
+      ],
+      'another_master': [
+        'third_builder'
+      ]
+    }
+  """
+  _, args = parser.parse_args(args)
+  if len(args) != 1:
+    parser.error('Expected a single path to CQ config. Got: %s' %
+                 ' '.join(args))
+
+  with open(args[0]) as config_file:
+    cq_config = config_file.read()
+
+  config = cq_pb2.Config()
+  text_format.Merge(cq_config, config)
+  masters = {}
+  if config.HasField('verifiers') and config.verifiers.HasField('try_job'):
+    for bucket in config.verifiers.try_job.buckets:
+      masters.setdefault(bucket.name, [])
+      for builder in bucket.builders:
+        if not builder.HasField('experiment_percentage'):
+          masters[bucket.name].append(builder.name)
+  print json.dumps(masters)
+
+CMDbuilders.func_usage_more = '<path-to-cq-config>'
+
+
+def CMDvalidate(parser, args):
+  """Validates a CQ config.
+
+  Takes a single argument - path to the CQ config to be validated. Returns 0 on
+  valid config, non-zero on invalid config. Errors and warnings are printed to
+  screen.
+  """
+  _, args = parser.parse_args(args)
+  if len(args) != 1:
+    parser.error('Expected a single path to CQ config. Got: %s' %
+                 ' '.join(args))
+
+  with open(args[0]) as config_file:
+    cq_config = config_file.read()
+  return 0 if validate_config.IsValid(cq_config) else 1
+
+CMDvalidate.func_usage_more = '<path-to-cq-config>'
+
 ###############################################################################
 ## Boilerplate code
 
@@ -181,4 +245,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
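
As a concrete illustration of CMDbuilders above, here is a cq.cfg
fragment with the fields the command reads; the schema is assumed from
the cq_pb2 usage in the code and the bucket/builder names are
hypothetical. Builders with an experiment_percentage are skipped:

    # A sketch of the input and expected output of CMDbuilders.
    CQ_CFG = '''
    verifiers {
      try_job {
        buckets {
          name: "master.tryserver.chromium.linux"
          builders { name: "linux_rel" }
          builders { name: "linux_experimental" experiment_percentage: 10 }
        }
      }
    }
    '''
    # Expected output:
    #   {"master.tryserver.chromium.linux": ["linux_rel"]}
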
diff --git a/cpplint.py b/cpplint.py
index 3011345..ccc25d4 100755
--- a/cpplint.py
+++ b/cpplint.py
@@ -175,71 +175,77 @@
 # If you add a new error message with a new category, add it to the list
 # here!  cpplint_unittest.py should tell you if you forget to do this.
 _ERROR_CATEGORIES = [
-  'build/class',
-  'build/c++11',
-  'build/deprecated',
-  'build/endif_comment',
-  'build/explicit_make_pair',
-  'build/forward_decl',
-  'build/header_guard',
-  'build/include',
-  'build/include_alpha',
-  'build/include_order',
-  'build/include_what_you_use',
-  'build/namespaces',
-  'build/printf_format',
-  'build/storage_class',
-  'legal/copyright',
-  'readability/alt_tokens',
-  'readability/braces',
-  'readability/casting',
-  'readability/check',
-  'readability/constructors',
-  'readability/fn_size',
-  'readability/function',
-  'readability/inheritance',
-  'readability/multiline_comment',
-  'readability/multiline_string',
-  'readability/namespace',
-  'readability/nolint',
-  'readability/nul',
-  'readability/streams',
-  'readability/todo',
-  'readability/utf8',
-  'runtime/arrays',
-  'runtime/casting',
-  'runtime/explicit',
-  'runtime/int',
-  'runtime/init',
-  'runtime/invalid_increment',
-  'runtime/member_string_references',
-  'runtime/memset',
-  'runtime/indentation_namespace',
-  'runtime/operator',
-  'runtime/printf',
-  'runtime/printf_format',
-  'runtime/references',
-  'runtime/string',
-  'runtime/threadsafe_fn',
-  'runtime/vlog',
-  'whitespace/blank_line',
-  'whitespace/braces',
-  'whitespace/comma',
-  'whitespace/comments',
-  'whitespace/empty_conditional_body',
-  'whitespace/empty_loop_body',
-  'whitespace/end_of_line',
-  'whitespace/ending_newline',
-  'whitespace/forcolon',
-  'whitespace/indent',
-  'whitespace/line_length',
-  'whitespace/newline',
-  'whitespace/operators',
-  'whitespace/parens',
-  'whitespace/semicolon',
-  'whitespace/tab',
-  'whitespace/todo'
-  ]
+    'build/class',
+    'build/c++11',
+    'build/deprecated',
+    'build/endif_comment',
+    'build/explicit_make_pair',
+    'build/forward_decl',
+    'build/header_guard',
+    'build/include',
+    'build/include_alpha',
+    'build/include_order',
+    'build/include_what_you_use',
+    'build/namespaces',
+    'build/printf_format',
+    'build/storage_class',
+    'legal/copyright',
+    'readability/alt_tokens',
+    'readability/braces',
+    'readability/casting',
+    'readability/check',
+    'readability/constructors',
+    'readability/fn_size',
+    'readability/function',
+    'readability/inheritance',
+    'readability/multiline_comment',
+    'readability/multiline_string',
+    'readability/namespace',
+    'readability/nolint',
+    'readability/nul',
+    'readability/strings',
+    'readability/todo',
+    'readability/utf8',
+    'runtime/arrays',
+    'runtime/casting',
+    'runtime/explicit',
+    'runtime/int',
+    'runtime/init',
+    'runtime/invalid_increment',
+    'runtime/member_string_references',
+    'runtime/memset',
+    'runtime/indentation_namespace',
+    'runtime/operator',
+    'runtime/printf',
+    'runtime/printf_format',
+    'runtime/references',
+    'runtime/string',
+    'runtime/threadsafe_fn',
+    'runtime/vlog',
+    'whitespace/blank_line',
+    'whitespace/braces',
+    'whitespace/comma',
+    'whitespace/comments',
+    'whitespace/empty_conditional_body',
+    'whitespace/empty_loop_body',
+    'whitespace/end_of_line',
+    'whitespace/ending_newline',
+    'whitespace/forcolon',
+    'whitespace/indent',
+    'whitespace/line_length',
+    'whitespace/newline',
+    'whitespace/operators',
+    'whitespace/parens',
+    'whitespace/semicolon',
+    'whitespace/tab',
+    'whitespace/todo',
+    ]
+
+# These error categories are no longer enforced by cpplint, but for backwards-
+# compatibility they may still appear in NOLINT comments.
+_LEGACY_ERROR_CATEGORIES = [
+    'readability/streams',
+    ]
 
 # The default state of the category filter. This is overridden by the --filter=
 # flag. By default all errors are on, so only add here categories that should be
@@ -522,7 +528,7 @@
         category = category[1:-1]
         if category in _ERROR_CATEGORIES:
           _error_suppressions.setdefault(category, set()).add(suppressed_line)
-        else:
+        elif category not in _LEGACY_ERROR_CATEGORIES:
           error(filename, linenum, 'readability/nolint', 5,
                 'Unknown NOLINT error category: %s' % category)
 
@@ -690,7 +696,7 @@
     # If previous line was a blank line, assume that the headers are
     # intentionally sorted the way they are.
     if (self._last_header > header_path and
-        not Match(r'^\s*$', clean_lines.elided[linenum - 1])):
+        Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
       return False
     return True
 
@@ -1246,7 +1252,7 @@
   # Having // dummy comments makes the lines non-empty, so we will not get
   # unnecessary blank line warnings later in the code.
   for i in range(begin, end):
-    lines[i] = '// dummy'
+    lines[i] = '/**/'
 
 
 def RemoveMultiLineComments(filename, lines, error):
@@ -1282,12 +1288,14 @@
 
 
 class CleansedLines(object):
-  """Holds 3 copies of all lines with different preprocessing applied to them.
+  """Holds 4 copies of all lines with different preprocessing applied to them.
 
-  1) elided member contains lines without strings and comments,
-  2) lines member contains lines without comments, and
+  1) elided member contains lines without strings and comments.
+  2) lines member contains lines without comments.
   3) raw_lines member contains all the lines without processing.
-  All these three members are of <type 'list'>, and of the same length.
+  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
+     strings removed.
+  All these members are of <type 'list'>, and of the same length.
   """
 
   def __init__(self, lines):
@@ -1656,15 +1664,17 @@
   # flymake.
   filename = re.sub(r'_flymake\.h$', '.h', filename)
   filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
-
+  # Replace 'c++' with 'cpp'.
+  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
+
   fileinfo = FileInfo(filename)
   file_path_from_root = fileinfo.RepositoryName()
   if _root:
     file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root)
-  return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
+  return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
 
 
-def CheckForHeaderGuard(filename, lines, error):
+def CheckForHeaderGuard(filename, clean_lines, error):
   """Checks that the file contains a header guard.
 
   Logs an error if no #ifndef header guard is present.  For other
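
The regex change above means every non-alphanumeric character in the
repository-relative path now maps to an underscore, not just dashes, dots,
slashes, and whitespace. A quick sketch of the new behavior (example path
is hypothetical):

    import re

    def guard(file_path_from_root):
      # Any non-alphanumeric character becomes '_', then uppercase + '_'.
      return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'

    print guard('chrome/browser/foo-bar.h')  # CHROME_BROWSER_FOO_BAR_H_
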
@@ -1672,7 +1682,7 @@
 
   Args:
     filename: The name of the C++ header file.
-    lines: An array of strings, each representing a line of the file.
+    clean_lines: A CleansedLines instance containing the file.
     error: The function to call with any errors found.
   """
 
@@ -1682,18 +1692,19 @@
   # Because this is silencing a warning for a nonexistent line, we
   # only support the very specific NOLINT(build/header_guard) syntax,
   # and not the general NOLINT or NOLINT(*) syntax.
-  for i in lines:
+  raw_lines = clean_lines.lines_without_raw_strings
+  for i in raw_lines:
     if Search(r'//\s*NOLINT\(build/header_guard\)', i):
       return
 
   cppvar = GetHeaderGuardCPPVariable(filename)
 
-  ifndef = None
+  ifndef = ''
   ifndef_linenum = 0
-  define = None
-  endif = None
+  define = ''
+  endif = ''
   endif_linenum = 0
-  for linenum, line in enumerate(lines):
+  for linenum, line in enumerate(raw_lines):
     linesplit = line.split()
     if len(linesplit) >= 2:
       # find the first occurrence of #ifndef and #define, save arg
@@ -1708,18 +1719,12 @@
       endif = line
       endif_linenum = linenum
 
-  if not ifndef:
+  if not ifndef or not define or ifndef != define:
     error(filename, 0, 'build/header_guard', 5,
           'No #ifndef header guard found, suggested CPP variable is: %s' %
           cppvar)
     return
 
-  if not define:
-    error(filename, 0, 'build/header_guard', 5,
-          'No #define header guard found, suggested CPP variable is: %s' %
-          cppvar)
-    return
-
   # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
   # for backward compatibility.
   if ifndef != cppvar:
@@ -1727,26 +1732,69 @@
     if ifndef != cppvar + '_':
       error_level = 5
 
-    ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum,
+    ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
                             error)
     error(filename, ifndef_linenum, 'build/header_guard', error_level,
           '#ifndef header guard has wrong style, please use: %s' % cppvar)
 
-  if define != ifndef:
-    error(filename, 0, 'build/header_guard', 5,
-          '#ifndef and #define don\'t match, suggested CPP variable is: %s' %
-          cppvar)
+  # Check for "//" comments on endif line.
+  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
+                          error)
+  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
+  if match:
+    if match.group(1) == '_':
+      # Issue low severity warning for deprecated double trailing underscore
+      error(filename, endif_linenum, 'build/header_guard', 0,
+            '#endif line should be "#endif  // %s"' % cppvar)
     return
 
-  if endif != ('#endif  // %s' % cppvar):
-    error_level = 0
-    if endif != ('#endif  // %s' % (cppvar + '_')):
-      error_level = 5
+  # Didn't find the corresponding "//" comment.  If this file does not
+  # contain any "//" comments at all, it could be that the compiler
+  # only wants "/**/" comments, look for those instead.
+  no_single_line_comments = True
+  for i in xrange(1, len(raw_lines) - 1):
+    line = raw_lines[i]
+    if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
+      no_single_line_comments = False
+      break
 
-    ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
-                            error)
-    error(filename, endif_linenum, 'build/header_guard', error_level,
-          '#endif line should be "#endif  // %s"' % cppvar)
+  if no_single_line_comments:
+    match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
+    if match:
+      if match.group(1) == '_':
+        # Low severity warning for double trailing underscore
+        error(filename, endif_linenum, 'build/header_guard', 0,
+              '#endif line should be "#endif  /* %s */"' % cppvar)
+      return
+
+  # Didn't find anything
+  error(filename, endif_linenum, 'build/header_guard', 5,
+        '#endif line should be "#endif  // %s"' % cppvar)
+
+
+def CheckHeaderFileIncluded(filename, include_state, error):
+  """Logs an error if a .cc file does not include its header."""
+
+  # Do not check test files
+  if filename.endswith('_test.cc') or filename.endswith('_unittest.cc'):
+    return
+
+  fileinfo = FileInfo(filename)
+  headerfile = filename[0:len(filename) - 2] + 'h'
+  if not os.path.exists(headerfile):
+    return
+  headername = FileInfo(headerfile).RepositoryName()
+  first_include = 0
+  for section_list in include_state.include_list:
+    for f in section_list:
+      if headername in f[0] or f[0] in headername:
+        return
+      if not first_include:
+        first_include = f[1]
+
+  error(filename, first_include, 'build/include', 5,
+        '%s should include its header file %s' % (fileinfo.RepositoryName(),
+                                                  headername))
 
 
 def CheckForBadCharacters(filename, lines, error):
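
The header-matching step in CheckHeaderFileIncluded above reduces to a
simple filename transform; a sketch with a hypothetical path:

    # A file 'foo.cc' is expected to include the sibling header 'foo.h'.
    filename = 'base/strings/string_util.cc'
    headerfile = filename[0:len(filename) - 2] + 'h'
    print headerfile  # base/strings/string_util.h
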
@@ -2042,6 +2090,23 @@
       self.is_derived = True
 
   def CheckEnd(self, filename, clean_lines, linenum, error):
+    # If there is a DISALLOW macro, it should appear near the end of
+    # the class.
+    seen_last_thing_in_class = False
+    for i in xrange(linenum - 1, self.starting_linenum, -1):
+      match = Search(
+          r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
+          self.name + r'\)',
+          clean_lines.elided[i])
+      if match:
+        if seen_last_thing_in_class:
+          error(filename, i, 'readability/constructors', 3,
+                match.group(1) + ' should be the last thing in the class')
+        break
+
+      if not Match(r'^\s*$', clean_lines.elided[i]):
+        seen_last_thing_in_class = True
+
     # Check that closing brace is aligned with beginning of the class.
     # Only do this if the closing brace is indented by only whitespaces.
     # This means we will not check single-line class definitions.
@@ -2722,7 +2787,8 @@
             'Extra space after (')
     if (Search(r'\w\s+\(', fncall) and
         not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
-        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall)):
+        not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
+        not Search(r'\bcase\s+\(', fncall)):
       # TODO(unknown): Space after an operator function seem to be a common
       # error, silence those for now by restricting them to highest verbosity.
       if Search(r'\boperator_*\b', line):
@@ -2892,11 +2958,14 @@
                 'TODO(my_username) should be followed by a space')
 
       # If the comment contains an alphanumeric character, there
-      # should be a space somewhere between it and the //.
-      if Match(r'//[^ ]*\w', comment):
+      # should be a space somewhere between it and the // unless
+      # it's a /// or //! Doxygen comment.
+      if (Match(r'//[^ ]*\w', comment) and
+          not Match(r'(///|//\!)(\s+|$)', comment)):
         error(filename, linenum, 'whitespace/comments', 4,
               'Should have a space between // and comment')
 
+
 def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
   """Checks for improper use of DISALLOW* macros.
 
@@ -3083,7 +3152,12 @@
   # Otherwise not.  Note we only check for non-spaces on *both* sides;
   # sometimes people put non-spaces on one side when aligning ='s among
   # many lines (not that this is behavior that I approve of...)
-  if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
+  if ((Search(r'[\w.]=', line) or
+       Search(r'=[\w.]', line))
+      and not Search(r'\b(if|while|for) ', line)
+      # Operators taken from [lex.operators] in C++11 standard.
+      and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
+      and not Search(r'operator=', line)):
     error(filename, linenum, 'whitespace/operators', 4,
           'Missing spaces around =')
 
@@ -3135,9 +3209,8 @@
   #
   # We also allow operators following an opening parenthesis, since
   # those tend to be macros that deal with operators.
-  match = Search(r'(operator|\S)(?:L|UL|ULL|l|ul|ull)?<<([^\s,=])', line)
-  if (match and match.group(1) != '(' and
-      not (match.group(1).isdigit() and match.group(2).isdigit()) and
+  match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])', line)
+  if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
       not (match.group(1) == 'operator' and match.group(2) == ';')):
     error(filename, linenum, 'whitespace/operators', 3,
           'Missing spaces around <<')
@@ -3255,7 +3328,7 @@
   # an initializer list, for instance), you should have spaces before your
   # braces. And since you should never have braces at the beginning of a line,
   # this is an easy test.
-  match = Match(r'^(.*[^ ({]){', line)
+  match = Match(r'^(.*[^ ({>]){', line)
   if match:
     # Try a bit harder to check for brace initialization.  This
     # happens in one of the following forms:
@@ -3355,13 +3428,14 @@
   return False
 
 
-def IsRValueType(clean_lines, nesting_state, linenum, column):
+def IsRValueType(typenames, clean_lines, nesting_state, linenum, column):
   """Check if the token ending on (linenum, column) is a type.
 
   Assumes that text to the right of the column is "&&" or a function
   name.
 
   Args:
+    typenames: set of type names from template-argument-list.
     clean_lines: A CleansedLines instance containing the file.
     nesting_state: A NestingState instance which maintains information about
                    the current stack of nested blocks being parsed.
@@ -3385,7 +3459,7 @@
   if Match(r'&&\s*(?:[>,]|\.\.\.)', suffix):
     return True
 
-  # Check for simple type and end of templates:
+  # Check for known types and end of templates:
   #   int&& variable
   #   vector<int>&& variable
   #
@@ -3393,9 +3467,10 @@
   # recognize pointer and reference types:
   #   int* Function()
   #   int& Function()
-  if match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
-                        'short', 'int', 'long', 'signed', 'unsigned',
-                        'float', 'double', 'void', 'auto', '>', '*', '&']:
+  if (match.group(2) in typenames or
+      match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool',
+                         'short', 'int', 'long', 'signed', 'unsigned',
+                         'float', 'double', 'void', 'auto', '>', '*', '&']):
     return True
 
   # If we see a close parenthesis, look for decltype on the other side.
@@ -3528,7 +3603,7 @@
 
     # Something else.  Check that tokens to the left look like
     #   return_type function_name
-    match_func = Match(r'^(.*)\s+\w(?:\w|::)*(?:<[^<>]*>)?\s*$',
+    match_func = Match(r'^(.*\S.*)\s+\w(?:\w|::)*(?:<[^<>]*>)?\s*$',
                        match_symbol.group(1))
     if match_func:
       # Check for constructors, which don't have return types.
@@ -3538,7 +3613,7 @@
       if (implicit_constructor and
           implicit_constructor.group(1) == implicit_constructor.group(2)):
         return True
-      return IsRValueType(clean_lines, nesting_state, linenum,
+      return IsRValueType(typenames, clean_lines, nesting_state, linenum,
                           len(match_func.group(1)))
 
     # Nothing before the function name.  If this is inside a block scope,
@@ -3576,12 +3651,13 @@
   return Match(r'\s*=\s*(?:delete|default)\b', close_line[close_paren:])
 
 
-def IsRValueAllowed(clean_lines, linenum):
+def IsRValueAllowed(clean_lines, linenum, typenames):
   """Check if RValue reference is allowed on a particular line.
 
   Args:
     clean_lines: A CleansedLines instance containing the file.
     linenum: The number of the line to check.
+    typenames: set of type names from template-argument-list.
   Returns:
     True if line is within the region where RValue references are allowed.
   """
@@ -3602,7 +3678,7 @@
     return IsDeletedOrDefault(clean_lines, linenum)
 
   # Allow constructors
-  match = Match(r'\s*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line)
+  match = Match(r'\s*(?:[\w<>]+::)*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line)
   if match and match.group(1) == match.group(2):
     return IsDeletedOrDefault(clean_lines, linenum)
   if Search(r'\b(?:explicit|inline)\s+[\w<>]+\s*\(', line):
@@ -3615,7 +3691,86 @@
     if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$', previous_line):
       return IsDeletedOrDefault(clean_lines, linenum)
 
-  return False
+  # Reject types not mentioned in template-argument-list
+  while line:
+    match = Match(r'^.*?(\w+)\s*&&(.*)$', line)
+    if not match:
+      break
+    if match.group(1) not in typenames:
+      return False
+    line = match.group(2)
+
+  # All RValue types that were in template-argument-list should have
+  # been removed by now.  Those were allowed, assuming that they will
+  # be forwarded.
+  #
+  # If there are no remaining RValue types left (i.e. types that were
+  # not found in template-argument-list), flag those as not allowed.
+  return line.find('&&') < 0
+
+
+def GetTemplateArgs(clean_lines, linenum):
+  """Find list of template arguments associated with this function declaration.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: Line number containing the start of the function declaration,
+             usually one line after the end of the template-argument-list.
+  Returns:
+    Set of type names, or empty set if this does not appear to have
+    any template parameters.
+  """
+  # Find start of function
+  func_line = linenum
+  while func_line > 0:
+    line = clean_lines.elided[func_line]
+    if Match(r'^\s*$', line):
+      return set()
+    if line.find('(') >= 0:
+      break
+    func_line -= 1
+  if func_line == 0:
+    return set()
+
+  # Collapse template-argument-list into a single string
+  argument_list = ''
+  match = Match(r'^(\s*template\s*)<', clean_lines.elided[func_line])
+  if match:
+    # template-argument-list on the same line as function name
+    start_col = len(match.group(1))
+    _, end_line, end_col = CloseExpression(clean_lines, func_line, start_col)
+    if end_col > -1 and end_line == func_line:
+      start_col += 1  # Skip the opening bracket
+      argument_list = clean_lines.elided[func_line][start_col:end_col]
+
+  elif func_line > 1:
+    # template-argument-list one line before function name
+    match = Match(r'^(.*)>\s*$', clean_lines.elided[func_line - 1])
+    if match:
+      end_col = len(match.group(1))
+      _, start_line, start_col = ReverseCloseExpression(
+          clean_lines, func_line - 1, end_col)
+      if start_col > -1:
+        start_col += 1  # Skip the opening bracket
+        while start_line < func_line - 1:
+          argument_list += clean_lines.elided[start_line][start_col:]
+          start_col = 0
+          start_line += 1
+        argument_list += clean_lines.elided[func_line - 1][start_col:end_col]
+
+  if not argument_list:
+    return set()
+
+  # Extract type names
+  typenames = set()
+  while True:
+    match = Match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$',
+                  argument_list)
+    if not match:
+      break
+    typenames.add(match.group(1))
+    argument_list = match.group(2)
+  return typenames
 
 
 def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error):
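
To make the typename extraction in GetTemplateArgs above concrete, this
sketch runs its final loop over a collapsed template-argument-list:

    import re

    argument_list = 'typename T, class U, typename... Args'
    typenames = set()
    while True:
      match = re.match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$',
                       argument_list)
      if not match:
        break
      typenames.add(match.group(1))
      argument_list = match.group(2)
    print typenames  # T, U, Args
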
@@ -3643,9 +3798,10 @@
   # Either poorly formed && or an rvalue reference, check the context
   # to get a more accurate error message.  Mostly we want to determine
   # if what's to the left of "&&" is a type or not.
+  typenames = GetTemplateArgs(clean_lines, linenum)
   and_pos = len(match.group(1))
-  if IsRValueType(clean_lines, nesting_state, linenum, and_pos):
-    if not IsRValueAllowed(clean_lines, linenum):
+  if IsRValueType(typenames, clean_lines, nesting_state, linenum, and_pos):
+    if not IsRValueAllowed(clean_lines, linenum, typenames):
       error(filename, linenum, 'build/c++11', 3,
             'RValue references are an unapproved C++ feature.')
   else:
@@ -3926,8 +4082,10 @@
     # semicolons, while the downside for getting the blacklist wrong
     # would result in compile errors.
     #
-    # In addition to macros, we also don't want to warn on compound
-    # literals and lambdas.
+    # In addition to macros, we also don't want to warn on
+    #  - Compound literals
+    #  - Lambdas
+    #  - alignas specifier with anonymous structs:
     closing_brace_pos = match.group(1).rfind(')')
     opening_parenthesis = ReverseCloseExpression(
         clean_lines, linenum, closing_brace_pos)
@@ -3941,6 +4099,7 @@
                'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
                'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
           (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
+          Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
           Search(r'\s+=\s*$', line_prefix)):
         match = None
     if (match and
@@ -4484,6 +4643,10 @@
       error(filename, linenum, 'build/include', 4,
             '"%s" already included at %s:%s' %
             (include, filename, duplicate_line))
+    elif (include.endswith('.cc') and
+          os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
+      error(filename, linenum, 'build/include', 4,
+            'Do not include .cc files from other packages')
     elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
       include_state.include_list[-1].append((include, linenum))
 
@@ -4511,20 +4674,6 @@
               'Include "%s" not in alphabetical order' % include)
       include_state.SetLastHeader(canonical_include)
 
-  # Look for any of the stream classes that are part of standard C++.
-  match = _RE_PATTERN_INCLUDE.match(line)
-  if match:
-    include = match.group(2)
-    if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
-      # Many unit tests use cout, so we exempt them.
-      if not _IsTestFilename(filename):
-        # Suggest a different header for ostream
-        if include == 'ostream':
-          error(filename, linenum, 'readability/streams', 3,
-                'For logging, include "base/logging.h" instead of <ostream>.')
-        else:
-          error(filename, linenum, 'readability/streams', 3,
-                'Streams are highly discouraged.')
 
 
 def _GetTextInside(text, start_pattern):
@@ -4755,25 +4904,6 @@
             'Do not use variable-length arrays.  Use an appropriately named '
             "('k' followed by CamelCase) compile-time constant for the size.")
 
-  # If DISALLOW_COPY_AND_ASSIGN DISALLOW_IMPLICIT_CONSTRUCTORS is present,
-  # then it should be the last thing in the class declaration.
-  match = Match(
-      (r'\s*'
-       r'(DISALLOW_(COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS))'
-       r'\(.*\);$'),
-      line)
-  if match and linenum + 1 < clean_lines.NumLines():
-    next_line = clean_lines.elided[linenum + 1]
-    # We allow some, but not all, declarations of variables to be present
-    # in the statement that defines the class.  The [\w\*,\s]* fragment of
-    # the regular expression below allows users to declare instances of
-    # the class or pointers to instances, but not less common types such
-    # as function pointers or arrays.  It's a tradeoff between allowing
-    # reasonable code and avoiding trying to parse more C++ using regexps.
-    if not Search(r'^\s*}[\w\*,\s]*;', next_line):
-      error(filename, linenum, 'readability/constructors', 3,
-            match.group(1) + ' should be the last thing in the class')
-
   # Check for use of unnamed namespaces in header files.  Registration
   # macros are typically OK, so we allow use of "namespace {" on lines
   # that end with backslashes.
@@ -4889,6 +5019,22 @@
   return False
 
 
+def IsOutOfLineMethodDefinition(clean_lines, linenum):
+  """Check if current line contains an out-of-line method definition.
+
+  Args:
+    clean_lines: A CleansedLines instance containing the file.
+    linenum: The number of the line to check.
+  Returns:
+    True if current line contains an out-of-line method definition.
+  """
+  # Scan back a few lines for start of current function
+  for i in xrange(linenum, max(-1, linenum - 10), -1):
+    if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
+      return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
+  return False
+
+
 def IsInitializerList(clean_lines, linenum):
   """Check if current line is inside constructor initializer list.
 
@@ -4957,6 +5103,11 @@
   if IsDerivedFunction(clean_lines, linenum):
     return
 
+  # Don't warn on out-of-line method definitions, as we would warn on the
+  # in-line declaration, if it isn't marked with 'override'.
+  if IsOutOfLineMethodDefinition(clean_lines, linenum):
+    return
+
   # Long type names may be broken across multiple lines, usually in one
   # of these forms:
   #   LongType
@@ -5152,9 +5303,9 @@
   # This is not a cast:
   #   reference_type&(int* function_param);
   match = Search(
-      r'(?:[^\w]&\(([^)]+)\)[\w(])|'
+      r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
       r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
-  if match and match.group(1) != '*':
+  if match:
     # Try a better error message when the & is bound to something
     # dereferenced by the casted pointer, as opposed to the casted
     # pointer itself.
@@ -5235,6 +5386,7 @@
   #   ExceptionMember(int) throw (...);
   #   ExceptionMember(int) throw (...) {
   #   PureVirtual(int) = 0;
+  #   [](int) -> bool {
   #
   # These are functions of some sort, where the compiler would be fine
   # if they had named parameters, but people often omit those
@@ -5246,7 +5398,7 @@
   #   <TemplateArgument(int)>;
   #   <(FunctionPointerTemplateArgument)(int)>;
   remainder = line[match.end(0):]
-  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),])',
+  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
            remainder):
     # Looks like an unnamed parameter.
 
@@ -5335,6 +5487,7 @@
     ('<set>', ('set', 'multiset',)),
     ('<stack>', ('stack',)),
     ('<string>', ('char_traits', 'basic_string',)),
+    ('<tuple>', ('tuple',)),
     ('<utility>', ('pair',)),
     ('<vector>', ('vector',)),
 
@@ -5602,9 +5755,21 @@
   """
   # Look for "virtual" on current line.
   line = clean_lines.elided[linenum]
-  virtual = Match(r'^(.*\bvirtual\b)', line)
+  virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
   if not virtual: return
 
+  # Ignore "virtual" keywords that are near access-specifiers.  These
+  # are only used in class base-specifier and do not apply to member
+  # functions.
+  if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
+      Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
+    return
+
+  # Ignore the "virtual" keyword from virtual base classes.  Usually
+  # there is a colon on the same line in these cases (virtual base
+  # classes are rare in google3 because multiple inheritance is rare).
+  if Match(r'^.*[^:]:[^:].*$', line): return
+
   # Look for the next opening parenthesis.  This is the start of the
   # parameter list (possibly on the next line shortly after virtual).
   # TODO(unknown): doesn't work if there are virtual functions with
@@ -5612,7 +5777,7 @@
   # that this is rare.
   end_col = -1
   end_line = -1
-  start_col = len(virtual.group(1))
+  start_col = len(virtual.group(2))
   for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
     line = clean_lines.elided[start_line][start_col:]
     parameter_list = Match(r'^([^(]*)\(', line)
@@ -5652,9 +5817,21 @@
     linenum: The number of the line to check.
     error: The function to call with any errors found.
   """
-  # Check that at most one of "override" or "final" is present, not both
+  # Look for closing parenthesis nearby.  We need one to confirm where
+  # the declarator ends and where the virt-specifier starts to avoid
+  # false positives.
   line = clean_lines.elided[linenum]
-  if Search(r'\boverride\b', line) and Search(r'\bfinal\b', line):
+  declarator_end = line.rfind(')')
+  if declarator_end >= 0:
+    fragment = line[declarator_end:]
+  else:
+    if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
+      fragment = line
+    else:
+      return
+
+  # Check that at most one of "override" or "final" is present, not both
+  if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
     error(filename, linenum, 'readability/inheritance', 4,
           ('"override" is redundant since function is '
            'already declared as "final"'))
@@ -5809,9 +5986,6 @@
       # type_traits
       'alignment_of',
       'aligned_union',
-
-      # utility
-      'forward',
       ):
     if Search(r'\bstd::%s\b' % top_name, line):
       error(filename, linenum, 'build/c++11', 5,
@@ -5846,11 +6020,12 @@
 
   CheckForCopyright(filename, lines, error)
 
-  if file_extension == 'h':
-    CheckForHeaderGuard(filename, lines, error)
-
   RemoveMultiLineComments(filename, lines, error)
   clean_lines = CleansedLines(lines)
+
+  if file_extension == 'h':
+    CheckForHeaderGuard(filename, clean_lines, error)
+
   for line in xrange(clean_lines.NumLines()):
     ProcessLine(filename, file_extension, clean_lines, line,
                 include_state, function_state, nesting_state, error,
@@ -5859,6 +6034,10 @@
   nesting_state.CheckCompletedBlocks(filename, error)
 
   CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
+
+  # Check that the .cc file has included its header if it exists.
+  if file_extension == 'cc':
+    CheckHeaderFileIncluded(filename, include_state, error)
 
   # We check here rather than inside ProcessLine so that we see raw
   # lines rather than "cleaned" lines.
diff --git a/dart_format.py b/dart_format.py
new file mode 100755
index 0000000..ee07efe
--- /dev/null
+++ b/dart_format.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Redirects to the version of dartfmt checked into a gclient repo.
+
+dartfmt binaries are pulled down during gclient sync in the mojo repo.
+
+This tool is named dart_format.py instead of dartfmt to parallel
+clang_format.py, which is in this same repository."""
+
+import os
+import subprocess
+import sys
+
+import gclient_utils
+
+class NotFoundError(Exception):
+  """A file could not be found."""
+  def __init__(self, e):
+    Exception.__init__(self,
+        'Problem while looking for dartfmt in Chromium source tree:\n'
+        '  %s' % e)
+
+
+def FindDartFmtToolInChromiumTree():
+  """Return a path to the dartfmt executable, or die trying."""
+  primary_solution_path = gclient_utils.GetPrimarySolutionPath()
+  if not primary_solution_path:
+    raise NotFoundError(
+        'Could not find checkout in any parent of the current path.')
+
+  dartfmt_path = os.path.join(primary_solution_path, 'third_party', 'dart-sdk',
+                              'dart-sdk', 'bin', 'dartfmt')
+  if not os.path.exists(dartfmt_path):
+    raise NotFoundError('File does not exist: %s' % dartfmt_path)
+  return dartfmt_path
+
+
+def main(args):
+  try:
+    tool = FindDartFmtToolInChromiumTree()
+  except NotFoundError, e:
+    print >> sys.stderr, e
+    sys.exit(1)
+
+  # Add some visibility to --help showing where the tool lives, since this
+  # redirection can be a little opaque.
+  help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
+  if any(match in args for match in help_syntax):
+    print '\nDepot tools redirects you to the dartfmt at:\n    %s\n' % tool
+
+  return subprocess.call([tool] + sys.argv[1:])
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/depot-tools-auth b/depot-tools-auth
new file mode 100755
index 0000000..9233c92
--- /dev/null
+++ b/depot-tools-auth
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/depot-tools-auth.py" "$@"
diff --git a/depot-tools-auth.bat b/depot-tools-auth.bat
new file mode 100644
index 0000000..fe13f93
--- /dev/null
+++ b/depot-tools-auth.bat
@@ -0,0 +1,11 @@
+@echo off
+:: Copyright 2015 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+setlocal
+
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+
+:: Defer control.
+%~dp0python "%~dp0\depot-tools-auth.py" %*
diff --git a/depot-tools-auth.py b/depot-tools-auth.py
new file mode 100755
index 0000000..3ebc239
--- /dev/null
+++ b/depot-tools-auth.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manages cached OAuth2 tokens used by other depot_tools scripts.
+
+Usage:
+  depot-tools-auth login codereview.chromium.org
+  depot-tools-auth info codereview.chromium.org
+  depot-tools-auth logout codereview.chromium.org
+"""
+
+import logging
+import optparse
+import sys
+
+from third_party import colorama
+
+import auth
+import subcommand
+
+__version__ = '1.0'
+
+
+@subcommand.usage('<hostname>')
+def CMDlogin(parser, args):
+  """Performs interactive login and caches authentication token."""
+  # Forcefully relogin, revoking previous token.
+  hostname, authenticator = parser.parse_args(args)
+  authenticator.logout()
+  authenticator.login()
+  print_token_info(hostname, authenticator)
+  return 0
+
+
+@subcommand.usage('<hostname>')
+def CMDlogout(parser, args):
+  """Revokes cached authentication token and removes it from disk."""
+  _, authenticator = parser.parse_args(args)
+  done = authenticator.logout()
+  print 'Done.' if done else 'Already logged out.'
+  return 0
+
+
+@subcommand.usage('<hostname>')
+def CMDinfo(parser, args):
+  """Shows email associated with a cached authentication token."""
+  # If no token is cached, AuthenticationError will be caught in 'main'.
+  hostname, authenticator = parser.parse_args(args)
+  print_token_info(hostname, authenticator)
+  return 0
+
+
+def print_token_info(hostname, authenticator):
+  token_info = authenticator.get_token_info()
+  print 'Logged in to %s as %s.' % (hostname, token_info['email'])
+  print ''
+  print 'To log in with a different email run:'
+  print '  depot-tools-auth login %s' % hostname
+  print 'To log out and purge the authentication token run:'
+  print '  depot-tools-auth logout %s' % hostname
+
+
+class OptionParser(optparse.OptionParser):
+  def __init__(self, *args, **kwargs):
+    optparse.OptionParser.__init__(
+        self, *args, prog='depot-tools-auth', version=__version__, **kwargs)
+    self.add_option(
+        '-v', '--verbose', action='count', default=0,
+        help='Use 2 times for more debugging info')
+    auth.add_auth_options(self, auth.make_auth_config(use_oauth2=True))
+
+  def parse_args(self, args=None, values=None):
+    """Parses options and returns (hostname, auth.Authenticator object)."""
+    options, args = optparse.OptionParser.parse_args(self, args, values)
+    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
+    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
+    auth_config = auth.extract_auth_config_from_options(options)
+    if len(args) != 1:
+      self.error('Expecting single argument (hostname).')
+    if not auth_config.use_oauth2:
+      self.error('This command is only usable with OAuth2 authentication.')
+    return args[0], auth.get_authenticator_for_host(args[0], auth_config)
+
+
+def main(argv):
+  dispatcher = subcommand.CommandDispatcher(__name__)
+  try:
+    return dispatcher.execute(OptionParser(), argv)
+  except auth.AuthenticationError as e:
+    print >> sys.stderr, e
+    return 1
+
+
+if __name__ == '__main__':
+  colorama.init()
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
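Additional subcommands can plug into the same dispatcher. A hedged sketch: CMDwhoami is hypothetical, but the CMD<name> convention, the (parser, args) signature, and the get_token_info() call all follow the commands above.

@subcommand.usage('<hostname>')
def CMDwhoami(parser, args):
  """Prints only the email tied to the cached token."""
  _, authenticator = parser.parse_args(args)
  print authenticator.get_token_info()['email']
  return 0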
diff --git a/download_from_google_storage.py b/download_from_google_storage.py
index 8370515..44cc3a7 100755
--- a/download_from_google_storage.py
+++ b/download_from_google_storage.py
@@ -20,8 +20,7 @@
 
 
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
 # Maps sys.platform to what we actually want to call them.
 PLATFORM_MAPPING = {
     'cygwin': 'win',
@@ -55,13 +54,13 @@
 class Gsutil(object):
   """Call gsutil with some predefined settings.  This is a convenience object,
   and is also immutable."""
-  def __init__(self, path, boto_path, timeout=None, bypass_prodaccess=False):
+  def __init__(self, path, boto_path=None, timeout=None, version='4.7'):
     if not os.path.exists(path):
       raise FileNotFoundError('GSUtil not found in %s' % path)
     self.path = path
     self.timeout = timeout
     self.boto_path = boto_path
-    self.bypass_prodaccess = bypass_prodaccess
+    self.version = version
 
   def get_sub_env(self):
     env = os.environ.copy()
@@ -71,25 +70,16 @@
     elif self.boto_path:
       env['AWS_CREDENTIAL_FILE'] = self.boto_path
       env['BOTO_CONFIG'] = self.boto_path
-    else:
-      custompath = env.get('AWS_CREDENTIAL_FILE', '~/.boto') + '.depot_tools'
-      custompath = os.path.expanduser(custompath)
-      if os.path.exists(custompath):
-        env['AWS_CREDENTIAL_FILE'] = custompath
 
     return env
 
   def call(self, *args):
-    cmd = [sys.executable, self.path]
-    if self.bypass_prodaccess:
-      cmd.append('--bypass_prodaccess')
+    cmd = [sys.executable, self.path, '--force-version', self.version]
     cmd.extend(args)
     return subprocess2.call(cmd, env=self.get_sub_env(), timeout=self.timeout)
 
   def check_call(self, *args):
-    cmd = [sys.executable, self.path]
-    if self.bypass_prodaccess:
-      cmd.append('--bypass_prodaccess')
+    cmd = [sys.executable, self.path, '--force-version', self.version]
     cmd.extend(args)
     ((out, err), code) = subprocess2.communicate(
         cmd,
@@ -105,28 +95,11 @@
     if ('You are attempting to access protected data with '
           'no configured credentials.' in err):
       return (403, out, err)
-    if 'No such object' in err:
+    if 'matched no objects' in err:
       return (404, out, err)
     return (code, out, err)
 
 
-def check_bucket_permissions(bucket, gsutil):
-  if not bucket:
-    print >> sys.stderr, 'Missing bucket %s.'
-    return (None, 1)
-  base_url = 'gs://%s' % bucket
-
-  code, _, ls_err = gsutil.check_call('ls', base_url)
-  if code != 0:
-    print >> sys.stderr, ls_err
-  if code == 403:
-    print >> sys.stderr, 'Got error 403 while authenticating to %s.' % base_url
-    print >> sys.stderr, 'Try running "download_from_google_storage --config".'
-  elif code == 404:
-    print >> sys.stderr, '%s not found.' % base_url
-  return (base_url, code)
-
-
 def check_platform(target):
   """Checks if any parent directory of target matches (win|mac|linux)."""
   assert os.path.isabs(target)
@@ -163,8 +136,7 @@
     with open(input_filename, 'rb') as f:
       sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
       if sha1_match:
-        work_queue.put(
-            (sha1_match.groups(1)[0], input_filename.replace('.sha1', '')))
+        work_queue.put((sha1_match.groups(1)[0], output))
         return 1
     if not ignore_errors:
       raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
@@ -229,19 +201,41 @@
         continue
     # Check if file exists.
     file_url = '%s/%s' % (base_url, input_sha1_sum)
-    if gsutil.check_call('ls', file_url)[0] != 0:
-      out_q.put('%d> File %s for %s does not exist, skipping.' % (
-          thread_num, file_url, output_filename))
-      ret_codes.put((1, 'File %s for %s does not exist.' % (
-          file_url, output_filename)))
+    (code, _, err) = gsutil.check_call('ls', file_url)
+    if code != 0:
+      if code == 404:
+        out_q.put('%d> File %s for %s does not exist, skipping.' % (
+            thread_num, file_url, output_filename))
+        ret_codes.put((1, 'File %s for %s does not exist.' % (
+            file_url, output_filename)))
+      else:
+        # Other error, probably auth related (bad ~/.boto, etc).
+        out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % (
+            thread_num, file_url, output_filename, err))
+        ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % (
+            file_url, output_filename, err)))
       continue
     # Fetch the file.
-    out_q.put('%d> Downloading %s...' % (
-        thread_num, output_filename))
-    code, _, err = gsutil.check_call('cp', '-q', file_url, output_filename)
+    out_q.put('%d> Downloading %s...' % (thread_num, output_filename))
+    try:
+      os.remove(output_filename)  # Delete the file if it exists already.
+    except OSError:
+      if os.path.exists(output_filename):
+        out_q.put('%d> Warning: deleting %s failed.' % (
+            thread_num, output_filename))
+    code, _, err = gsutil.check_call('cp', file_url, output_filename)
     if code != 0:
       out_q.put('%d> %s' % (thread_num, err))
       ret_codes.put((code, err))
+      continue
+
+    remote_sha1 = get_sha1(output_filename)
+    if remote_sha1 != input_sha1_sum:
+      msg = ('%d> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
+             (thread_num, remote_sha1, input_sha1_sum))
+      out_q.put(msg)
+      ret_codes.put((20, msg))
+      continue
 
     # Set executable bit.
     if sys.platform == 'cygwin':
@@ -254,15 +248,11 @@
     elif sys.platform != 'win32':
       # On non-Windows platforms, key off of the custom header
       # "x-goog-meta-executable".
-      #
-      # TODO(hinoka): It is supposedly faster to use "gsutil stat" but that
-      # doesn't appear to be supported by the gsutil currently in our tree. When
-      # we update, this code should use that instead of "gsutil ls -L".
-      code, out, _ = gsutil.check_call('ls', '-L', file_url)
+      code, out, _ = gsutil.check_call('stat', file_url)
       if code != 0:
         out_q.put('%d> %s' % (thread_num, err))
         ret_codes.put((code, err))
-      elif re.search('x-goog-meta-executable:', out):
+      elif re.search(r'executable:\s*1', out):
         st = os.stat(output_filename)
         os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
 
@@ -394,21 +384,34 @@
 
   # Set the boto file to /dev/null if we don't need auth.
   if options.no_auth:
-    options.boto = os.devnull
+    if (set(('http_proxy', 'https_proxy')).intersection(
+        env.lower() for env in os.environ) and
+        'NO_AUTH_BOTO_CONFIG' not in os.environ):
+      print >> sys.stderr, ('NOTICE: You have PROXY values set in your '
+                            'environment, but gsutil in depot_tools does not '
+                            '(yet) obey them.')
+      print >> sys.stderr, ('Also, --no_auth prevents the normal BOTO_CONFIG '
+                            'environment variable from being used.')
+      print >> sys.stderr, ('To use a proxy in this situation, please supply '
+                            'those settings in a .boto file pointed to by '
+                            'the NO_AUTH_BOTO_CONFIG environment var.')
+    options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
 
   # Make sure gsutil exists where we expect it to.
   if os.path.exists(GSUTIL_DEFAULT_PATH):
     gsutil = Gsutil(GSUTIL_DEFAULT_PATH,
-                    boto_path=options.boto,
-                    bypass_prodaccess=options.no_auth)
+                    boto_path=options.boto)
   else:
     parser.error('gsutil not found in %s, bad depot_tools checkout?' %
                  GSUTIL_DEFAULT_PATH)
 
   # Passing in -g/--config will run our copy of GSUtil, then quit.
   if options.config:
-    return gsutil.call('config', '-r', '-o',
-                       os.path.expanduser('~/.boto.depot_tools'))
+    print '===Note from depot_tools==='
+    print 'If you do not have a project ID, enter "0" when asked for one.'
+    print '===End note from depot_tools==='
+    print
+    return gsutil.call('config')
 
   if not args:
     parser.error('Missing target.')
@@ -452,10 +455,7 @@
       parser.error('Output file %s exists and --no_resume is specified.'
                    % options.output)
 
-  # Check we have a valid bucket with valid permissions.
-  base_url, code = check_bucket_permissions(options.bucket, gsutil)
-  if code:
-    return code
+  base_url = 'gs://%s' % options.bucket
 
   return download_from_google_storage(
       input_filename, base_url, gsutil, options.num_threads, options.directory,
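The new integrity check above compares get_sha1(output_filename) against the expected sum. get_sha1 is defined elsewhere in this file; a minimal sketch of the idea, hashing in fixed-size chunks so large binaries are never held in memory at once:

import hashlib

def sha1_of_file(path, chunk_size=1024 * 1024):
  s = hashlib.sha1()
  with open(path, 'rb') as f:
    chunk = f.read(chunk_size)
    while chunk:
      s.update(chunk)
      chunk = f.read(chunk_size)
  return s.hexdigest()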
diff --git a/drover.py b/drover.py
index ec8620c..8702530 100755
--- a/drover.py
+++ b/drover.py
@@ -642,4 +642,8 @@
 
 
 if __name__ == "__main__":
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
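The same Ctrl-C wrapper is added to several entry points in this change (drover, fetch, gcl, gclient, git_auto_svn). As a reusable sketch of the pattern:

import sys

def run_interruptible(main, argv):
  # Exit with status 1 and a short message instead of a KeyboardInterrupt
  # traceback when the user hits Ctrl-C.
  try:
    sys.exit(main(argv))
  except KeyboardInterrupt:
    sys.stderr.write('interrupted\n')
    sys.exit(1)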
diff --git a/fetch b/fetch
index 37e8e79..bea6718 100755
--- a/fetch
+++ b/fetch
@@ -5,8 +5,4 @@
 
 base_dir=$(dirname "$0")
 
-if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
-  "$base_dir"/update_depot_tools
-fi
-
 PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/fetch.py" "$@"
diff --git a/fetch.py b/fetch.py
index f5640cc..35e05d3 100755
--- a/fetch.py
+++ b/fetch.py
@@ -62,8 +62,8 @@
   def run(self, cmd, **kwargs):
     print 'Running: %s' % (' '.join(pipes.quote(x) for x in cmd))
     if self.options.dry_run:
-      return 0
-    return subprocess.check_call(cmd, **kwargs)
+      return ''
+    return subprocess.check_output(cmd, **kwargs)
 
 
 class GclientCheckout(Checkout):
@@ -75,6 +75,15 @@
       cmd_prefix = ('gclient',)
     return self.run(cmd_prefix + cmd, **kwargs)
 
+  def exists(self):
+    try:
+      gclient_root = self.run_gclient('root').strip()
+      return (os.path.exists(os.path.join(gclient_root, '.gclient')) or
+              os.path.exists(os.path.join(os.getcwd(), self.root)))
+    except subprocess.CalledProcessError:
+      pass
+    return os.path.exists(os.path.join(os.getcwd(), self.root))
+
 
 class GitCheckout(Checkout):
 
@@ -101,17 +110,28 @@
   def __init__(self, options, spec, root):
     super(GclientGitCheckout, self).__init__(options, spec, root)
     assert 'solutions' in self.spec
-    keys = ['solutions', 'target_os', 'target_os_only']
-    gclient_spec = '\n'.join('%s = %s' % (key, self.spec[key])
-                             for key in keys if key in self.spec)
-    self.spec['gclient_spec'] = gclient_spec
 
-  def exists(self):
-    return os.path.exists(os.path.join(os.getcwd(), self.root))
+  def _format_spec(self):
+    def _format_literal(lit):
+      if isinstance(lit, basestring):
+        return '"%s"' % lit
+      if isinstance(lit, list):
+        return '[%s]' % ', '.join(_format_literal(i) for i in lit)
+      return '%r' % lit
+    soln_strings = []
+    for soln in self.spec['solutions']:
+      soln_string = '\n'.join('    "%s": %s,' % (key, _format_literal(value))
+                             for key, value in soln.iteritems())
+      soln_strings.append('  {\n%s\n  },' % soln_string)
+    gclient_spec = 'solutions = [\n%s\n]\n' % '\n'.join(soln_strings)
+    extra_keys = ['target_os', 'target_os_only']
+    gclient_spec += ''.join('%s = %s\n' % (key, _format_literal(self.spec[key]))
+                             for key in extra_keys if key in self.spec)
+    return gclient_spec
 
   def init(self):
     # Configure and do the gclient checkout.
-    self.run_gclient('config', '--spec', self.spec['gclient_spec'])
+    self.run_gclient('config', '--spec', self._format_spec())
     sync_cmd = ['sync']
     if self.options.nohooks:
       sync_cmd.append('--nohooks')
@@ -139,20 +159,18 @@
 
   def __init__(self, options, spec, root):
     super(GclientGitSvnCheckout, self).__init__(options, spec, root)
-    assert 'svn_url' in self.spec
-    assert 'svn_branch' in self.spec
-    assert 'svn_ref' in self.spec
 
   def init(self):
     # Ensure we are authenticated with subversion for all submodules.
     git_svn_dirs = json.loads(self.spec.get('submodule_git_svn_spec', '{}'))
     git_svn_dirs.update({self.root: self.spec})
     for _, svn_spec in git_svn_dirs.iteritems():
-      try:
-        self.run_svn('ls', '--non-interactive', svn_spec['svn_url'])
-      except subprocess.CalledProcessError:
-        print 'Please run `svn ls %s`' % svn_spec['svn_url']
-        return 1
+      if svn_spec.get('svn_url'):
+        try:
+          self.run_svn('ls', '--non-interactive', svn_spec['svn_url'])
+        except subprocess.CalledProcessError:
+          print 'Please run `svn ls %s`' % svn_spec['svn_url']
+          return 1
 
     super(GclientGitSvnCheckout, self).init()
 
@@ -164,12 +182,17 @@
       wd = os.path.join(self.base, real_path)
       if self.options.dry_run:
         print 'cd %s' % wd
-      prefix = svn_spec.get('svn_prefix', 'origin/')
-      self.run_git('svn', 'init', '--prefix=' + prefix, '-T',
-                   svn_spec['svn_branch'], svn_spec['svn_url'], cwd=wd)
-      self.run_git('config', '--replace', 'svn-remote.svn.fetch',
-                   svn_spec['svn_branch'] + ':refs/remotes/' + prefix +
-                   svn_spec['svn_ref'], cwd=wd)
+      if svn_spec.get('auto'):
+        self.run_git('auto-svn', cwd=wd)
+        continue
+      self.run_git('svn', 'init', svn_spec['svn_url'], cwd=wd)
+      self.run_git('config', '--unset-all', 'svn-remote.svn.fetch', cwd=wd)
+      for svn_branch, git_ref in svn_spec.get('git_svn_fetch', {}).items():
+        self.run_git('config', '--add', 'svn-remote.svn.fetch',
+                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
+      for svn_branch, git_ref in svn_spec.get('git_svn_branches', {}).items():
+        self.run_git('config', '--add', 'svn-remote.svn.branches',
+                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
       self.run_git('svn', 'fetch', cwd=wd)
 
 
@@ -298,8 +321,9 @@
   except KeyError:
     return 1
   if checkout.exists():
-    print 'You appear to already have a checkout. "fetch" is used only'
-    print 'to get new checkouts. Use "gclient sync" to update the checkout.'
+    print 'Your current directory appears to already contain, or be part of, '
+    print 'a checkout. "fetch" is used only to get new checkouts. Use '
+    print '"gclient sync" to update existing checkouts.'
     print
     print 'Fetch also does not yet deal with partial checkouts, so if fetch'
     print 'failed, delete the checkout and start over (crbug.com/230691).'
@@ -314,4 +338,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
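The new _format_spec() serializes the parsed spec back into valid gclient syntax. A hedged sketch of the literal formatting applied to a hypothetical solution dict (reproducing _format_literal from the hunk above):

def _format_literal(lit):
  if isinstance(lit, basestring):
    return '"%s"' % lit
  if isinstance(lit, list):
    return '[%s]' % ', '.join(_format_literal(i) for i in lit)
  return '%r' % lit

soln = {'name': 'src', 'managed': False}  # hypothetical solution
for key, value in sorted(soln.iteritems()):
  print '    "%s": %s,' % (key, _format_literal(value))
# Prints:
#     "managed": False,
#     "name": "src",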
diff --git a/fix_encoding.py b/fix_encoding.py
index 61bd742..5da9135 100644
--- a/fix_encoding.py
+++ b/fix_encoding.py
@@ -229,7 +229,7 @@
         remaining -= n.value
         if not remaining:
           break
-        text = text[n.value:]
+        text = text[int(n.value):]
     except Exception, e:
       complain('%s.write: %r' % (self.name, e))
       raise
diff --git a/gcl.py b/gcl.py
index a32b950..bbc4432 100755
--- a/gcl.py
+++ b/gcl.py
@@ -23,6 +23,7 @@
 import breakpad  # pylint: disable=W0611
 
 
+import auth
 import fix_encoding
 import gclient_utils
 import git_cl
@@ -46,6 +47,9 @@
 # we store information about changelists.
 REPOSITORY_ROOT = ""
 
+# Replacement for project name.
+SWITCH_TO_GIT = "SWITCH_TO_GIT_ALREADY"
+
 # Filename where we store repository specific information for gcl.
 CODEREVIEW_SETTINGS_FILE = "codereview.settings"
 CODEREVIEW_SETTINGS_FILE_NOT_FOUND = (
@@ -351,7 +355,10 @@
     if not self._rpc_server:
       if not self.rietveld:
         ErrorExit(CODEREVIEW_SETTINGS_FILE_NOT_FOUND)
-      self._rpc_server = rietveld.CachingRietveld(self.rietveld, None, None)
+      # TODO(vadimsh): gcl.py should be deleted soon. Do not bother much about
+      # authentication options and always use defaults.
+      self._rpc_server = rietveld.CachingRietveld(
+          self.rietveld, auth.make_auth_config())
     return self._rpc_server
 
   def CloseIssue(self):
@@ -875,6 +882,10 @@
       # Uploading a new patchset.
       upload_arg.append("--issue=%d" % change_info.issue)
 
+      project = GetCodeReviewSetting("PROJECT")
+      if project:
+        upload_arg.append("--project=%s" % SWITCH_TO_GIT)
+
       if not any(i.startswith('--title') or i.startswith('-t') for i in args):
         upload_arg.append('--title= ')
     else:
@@ -915,7 +926,7 @@
 
       project = GetCodeReviewSetting("PROJECT")
       if project:
-        upload_arg.append("--project=%s" % project)
+        upload_arg.append("--project=%s" % SWITCH_TO_GIT)
 
     # If we have a lot of files with long paths, then we won't be able to fit
     # the command to "svn diff".  Instead, we generate the diff manually for
@@ -1465,6 +1476,10 @@
         '\nYour python version %s is unsupported, please upgrade.\n' %
         sys.version.split(' ', 1)[0])
     return 2
+
+  sys.stderr.write('Warning: gcl is going away soon. Get off subversion!\n')
+  sys.stderr.write('See http://crbug.com/475321 for more details.\n')
+
   if not argv:
     argv = ['help']
   command = Command(argv[0])
@@ -1511,4 +1526,8 @@
 
 if __name__ == "__main__":
   fix_encoding.fix_encoding()
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gclient b/gclient
index 7c57dfd..798b2d8 100755
--- a/gclient
+++ b/gclient
@@ -6,7 +6,7 @@
 base_dir=$(dirname "$0")
 
 if [[ "#grep#fetch#cleanup#diff#" != *"#$1#"* ]]; then
-  "$base_dir"/update_depot_tools
+  "$base_dir"/update_depot_tools "$@"
 fi
 
 PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/gclient.py" "$@"
diff --git a/gclient.bat b/gclient.bat
index 42984c0..d73e641 100755
--- a/gclient.bat
+++ b/gclient.bat
@@ -8,7 +8,7 @@
 PATH=%~dp0;%PATH%
 
 :: Synchronize the root directory before deferring control back to gclient.py.
-call "%~dp0\update_depot_tools.bat"
+call "%~dp0\update_depot_tools.bat" %*
 
 :: Defer control.
 %~dp0python "%~dp0\gclient.py" %*
diff --git a/gclient.py b/gclient.py
index 3007487..8de7e52 100755
--- a/gclient.py
+++ b/gclient.py
@@ -342,6 +342,11 @@
     # A cache of the files affected by the current operation, necessary for
     # hooks.
     self._file_list = []
+    # List of host names from which dependencies are allowed.
+    # Default is an empty set, meaning unspecified in the DEPS file, in which
+    # case all hosts are allowed. A non-empty set acts as a whitelist.
+    # allowed_hosts is scoped to its own DEPS file, so it isn't recursive.
+    self._allowed_hosts = frozenset()
     # If it is not set to True, the dependency wasn't processed for its child
     # dependency, i.e. its DEPS wasn't read.
     self._deps_parsed = False
@@ -687,6 +692,18 @@
           rel_deps.add(os.path.normpath(os.path.join(self.name, d)))
         self.recursedeps = rel_deps
 
+    if 'allowed_hosts' in local_scope:
+      try:
+        self._allowed_hosts = frozenset(local_scope.get('allowed_hosts'))
+      except TypeError:  # raised if non-iterable
+        pass
+      if not self._allowed_hosts:
+        logging.warning("allowed_hosts is specified but empty %s",
+                        self._allowed_hosts)
+        raise gclient_utils.Error(
+            'ParseDepsFile(%s): allowed_hosts must be absent '
+            'or a non-empty iterable' % self.name)
+
     # Convert the deps into real Dependency.
     deps_to_add = []
     for name, url in deps.iteritems():
@@ -756,6 +773,24 @@
               print('Using parent\'s revision date %s since we are in a '
                     'different repository.' % options.revision)
 
+  def findDepsFromNotAllowedHosts(self):
+    """Returns a list of depenecies from not allowed hosts.
+
+    If allowed_hosts is not set, allows all hosts and returns empty list.
+    """
+    if not self._allowed_hosts:
+      return []
+    bad_deps = []
+    for dep in self._dependencies:
+      # Don't enforce this for custom_deps.
+      if dep.name in self._custom_deps:
+        continue
+      if isinstance(dep.url, basestring):
+        parsed_url = urlparse.urlparse(dep.url)
+        if parsed_url.netloc and parsed_url.netloc not in self._allowed_hosts:
+          bad_deps.append(dep)
+    return bad_deps
+
   # Arguments number differs from overridden method
   # pylint: disable=W0221
   def run(self, revision_overrides, command, args, work_queue, options):
@@ -770,6 +805,7 @@
     run_scm = command not in ('runhooks', 'recurse', None)
     parsed_url = self.LateOverride(self.url)
     file_list = [] if not options.nohooks else None
+    revision_override = revision_overrides.pop(self.name, None)
     if run_scm and parsed_url:
       if isinstance(parsed_url, self.FileImpl):
         # Special support for single-file checkout.
@@ -785,7 +821,7 @@
       else:
         # Create a shallow copy to mutate revision.
         options = copy.copy(options)
-        options.revision = revision_overrides.pop(self.name, None)
+        options.revision = revision_override
         self.maybeGetParentRevision(
             command, options, parsed_url, self.parent)
         self._used_revision = options.revision
@@ -1053,6 +1089,11 @@
 
   @property
   @gclient_utils.lockedmethod
+  def allowed_hosts(self):
+    return self._allowed_hosts
+
+  @property
+  @gclient_utils.lockedmethod
   def file_list(self):
     return tuple(self._file_list)
 
@@ -1077,7 +1118,8 @@
     out = []
     for i in ('name', 'url', 'parsed_url', 'safesync_url', 'custom_deps',
               'custom_vars', 'deps_hooks', 'file_list', 'should_process',
-              'processed', 'hooks_ran', 'deps_parsed', 'requirements'):
+              'processed', 'hooks_ran', 'deps_parsed', 'requirements',
+              'allowed_hosts'):
       # First try the native property if it exists.
       if hasattr(self, '_' + i):
         value = getattr(self, '_' + i, False)
@@ -1460,7 +1502,7 @@
     revision_overrides = {}
     # It's unnecessary to check for revision overrides for 'recurse'.
     # Save a few seconds by not calling _EnforceRevisions() in that case.
-    if command not in ('diff', 'recurse', 'runhooks', 'status'):
+    if command not in ('diff', 'recurse', 'runhooks', 'status', 'revert'):
       self._CheckConfig()
       revision_overrides = self._EnforceRevisions()
     pm = None
@@ -1500,23 +1542,18 @@
         # Fix path separator on Windows.
         entry_fixed = entry.replace('/', os.path.sep)
         e_dir = os.path.join(self.root_dir, entry_fixed)
-
-        def _IsParentOfAny(parent, path_list):
-          parent_plus_slash = parent + '/'
-          return any(
-              path[:len(parent_plus_slash)] == parent_plus_slash
-              for path in path_list)
-
         # Use entry and not entry_fixed there.
         if (entry not in entries and
             (not any(path.startswith(entry + '/') for path in entries)) and
             os.path.exists(e_dir)):
+          # The entry has been removed from DEPS.
           scm = gclient_scm.CreateSCM(
               prev_url, self.root_dir, entry_fixed, self.outbuf)
 
           # Check to see if this directory is now part of a higher-up checkout.
           # The directory might be part of a git OR svn checkout.
           scm_root = None
+          scm_class = None
           for scm_class in (gclient_scm.scm.GIT, gclient_scm.scm.SVN):
             try:
               scm_root = scm_class.GetCheckoutRoot(scm.checkout_path)
@@ -1529,9 +1566,45 @@
                             'determine whether it is part of a higher-level '
                             'checkout, so not removing.' % entry)
             continue
+
+          # This is to handle the case of third_party/WebKit migrating from
+          # being a DEPS entry to being part of the main project.
+          # If the subproject is a Git project, we need to remove its .git
+          # folder. Otherwise git operations on that folder will have different
+          # effects depending on the current working directory.
+          if scm_class == gclient_scm.scm.GIT and (
+              os.path.abspath(scm_root) == os.path.abspath(e_dir)):
+            e_par_dir = os.path.join(e_dir, os.pardir)
+            if scm_class.IsInsideWorkTree(e_par_dir):
+              par_scm_root = scm_class.GetCheckoutRoot(e_par_dir)
+              # rel_e_dir : relative path of entry w.r.t. its parent repo.
+              rel_e_dir = os.path.relpath(e_dir, par_scm_root)
+              if scm_class.IsDirectoryVersioned(par_scm_root, rel_e_dir):
+                save_dir = scm.GetGitBackupDirPath()
+                # Remove any eventual stale backup dir for the same project.
+                if os.path.exists(save_dir):
+                  gclient_utils.rmtree(save_dir)
+                os.rename(os.path.join(e_dir, '.git'), save_dir)
+                # When switching between the two states (entry/ is a subproject
+                # -> entry/ is part of the outer project), it is very likely
+                # that some files are changed in the checkout, unless we are
+                # jumping *exactly* across the commit which changed just DEPS.
+                # In such case we want to cleanup any eventual stale files
+                # (coming from the old subproject) in order to end up with a
+                # clean checkout.
+                scm_class.CleanupDir(par_scm_root, rel_e_dir)
+                assert not os.path.exists(os.path.join(e_dir, '.git'))
+                print(('\nWARNING: \'%s\' has been moved from DEPS to a higher '
+                       'level checkout. The git folder containing all the local'
+                       ' branches has been saved to %s.\n'
+                       'If you don\'t care about its state you can safely '
+                       'remove that folder to free up space.') %
+                      (entry, save_dir))
+                continue
+
           if scm_root in full_entries:
-            logging.info('%s is part of a higher level checkout, not '
-                         'removing.', scm.GetCheckoutRoot())
+            logging.info('%s is part of a higher level checkout, not removing',
+                         scm.GetCheckoutRoot())
             continue
 
           file_list = []
@@ -1751,6 +1824,16 @@
                   'git', 'grep', '--null', '--color=Always'] + args)
 
 
+def CMDroot(parser, args):
+  """Outputs the solution root (or current dir if there isn't one)."""
+  (options, args) = parser.parse_args(args)
+  client = GClient.LoadCurrentConfig(options)
+  if client:
+    print(os.path.abspath(client.root_dir))
+  else:
+    print(os.path.abspath('.'))
+
+
 @subcommand.usage('[url] [safesync url]')
 def CMDconfig(parser, args):
   """Creates a .gclient file in the current directory.
@@ -1930,6 +2013,9 @@
   parser.add_option('-M', '--merge', action='store_true',
                     help='merge upstream changes instead of trying to '
                          'fast-forward or rebase')
+  parser.add_option('-A', '--auto_rebase', action='store_true',
+                    help='Automatically rebase repositories against local '
+                         'checkout during update (git only).')
   parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                     help='override deps for the specified (comma-separated) '
                          'platform(s); \'all\' will process all deps_os '
@@ -1948,6 +2034,9 @@
   parser.add_option('--shallow', action='store_true',
                     help='GIT ONLY - Do a shallow clone into the cache dir. '
                          'Requires Git 1.9+')
+  parser.add_option('--no_bootstrap', '--no-bootstrap',
+                    action='store_true',
+                    help='Don\'t bootstrap from Google Storage.')
   parser.add_option('--ignore_locks', action='store_true',
                     help='GIT ONLY - Ignore cache locks.')
   (options, args) = parser.parse_args(args)
@@ -2086,6 +2175,27 @@
   return 0
 
 
+def CMDverify(parser, args):
+  """Verifies the DEPS file deps are only from allowed_hosts."""
+  (options, args) = parser.parse_args(args)
+  client = GClient.LoadCurrentConfig(options)
+  if not client:
+    raise gclient_utils.Error('client not configured; see \'gclient config\'')
+  client.RunOnDeps(None, [])
+  # Look at each first-level dependency of this gclient only.
+  for dep in client.dependencies:
+    bad_deps = dep.findDepsFromNotAllowedHosts()
+    if not bad_deps:
+      continue
+    print "There are deps from not allowed hosts in file %s" % dep.deps_file
+    for bad_dep in bad_deps:
+      print "\t%s at %s" % (bad_dep.name, bad_dep.url)
+    print "allowed_hosts:", ', '.join(dep.allowed_hosts)
+    sys.stdout.flush()
+    raise gclient_utils.Error(
+        'dependencies from disallowed hosts; check your DEPS file.')
+  return 0
+
 class OptionParser(optparse.OptionParser):
   gclientfile_default = os.environ.get('GCLIENT_FILE', '.gclient')
 
@@ -2169,7 +2279,7 @@
   sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout)
 
 
-def Main(argv):
+def main(argv):
   """Doesn't parse the arguments here, just find the right subcommand to
   execute."""
   if sys.hexversion < 0x02060000:
@@ -2195,9 +2305,14 @@
     return 1
   finally:
     gclient_utils.PrintWarnings()
+  return 0
 
 
 if '__main__' == __name__:
-  sys.exit(Main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
 
 # vim: ts=2:sw=2:tw=80:et:
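A minimal sketch of the host check behind the new 'gclient verify' command and findDepsFromNotAllowedHosts() above: a dep is flagged when its URL names a host outside the DEPS whitelist. The sample URL and whitelist here are hypothetical.

import urlparse

allowed_hosts = frozenset(['chromium.googlesource.com'])  # hypothetical
dep_url = 'https://example.com/foo.git'                   # hypothetical
netloc = urlparse.urlparse(dep_url).netloc
if netloc and netloc not in allowed_hosts:
  print '%s is from a disallowed host' % dep_url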
diff --git a/gclient_scm.py b/gclient_scm.py
index a465ec4..473a1bd 100644
--- a/gclient_scm.py
+++ b/gclient_scm.py
@@ -27,10 +27,9 @@
 THIS_FILE_PATH = os.path.abspath(__file__)
 
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
 
-CHROMIUM_SRC_URL = 'https://chromium.googlesource.com/chromium/src.git'
+
 class DiffFiltererWrapper(object):
   """Simple base class which tracks which file is being diffed and
   replaces instances of its file name in the original and
@@ -97,6 +96,10 @@
     elif (url.startswith('http://') or url.startswith('https://') or
           url.startswith('svn://') or url.startswith('svn+ssh://')):
       return 'svn'
+    elif url.startswith('file://'):
+      if url.endswith('.git'):
+        return 'git'
+      return 'svn'
   return None
 
 
@@ -294,7 +297,7 @@
     gclient_utils.CheckCallAndFilter(
         ['git', 'diff', merge_base],
         cwd=self.checkout_path,
-        filter_fn=GitDiffFilterer(self.relpath).Filter, print_func=self.Print)
+        filter_fn=GitDiffFilterer(self.relpath, print_func=self.Print).Filter)
 
   def _FetchAndReset(self, revision, file_list, options):
     """Equivalent to git fetch; git reset."""
@@ -315,10 +318,10 @@
       return
     for f in os.listdir(hook_dir):
       if not f.endswith('.sample') and not f.endswith('.disabled'):
-        dest_name = os.path.join(hook_dir, f + '.disabled')
-        if os.path.exists(dest_name):
-          os.remove(dest_name)
-        os.rename(os.path.join(hook_dir, f), dest_name)
+        disabled_hook_path = os.path.join(hook_dir, f + '.disabled')
+        if os.path.exists(disabled_hook_path):
+          os.remove(disabled_hook_path)
+        os.rename(os.path.join(hook_dir, f), disabled_hook_path)
 
   def update(self, options, args, file_list):
     """Runs git to update or transparently checkout the working copy.
@@ -388,6 +391,20 @@
     if mirror:
       url = mirror.mirror_path
 
+    # If we are going to introduce a new project, there is a possibility that
+    # we are syncing back to a state where the project was originally a
+    # sub-project rolled by DEPS (realistic case: crossing the Blink merge point
+    # syncing backwards, when Blink was a DEPS entry and not part of src.git).
+    # In such case, we might have a backup of the former .git folder, which can
+    # be used to avoid re-fetching the entire repo again (useful for bisects).
+    backup_dir = self.GetGitBackupDirPath()
+    target_dir = os.path.join(self.checkout_path, '.git')
+    if os.path.exists(backup_dir) and not os.path.exists(target_dir):
+      gclient_utils.safe_makedirs(self.checkout_path)
+      os.rename(backup_dir, target_dir)
+      # Reset to a clean state
+      self._Run(['reset', '--hard', 'HEAD'], options)
+
     if (not os.path.exists(self.checkout_path) or
         (os.path.isdir(self.checkout_path) and
          not os.path.exists(os.path.join(self.checkout_path, '.git')))):
@@ -435,6 +452,11 @@
         self._CheckClean(rev_str)
       # Switch over to the new upstream
       self._Run(['remote', 'set-url', self.remote, url], options)
+      if mirror:
+        with open(os.path.join(
+            self.checkout_path, '.git', 'objects', 'info', 'alternates'),
+            'w') as fh:
+          fh.write(os.path.join(url, 'objects'))
       self._FetchAndReset(revision, file_list, options)
       return_early = True
 
@@ -591,15 +613,16 @@
             self.Print('_____ %s%s' % (self.relpath, rev_str), timestamp=False)
             printed_path = True
           while True:
-            try:
-              action = self._AskForData(
-                  'Cannot %s, attempt to rebase? '
-                  '(y)es / (q)uit / (s)kip : ' %
-                      ('merge' if options.merge else 'fast-forward merge'),
-                  options)
-            except ValueError:
-              raise gclient_utils.Error('Invalid Character')
-            if re.match(r'yes|y', action, re.I):
+            if not options.auto_rebase:
+              try:
+                action = self._AskForData(
+                    'Cannot %s, attempt to rebase? '
+                    '(y)es / (q)uit / (s)kip : ' %
+                        ('merge' if options.merge else 'fast-forward merge'),
+                    options)
+              except ValueError:
+                raise gclient_utils.Error('Invalid Character')
+            if options.auto_rebase or re.match(r'yes|y', action, re.I):
               self._AttemptRebase(upstream_branch, files, options,
                                   printed_path=printed_path, merge=False)
               printed_path = True
@@ -798,6 +821,12 @@
     base_url = self.url
     return base_url[:base_url.rfind('/')] + url
 
+  def GetGitBackupDirPath(self):
+    """Returns the path where the .git folder for the current project can be
+    staged/restored. Use case: subproject moved from DEPS <-> outer project."""
+    return os.path.join(self._root_dir,
+                        'old_' + self.relpath.replace(os.sep, '_')) + '.git'
+
   def _GetMirror(self, url, options):
     """Get a git_cache.Mirror object for the argument url."""
     if not git_cache.Mirror.GetCachePath():
@@ -806,12 +835,6 @@
         'print_func': self.filter,
         'refs': []
     }
-    # TODO(hinoka): This currently just fails because lkcr/lkgr are branches
-    #               not tags. This also adds 20 seconds to every bot_update
-    #               run, so I'm commenting this out until lkcr/lkgr become
-    #               tags.  (2014/4/24)
-    # if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
-    #  mirror_kwargs['refs'].extend(['refs/tags/lkgr', 'refs/tags/lkcr'])
     if hasattr(options, 'with_branch_heads') and options.with_branch_heads:
       mirror_kwargs['refs'].append('refs/branch-heads/*')
     if hasattr(options, 'with_tags') and options.with_tags:
@@ -829,7 +852,9 @@
         depth = 10000
     else:
       depth = None
-    mirror.populate(verbose=options.verbose, bootstrap=True, depth=depth,
+    mirror.populate(verbose=options.verbose,
+                    bootstrap=not getattr(options, 'no_bootstrap', False),
+                    depth=depth,
                     ignore_lock=getattr(options, 'ignore_locks', False))
     mirror.unlock()
 
@@ -1127,21 +1152,37 @@
 
   def _Run(self, args, options, show_header=True, **kwargs):
     # Disable 'unused options' warning | pylint: disable=W0613
-    cwd = kwargs.setdefault('cwd', self.checkout_path)
+    kwargs.setdefault('cwd', self.checkout_path)
     kwargs.setdefault('stdout', self.out_fh)
     kwargs['filter_fn'] = self.filter
     kwargs.setdefault('print_stdout', False)
     env = scm.GIT.ApplyEnvVars(kwargs)
     cmd = ['git'] + args
     if show_header:
-      header = "running '%s' in '%s'" % (' '.join(cmd), cwd)
-      self.filter(header)
-    return gclient_utils.CheckCallAndFilter(cmd, env=env, **kwargs)
+      gclient_utils.CheckCallAndFilterAndHeader(cmd, env=env, **kwargs)
+    else:
+      gclient_utils.CheckCallAndFilter(cmd, env=env, **kwargs)
 
 
 class SVNWrapper(SCMWrapper):
   """ Wrapper for SVN """
   name = 'svn'
+  _PRINTED_DEPRECATION = False
+
+  _MESSAGE = (
+    'Oh hai! You are using subversion. Chrome infra is eager to get rid of',
+    'svn support so please switch to git.',
+    'Tracking bug: http://crbug.com/475320',
+    'If you are a project owner, you may request git migration assistance at: ',
+    '  https://code.google.com/p/chromium/issues/entry?template=Infra-Git')
+
+  def __init__(self, *args, **kwargs):
+    super(SVNWrapper, self).__init__(*args, **kwargs)
+    suppress_deprecated_notice = os.environ.get(
+        'SUPPRESS_DEPRECATED_SVN_NOTICE', False)
+    if not SVNWrapper._PRINTED_DEPRECATION and not suppress_deprecated_notice:
+      SVNWrapper._PRINTED_DEPRECATION = True
+      sys.stderr.write('\n'.join(self._MESSAGE) + '\n')
 
   @staticmethod
   def BinaryExists():
@@ -1186,7 +1227,7 @@
         ['svn', 'diff', '-x', '--ignore-eol-style'] + args,
         cwd=self.checkout_path,
         print_stdout=False,
-        filter_fn=SvnDiffFilterer(self.relpath).Filter, print_func=self.Print)
+        filter_fn=SvnDiffFilterer(self.relpath, print_func=self.Print).Filter)
 
   def update(self, options, args, file_list):
     """Runs svn to update or transparently checkout the working copy.
diff --git a/gclient_utils.py b/gclient_utils.py
index a21e65a..21c44c3 100644
--- a/gclient_utils.py
+++ b/gclient_utils.py
@@ -166,6 +166,13 @@
       time.sleep(0.1)
 
 
+def rm_file_or_tree(path):
+  if os.path.isfile(path):
+    os.remove(path)
+  else:
+    rmtree(path)
+
+
 def rmtree(path):
   """shutil.rmtree() on steroids.
 
@@ -655,15 +662,8 @@
   raise Error('Unknown platform: ' + sys.platform)
 
 
-def GetBuildtoolsPath():
-  """Returns the full path to the buildtools directory.
-  This is based on the root of the checkout containing the current directory."""
-
-  # Overriding the build tools path by environment is highly unsupported and may
-  # break without warning.  Do not rely on this for anything important.
-  override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
-  if override is not None:
-    return override
+def GetPrimarySolutionPath():
+  """Returns the full path to the primary solution. (gclient_root + src)"""
 
   gclient_root = FindGclientRoot(os.getcwd())
   if not gclient_root:
@@ -679,18 +679,37 @@
       pass
     top_dir = top_dir[0]
     if os.path.exists(os.path.join(top_dir, 'buildtools')):
-      return os.path.join(top_dir, 'buildtools')
+      return top_dir
     return None
 
   # Some projects' top directory is not named 'src'.
   source_dir_name = GetGClientPrimarySolutionName(gclient_root) or 'src'
-  return os.path.join(gclient_root, source_dir_name, 'buildtools')
+  return os.path.join(gclient_root, source_dir_name)
+
+
+def GetBuildtoolsPath():
+  """Returns the full path to the buildtools directory.
+  This is based on the root of the checkout containing the current directory."""
+
+  # Overriding the build tools path by environment is highly unsupported and may
+  # break without warning.  Do not rely on this for anything important.
+  override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
+  if override is not None:
+    return override
+
+  primary_solution = GetPrimarySolutionPath()
+  if not primary_solution:
+    return None
+  buildtools_path = os.path.join(primary_solution, 'buildtools')
+  if not os.path.exists(buildtools_path):
+    # Buildtools may be in the gclient root.
+    gclient_root = FindGclientRoot(os.getcwd())
+    buildtools_path = os.path.join(gclient_root, 'buildtools')
+  return buildtools_path
 
 
 def GetBuildtoolsPlatformBinaryPath():
   """Returns the full path to the binary directory for the current platform."""
-  # Mac and Windows just have one directory, Linux has two according to whether
-  # it's 32 or 64 bits.
   buildtools_path = GetBuildtoolsPath()
   if not buildtools_path:
     return None
@@ -700,10 +719,7 @@
   elif sys.platform == 'darwin':
     subdir = 'mac'
   elif sys.platform.startswith('linux'):
-    if sys.maxsize > 2**32:
       subdir = 'linux64'
-    else:
-      subdir = 'linux32'
   else:
     raise Error('Unknown platform: ' + sys.platform)
   return os.path.join(buildtools_path, subdir)
@@ -1130,15 +1146,33 @@
 def NumLocalCpus():
   """Returns the number of processors.
 
-  Python on OSX 10.6 raises a NotImplementedError exception.
+  multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
+  is known to do this on some Windows systems and OSX 10.6. If we can't get the
+  CPU count, we will fall back to '1'.
   """
+  # Surround the entire thing in try/except; no failure here should stop gclient
+  # from working.
   try:
-    import multiprocessing
-    return multiprocessing.cpu_count()
-  except:  # pylint: disable=W0702
-    # Mac OS 10.6 only
-    # pylint: disable=E1101
-    return int(os.sysconf('SC_NPROCESSORS_ONLN'))
+    # Use multiprocessing to get CPU count. This may raise
+    # NotImplementedError.
+    try:
+      import multiprocessing
+      return multiprocessing.cpu_count()
+    except NotImplementedError:  # pylint: disable=W0702
+      # (UNIX) Query 'os.sysconf'.
+      # pylint: disable=E1101
+      if hasattr(os, 'sysconf') and 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
+        return int(os.sysconf('SC_NPROCESSORS_ONLN'))
+
+      # (Windows) Query 'NUMBER_OF_PROCESSORS' environment variable.
+      if 'NUMBER_OF_PROCESSORS' in os.environ:
+        return int(os.environ['NUMBER_OF_PROCESSORS'])
+  except Exception as e:
+    logging.exception("Exception raised while probing CPU count: %s", e)
+
+  logging.debug('Failed to get CPU count. Defaulting to 1.')
+  return 1
+
 
 def DefaultDeltaBaseCacheLimit():
   """Return a reasonable default for the git config core.deltaBaseCacheLimit.
@@ -1152,6 +1186,7 @@
   else:
     return '512m'
 
+
 def DefaultIndexPackConfig(url=''):
   """Return reasonable default values for configuring git-index-pack.
 
@@ -1162,3 +1197,21 @@
   if url in THREADED_INDEX_PACK_BLACKLIST:
     result.extend(['-c', 'pack.threads=1'])
   return result
+
+
+def FindExecutable(executable):
+  """This mimics the "which" utility."""
+  path_folders = os.environ.get('PATH').split(os.pathsep)
+
+  for path_folder in path_folders:
+    target = os.path.join(path_folder, executable)
+    # Just in case we have some ~/blah paths.
+    target = os.path.abspath(os.path.expanduser(target))
+    if os.path.isfile(target) and os.access(target, os.X_OK):
+      return target
+    if sys.platform.startswith('win'):
+      for suffix in ('.bat', '.cmd', '.exe'):
+        alt_target = target + suffix
+        if os.path.isfile(alt_target) and os.access(alt_target, os.X_OK):
+          return alt_target
+  return None
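Example use of the FindExecutable() helper moved here from git_cache.py: it searches each PATH entry like the 'which' utility, additionally trying the .bat/.cmd/.exe suffixes on Windows:

git_path = FindExecutable('git')
if git_path:
  print 'git found at %s' % git_path
else:
  print 'git is not on PATH'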
diff --git a/git-auto-svn b/git-auto-svn
new file mode 100755
index 0000000..6a07962
--- /dev/null
+++ b/git-auto-svn
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+. $(type -P python_git_runner.sh)
diff --git a/git-cache b/git-cache
index 95eef4b..6a07962 100755
--- a/git-cache
+++ b/git-cache
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cache.py - a git-command for managing local caches of remote repositories.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-cherry-pick-upload b/git-cherry-pick-upload
index 4ab9b63..6a07962 100755
--- a/git-cherry-pick-upload
+++ b/git-cherry-pick-upload
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cherry_pick_upload.py -- Upload a cherry pick CL to rietveld.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-cl b/git-cl
index e2082a4..12b19e3 100755
--- a/git-cl
+++ b/git-cl
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_cl.py -- a git-command for integrating reviews on Rietveld
-# Copyright (C) 2008 Evan Martin <martine@danga.com>
-
 . $(type -P python_git_runner.sh)
diff --git a/git-crrev-parse b/git-crrev-parse
new file mode 100755
index 0000000..77318fe
--- /dev/null
+++ b/git-crrev-parse
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# This git extension converts a chromium commit number to its git commit hash.
+# It accepts the following input formats:
+# 
+#   $ git crrev-parse Cr-Commit-Position: refs/heads/master@{#311769}
+#   $ git crrev-parse '    Cr-Commit-Position: refs/heads/master@{#311769}'
+#   $ git crrev-parse 'Cr-Commit-Position: refs/heads/master@{#311769}'
+#   $ git crrev-parse refs/heads/master@{#311769}
+#   
+# It also works for branches (assuming you have branches in your local
+# checkout):
+#   
+#   $ git crrev-parse refs/branch-heads/2278@{#2}
+#   
+# If you don't specify a branch, refs/heads/master is assumed:
+#   
+#   $ git crrev-parse @{#311769}
+#   $ git crrev-parse 311769
+
+# Developer note: this script makes heavy use of prefix/suffix/pattern
+# substitution for bash variables.  Refer to the "Parameter Expansion"
+# section of the man page for bash.
+
+while [ -n "$1" ]; do
+  if [[ "$1" = "Cr-Commit-Position:" ]] && [[ "$2" =~ .*@\{#[0-9][0-9]*\} ]]; then
+    commit_pos="$2"
+    shift
+  else
+    commit_pos="${1#*Cr-Commit-Position: }"
+  fi
+  ref="${commit_pos%@\{#*\}}"
+  if [ "$ref" = "$commit_pos" -o -z "$ref" ]; then
+    ref="refs/heads/master"
+  fi
+  remote_ref="${ref/refs\/heads/refs\/remotes\/origin}"
+  remote_ref="${remote_ref/refs\/branch-heads/refs\/remotes\/branch-heads}"
+  num="${commit_pos#*@\{\#}"
+  num="${num%\}}"
+  
+  if [ -z "$ref" -o -z "$num" ]; then
+    git rev-parse "$1"
+  else
+    grep_str="Cr-Commit-Position: $ref@{#$num}"
+    git rev-list -n 1 --grep="$grep_str" "$remote_ref"
+  fi
+
+  shift
+done
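The same parsing expressed in Python, as a hedged sketch: split a commit position like 'refs/heads/master@{#311769}' into (ref, number), defaulting to refs/heads/master when only a number is given, just as the script above does.

import re

def parse_commit_position(value):
  value = value.strip()
  if value.startswith('Cr-Commit-Position:'):
    value = value[len('Cr-Commit-Position:'):].strip()
  m = re.match(r'^(?P<ref>refs/\S+)?@\{#(?P<num>\d+)\}$', value)
  if m:
    return (m.group('ref') or 'refs/heads/master', m.group('num'))
  if value.isdigit():  # bare number, e.g. '311769'
    return ('refs/heads/master', value)
  return None

# parse_commit_position('Cr-Commit-Position: refs/heads/master@{#311769}')
#   -> ('refs/heads/master', '311769')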
diff --git a/git-footers b/git-footers
index 635cd8f..6a07962 100755
--- a/git-footers
+++ b/git-footers
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_footers.py -- Extract the conventional footers associated with a commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-freeze b/git-freeze
index 0187c2e..5e485bb 100755
--- a/git-freeze
+++ b/git-freeze
@@ -3,9 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_freezer.py freeze -- a git-command to suspend all existing working
-# directory modifications. This can be reversed with the 'git thaw' command.
-
 SCRIPT=git_freezer.py
 set -- freeze "$@"
 . $(type -P python_git_runner.sh)
diff --git a/git-map b/git-map
index 3e651c7..03e8418 100755
--- a/git-map
+++ b/git-map
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_map.py -- a git-command for presenting a graphical view of the git
-# history.
-
 . $(type -P python_git_runner.sh) | less -R
diff --git a/git-map-branches b/git-map-branches
index b7da014..6a07962 100755
--- a/git-map-branches
+++ b/git-map-branches
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_map_branches.py -- a git-command for presenting a graphical view of git
-# branches in the current repo, and their relationships to each other.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-mark-merge-base b/git-mark-merge-base
index 502d04c..6a07962 100755
--- a/git-mark-merge-base
+++ b/git-mark-merge-base
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_mark_merge_base.py -- Manually set the merge base for the current branch.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-nav-downstream b/git-nav-downstream
index 7f8a677..6a07962 100755
--- a/git-nav-downstream
+++ b/git-nav-downstream
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_nav_downstream.py -- a git-command to navigate to a downstream branch.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-nav-upstream b/git-nav-upstream
index bec3eba..f16dc2f 100755
--- a/git-nav-upstream
+++ b/git-nav-upstream
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# a git-command to navigate to the upstream branch.
-
 git checkout '@{u}'
diff --git a/git-new-branch b/git-new-branch
index fb56ee8..6a07962 100755
--- a/git-new-branch
+++ b/git-new-branch
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_new_branch.py -- Create a new branch which tracks the default upstream
-# (origin/master).
-
 . $(type -P python_git_runner.sh)
diff --git a/git-number b/git-number
index e1d1298..a5037a4 100755
--- a/git-number
+++ b/git-number
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_number.py - a git-command for calculating and displaying the generation
-# number of a commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-rebase-update b/git-rebase-update
index 60c16c3..6a07962 100755
--- a/git-rebase-update
+++ b/git-rebase-update
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_rebase_update.py -- Update remote sources, and use rebase to update all
-# branches in this repo.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-rename-branch b/git-rename-branch
index 8c18884..6a07962 100755
--- a/git-rename-branch
+++ b/git-rename-branch
@@ -3,7 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_rename_branch.py -- Rename the current branch, correctly updating the
-# upstream branch of all the downstream branches.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-reparent-branch b/git-reparent-branch
index 1fdb1ae..6a07962 100755
--- a/git-reparent-branch
+++ b/git-reparent-branch
@@ -3,8 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_reparent_branch.py -- Change the parent (upstream) branch of the current
-# branch. Afterwards, run a `git rebase-update` cycle to ensure that all
-# branches correctly reflect their parentage.
-
 . $(type -P python_git_runner.sh)
diff --git a/git-retry b/git-retry
index 2c03942..beb0976 100755
--- a/git-retry
+++ b/git-retry
@@ -3,9 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_freezer.py freeze -- a git-command to suspend all existing working
-# directory modifications. This can be reversed with the 'git thaw' command.
-
 SCRIPT=git_retry.py
 set -- retry "$@"
 . $(type -P python_git_runner.sh)
diff --git a/git-squash-branch b/git-squash-branch
index 0fb1832..6a07962 100755
--- a/git-squash-branch
+++ b/git-squash-branch
@@ -3,6 +3,4 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# git_squash_branch.py -- Collapses the current branch to a single commit.
-
 . $(type -P python_git_runner.sh)
diff --git a/git_auto_svn.py b/git_auto_svn.py
new file mode 100755
index 0000000..88d970b
--- /dev/null
+++ b/git_auto_svn.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Performs all git-svn setup steps necessary for 'git svn dcommit' to work.
+
+Assumes that trunk of the svn remote maps to master of the git remote.
+
+Example:
+git clone https://chromium.googlesource.com/chromium/tools/depot_tools
+cd depot_tools
+git auto-svn
+"""
+
+import argparse
+import os
+import sys
+import urlparse
+
+import subprocess2
+
+from git_common import run as run_git
+from git_common import run_stream_with_retcode as run_git_stream_with_retcode
+from git_common import set_config, root, ROOT
+from git_footers import get_footer_svn_id
+
+
+SVN_EXE = ROOT + '\\svn.bat' if sys.platform.startswith('win') else 'svn'
+
+
+def run_svn(*cmd, **kwargs):
+  """Runs an svn command.
+
+  Returns (stdout, stderr) as a pair of strings.
+
+  Raises subprocess2.CalledProcessError on nonzero return code.
+  """
+  kwargs.setdefault('stdin', subprocess2.PIPE)
+  kwargs.setdefault('stdout', subprocess2.PIPE)
+  kwargs.setdefault('stderr', subprocess2.PIPE)
+
+  cmd = (SVN_EXE,) + cmd
+  proc = subprocess2.Popen(cmd, **kwargs)
+  ret, err = proc.communicate()
+  retcode = proc.wait()
+  if retcode != 0:
+    raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(), ret, err)
+
+  return ret, err
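+# A minimal usage sketch (the URL is illustrative):
+#   out, err = run_svn('info', 'https://src.chromium.org/chrome')
+#   print out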
+
+
+def main(argv):
+  # No command line flags. Just use the parser to prevent people from trying
+  # to pass flags that don't do anything, and to provide 'usage'.
+  parser = argparse.ArgumentParser(
+      description='Automatically set up git-svn for a repo mirrored from svn.')
+  parser.parse_args(argv)
+
+  upstream = root()
+  svn_id = get_footer_svn_id(upstream)
+  assert svn_id, 'No valid git-svn-id footer found on %s.' % upstream
+  print 'Found git-svn-id footer %s on %s' % (svn_id, upstream)
+
+  parsed_svn = urlparse.urlparse(svn_id)
+  path_components = parsed_svn.path.split('/')
+  svn_repo = None
+  svn_path = None
+  for i in xrange(len(path_components)):
+    try:
+      maybe_repo = '%s://%s%s' % (
+          parsed_svn.scheme, parsed_svn.netloc, '/'.join(path_components[:i+1]))
+      print 'Checking %s' % maybe_repo
+      run_svn('info', maybe_repo)
+      svn_repo = maybe_repo
+      svn_path = '/'.join(path_components[i+1:])
+      break
+    except subprocess2.CalledProcessError, e:
+      if 'E170001' in str(e):
+        print 'Authentication failed:'
+        print e
+        print ('Try running "svn ls %s" with the password'
+               ' from https://chromium-access.appspot.com' % maybe_repo)
+        print
+      continue
+  assert svn_repo is not None, 'Unable to find svn repo for %s' % svn_id
+  print 'Found upstream svn repo %s and path %s' % (svn_repo, svn_path)
+
+  set_config('svn-remote.svn.url', svn_repo)
+  set_config('svn-remote.svn.fetch',
+             '%s:refs/remotes/%s' % (svn_path, upstream))
+  print 'Configured metadata, running "git svn fetch". This may take some time.'
+  with run_git_stream_with_retcode('svn', 'fetch') as stdout:
+    for line in stdout.xreadlines():
+      print line.strip()
+  return 0
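+# After a successful run, the repository carries git-svn configuration along
+# these lines (values are illustrative):
+#   svn-remote.svn.url = svn://svn.chromium.org/chrome
+#   svn-remote.svn.fetch = trunk/src:refs/remotes/origin/master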
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cache.py b/git_cache.py
index eac2093..e80923c 100755
--- a/git_cache.py
+++ b/git_cache.py
@@ -142,8 +142,7 @@
 
   git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
   gsutil_exe = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
   cachepath_lock = threading.Lock()
 
   def __init__(self, url, refs=None, print_func=None):
@@ -151,7 +150,14 @@
     self.refs = refs or []
     self.basedir = self.UrlToCacheDir(url)
     self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
-    self.print = print_func or print
+    if print_func:
+      self.print = self.print_without_file
+      self.print_func = print_func
+    else:
+      self.print = print
+
+  def print_without_file(self, message, **kwargs):
+    self.print_func(message)
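+  # print_without_file drops any keyword arguments (such as file=) so that a
+  # caller-supplied print_func, which may not accept them, only ever receives
+  # the message itself.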
 
   @property
   def bootstrap_bucket(self):
@@ -179,24 +185,6 @@
     netpath = re.sub(r'\b-\b', '/', os.path.basename(path)).replace('--', '-')
     return 'https://%s' % netpath
 
-  @staticmethod
-  def FindExecutable(executable):
-    """This mimics the "which" utility."""
-    path_folders = os.environ.get('PATH').split(os.pathsep)
-
-    for path_folder in path_folders:
-      target = os.path.join(path_folder, executable)
-      # Just incase we have some ~/blah paths.
-      target = os.path.abspath(os.path.expanduser(target))
-      if os.path.isfile(target) and os.access(target, os.X_OK):
-        return target
-      if sys.platform.startswith('win'):
-        for suffix in ('.bat', '.cmd', '.exe'):
-          alt_target = target + suffix
-          if os.path.isfile(alt_target) and os.access(alt_target, os.X_OK):
-            return alt_target
-    return None
-
   @classmethod
   def SetCachePath(cls, cachepath):
     with cls.cachepath_lock:
@@ -267,16 +255,17 @@
     """
 
     python_fallback = False
-    if sys.platform.startswith('win') and not self.FindExecutable('7z'):
+    if (sys.platform.startswith('win') and
+        not gclient_utils.FindExecutable('7z')):
       python_fallback = True
     elif sys.platform.startswith('darwin'):
       # The OSX version of unzip doesn't support zip64.
       python_fallback = True
-    elif not self.FindExecutable('unzip'):
+    elif not gclient_utils.FindExecutable('unzip'):
       python_fallback = True
 
     gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
-    gsutil = Gsutil(self.gsutil_exe, boto_path=None, bypass_prodaccess=True)
+    gsutil = Gsutil(self.gsutil_exe, boto_path=None)
     # Get the most recent version of the zipfile.
     _, ls_out, _ = gsutil.check_call('ls', gs_folder)
     ls_out_sorted = sorted(ls_out.splitlines())
@@ -313,7 +302,7 @@
           retcode = 0
     finally:
       # Clean up the downloaded zipfile.
-      gclient_utils.rmtree(tempdir)
+      gclient_utils.rm_file_or_tree(tempdir)
 
     if retcode:
       self.print(
@@ -487,7 +476,7 @@
                      if os.path.isdir(os.path.join(cachepath, path))])
     for dirent in dirlist:
       if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
-        gclient_utils.rmtree(os.path.join(cachepath, dirent))
+        gclient_utils.rm_file_or_tree(os.path.join(cachepath, dirent))
       elif (dirent.endswith('.lock') and
           os.path.isfile(os.path.join(cachepath, dirent))):
         repo_dirs.add(os.path.join(cachepath, dirent[:-5]))
@@ -691,4 +680,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cherry_pick_upload.py b/git_cherry_pick_upload.py
index 2a048fa..3090364 100755
--- a/git_cherry_pick_upload.py
+++ b/git_cherry_pick_upload.py
@@ -5,23 +5,26 @@
 
 """Upload a cherry pick CL to rietveld."""
 
-import argparse
 import md5
+import optparse
 import subprocess2
 import sys
 
+import auth
+
 from git_cl import Changelist
 from git_common import config, run
 from third_party.upload import EncodeMultipartFormData, GitVCS
 from rietveld import Rietveld
 
 
-def cherry_pick(target_branch, commit):
+def cherry_pick(target_branch, commit, auth_config):
   """Attempt to upload a cherry pick CL to rietveld.
 
   Args:
     target_branch: The branch to cherry pick onto.
     commit: The git hash of the commit to cherry pick.
+    auth_config: auth.AuthConfig object with authentication configuration.
   """
   author = config('user.email')
 
@@ -48,7 +51,7 @@
           run('diff', parent, commit))),
   ])
 
-  rietveld = Rietveld(config('rietveld.server'), author, None)
+  rietveld = Rietveld(config('rietveld.server'), auth_config, author)
   # pylint: disable=W0212
   output = rietveld._send(
     '/upload',
@@ -124,21 +127,29 @@
 
 
 def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument(
+  parser = optparse.OptionParser(
+      usage='usage: %prog --branch <branch> <commit>')
+  parser.add_option(
       '--branch',
       '-b',
       help='The upstream branch to cherry pick to.',
-      metavar='<branch>',
-      required=True,
-  )
-  parser.add_argument(
-      'commit',
-      help='SHA to cherry pick.',
-      metavar='<commit>',
-  )
-  args = parser.parse_args()
-  cherry_pick(args.branch, args.commit)
+      metavar='<branch>')
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args()
+  auth_config = auth.extract_auth_config_from_options(options)
+
+  if not options.branch:
+    parser.error('--branch is required')
+  if len(args) != 1:
+    parser.error('Expecting single argument <commit>')
+
+  cherry_pick(options.branch, args[0], auth_config)
+  return 0
+
 
 if __name__ == '__main__':
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_cl.py b/git_cl.py
index 5d39cab..8b474a1 100755
--- a/git_cl.py
+++ b/git_cl.py
@@ -8,7 +8,11 @@
 """A git-command for integrating reviews on Rietveld."""
 
 from distutils.version import LooseVersion
+from multiprocessing.pool import ThreadPool
+import base64
+import collections
 import glob
+import httplib
 import json
 import logging
 import optparse
@@ -17,25 +21,31 @@
 import re
 import stat
 import sys
+import tempfile
 import textwrap
-import threading
+import time
+import traceback
 import urllib2
 import urlparse
 import webbrowser
+import zlib
 
 try:
   import readline  # pylint: disable=F0401,W0611
 except ImportError:
   pass
 
-
 from third_party import colorama
+from third_party import httplib2
 from third_party import upload
+import auth
 import breakpad  # pylint: disable=W0611
 import clang_format
+import dart_format
 import fix_encoding
 import gclient_utils
 import git_common
+from git_footers import get_footer_svn_id
 import owners
 import owners_finder
 import presubmit_support
@@ -52,6 +62,13 @@
 DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup'
 GIT_INSTRUCTIONS_URL = 'http://code.google.com/p/chromium/wiki/UsingGit'
 CHANGE_ID = 'Change-Id:'
+REFS_THAT_ALIAS_TO_OTHER_REFS = {
+    'refs/remotes/origin/lkgr': 'refs/remotes/origin/master',
+    'refs/remotes/origin/lkcr': 'refs/remotes/origin/master',
+}
+
+# Buildbucket-related constants
+BUILDBUCKET_HOST = 'cr-buildbucket.appspot.com'
 
 # Valid extensions for files we want to lint.
 DEFAULT_LINT_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
@@ -111,6 +128,11 @@
     return 1, ''
 
 
+def RunGitSilent(args):
+  """Returns stdout, suppresses stderr and ingores the return code."""
+  return RunGitWithCode(args, suppress_stderr=True)[1]
+
+
 def IsGitVersionAtLeast(min_version):
   prefix = 'git version '
   version = RunGit(['--version']).strip()
@@ -118,6 +140,13 @@
       LooseVersion(version[len(prefix):]) >= LooseVersion(min_version))
 
 
+def BranchExists(branch):
+  """Return True if specified branch exists."""
+  code, _ = RunGitWithCode(['rev-parse', '--verify', branch],
+                           suppress_stderr=True)
+  return not code
+
+
 def ask_for_data(prompt):
   try:
     return raw_input(prompt)
@@ -188,18 +217,116 @@
   parser.parse_args = Parse
 
 
-def is_dirty_git_tree(cmd):
-  # Make sure index is up-to-date before running diff-index.
-  RunGit(['update-index', '--refresh', '-q'], error_ok=True)
-  dirty = RunGit(['diff-index', '--name-status', 'HEAD'])
-  if dirty:
-    print 'Cannot %s with a dirty tree. You must commit locally first.' % cmd
-    print 'Uncommitted files: (git diff-index --name-status HEAD)'
-    print dirty[:4096]
-    if len(dirty) > 4096:
-      print '... (run "git diff-index --name-status HEAD" to see full output).'
-    return True
-  return False
+def _prefix_master(master):
+  """Convert user-specified master name to full master name.
+
+  Buildbucket uses the full master name (master.tryserver.chromium.linux) as
+  the bucket name, while developers usually use the shortened master name
+  (tryserver.chromium.linux) with the 'master.' prefix stripped. This function
+  converts between the two for the buildbucket migration.
+  """
+  prefix = 'master.'
+  if master.startswith(prefix):
+    return master
+  return '%s%s' % (prefix, master)
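+# Illustration: _prefix_master('tryserver.chromium.linux') returns
+# 'master.tryserver.chromium.linux', while a name that already carries the
+# 'master.' prefix is returned unchanged.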
+
+
+def trigger_try_jobs(auth_config, changelist, options, masters, category,
+                     override_properties=None):
+  rietveld_url = settings.GetDefaultServerUrl()
+  rietveld_host = urlparse.urlparse(rietveld_url).hostname
+  authenticator = auth.get_authenticator_for_host(rietveld_host, auth_config)
+  http = authenticator.authorize(httplib2.Http())
+  http.force_exception_to_status_code = True
+  issue_props = changelist.GetIssueProperties()
+  issue = changelist.GetIssue()
+  patchset = changelist.GetMostRecentPatchset()
+
+  buildbucket_put_url = (
+      'https://{hostname}/_ah/api/buildbucket/v1/builds/batch'.format(
+          hostname=BUILDBUCKET_HOST))
+  buildset = 'patch/rietveld/{hostname}/{issue}/{patch}'.format(
+      hostname=rietveld_host,
+      issue=issue,
+      patch=patchset)
+
+  batch_req_body = {'builds': []}
+  print_text = []
+  print_text.append('Tried jobs on:')
+  for master, builders_and_tests in sorted(masters.iteritems()):
+    print_text.append('Master: %s' % master)
+    bucket = _prefix_master(master)
+    for builder, tests in sorted(builders_and_tests.iteritems()):
+      print_text.append('  %s: %s' % (builder, tests))
+      parameters = {
+          'builder_name': builder,
+          'changes': [
+              {'author': {'email': issue_props['owner_email']}},
+          ],
+          'properties': {
+              'category': category,
+              'issue': issue,
+              'master': master,
+              'patch_project': issue_props['project'],
+              'patch_storage': 'rietveld',
+              'patchset': patchset,
+              'reason': options.name,
+              'revision': options.revision,
+              'rietveld': rietveld_url,
+              'testfilter': tests,
+          },
+      }
+      if override_properties:
+        parameters['properties'].update(override_properties)
+      if options.clobber:
+        parameters['properties']['clobber'] = True
+      batch_req_body['builds'].append(
+          {
+              'bucket': bucket,
+              'parameters_json': json.dumps(parameters),
+              'tags': ['builder:%s' % builder,
+                       'buildset:%s' % buildset,
+                       'master:%s' % master,
+                       'user_agent:git_cl_try']
+          }
+      )
+
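+  # For reference, each entry appended to batch_req_body['builds'] above is
+  # shaped roughly like this (values are illustrative):
+  #   {'bucket': 'master.tryserver.chromium.linux',
+  #    'parameters_json': '{"builder_name": "linux_rel", ...}',
+  #    'tags': ['builder:linux_rel', 'buildset:patch/rietveld/...',
+  #             'master:tryserver.chromium.linux', 'user_agent:git_cl_try']}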
+  for try_count in xrange(3):
+    response, content = http.request(
+        buildbucket_put_url,
+        'PUT',
+        body=json.dumps(batch_req_body),
+        headers={'Content-Type': 'application/json'},
+    )
+    content_json = None
+    try:
+      content_json = json.loads(content)
+    except ValueError:
+      pass
+
+    # Buildbucket could return an error even if status==200.
+    if content_json and content_json.get('error'):
+      msg = 'Error in response. Code: %s. Reason: %s. Message: %s.' % (
+          content_json['error'].get('code', ''),
+          content_json['error'].get('reason', ''),
+          content_json['error'].get('message', ''))
+      raise BuildbucketResponseException(msg)
+
+    if response.status == 200:
+      if not content_json:
+        raise BuildbucketResponseException(
+            'Buildbucket returned invalid json content: %s.\n'
+            'Please file bugs at crbug.com, label "Infra-BuildBucket".' %
+            content)
+      break
+    if response.status < 500 or try_count >= 2:
+      raise httplib2.HttpLib2Error(content)
+
+    # status >= 500 means transient failures.
+    logging.debug('Transient errors when triggering tryjobs. Will retry.')
+    time.sleep(0.5 + 1.5*try_count)
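+    # Sleeps 0.5s after the first attempt and 2.0s after the second; a third
+    # transient failure is raised above rather than retried.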
+
+  print '\n'.join(print_text)
 
 
 def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
@@ -269,6 +396,10 @@
       stdout=stdout, env=env)
 
 
+class BuildbucketResponseException(Exception):
+  pass
+
+
 class Settings(object):
   def __init__(self):
     self.default_server = None
@@ -283,6 +414,7 @@
     self.is_gerrit_autodetect_branch = None
     self.git_editor = None
     self.project = None
+    self.force_https_commit_url = None
     self.pending_ref_prefix = None
 
   def LazyUpdateIfNeeded(self):
@@ -423,6 +555,16 @@
   def GetBugPrefix(self):
     return self._GetRietveldConfig('bug-prefix', error_ok=True)
 
+  def GetIsSkipDependencyUpload(self, branch_name):
+    """Returns true if specified branch should skip dep uploads."""
+    return self._GetBranchConfig(branch_name, 'skip-deps-uploads',
+                                 error_ok=True)
+
+  def GetRunPostUploadHook(self):
+    run_post_upload_hook = self._GetRietveldConfig(
+        'run-post-upload-hook', error_ok=True)
+    return run_post_upload_hook == "True"
+
   def GetDefaultCCList(self):
     return self._GetRietveldConfig('cc', error_ok=True)
 
@@ -461,6 +603,12 @@
       self.project = self._GetRietveldConfig('project', error_ok=True)
     return self.project
 
+  def GetForceHttpsCommitUrl(self):
+    if not self.force_https_commit_url:
+      self.force_https_commit_url = self._GetRietveldConfig(
+          'force-https-commit-url', error_ok=True)
+    return self.force_https_commit_url
+
   def GetPendingRefPrefix(self):
     if not self.pending_ref_prefix:
       self.pending_ref_prefix = self._GetRietveldConfig(
@@ -470,6 +618,9 @@
   def _GetRietveldConfig(self, param, **kwargs):
     return self._GetConfig('rietveld.' + param, **kwargs)
 
+  def _GetBranchConfig(self, branch_name, param, **kwargs):
+    return self._GetConfig('branch.' + branch_name + '.' + param, **kwargs)
+
   def _GetConfig(self, param, **kwargs):
     self.LazyUpdateIfNeeded()
     return RunGit(['config', param], **kwargs).strip()
@@ -481,7 +632,7 @@
 
 
 class Changelist(object):
-  def __init__(self, branchref=None, issue=None):
+  def __init__(self, branchref=None, issue=None, auth_config=None):
     # Poke settings so we get the "configure your server" message if necessary.
     global settings
     if not settings:
@@ -501,11 +652,16 @@
     self.description = None
     self.lookedup_patchset = False
     self.patchset = None
-    self._rpc_server = None
     self.cc = None
     self.watchers = ()
-    self._remote = None
+    self._auth_config = auth_config
     self._props = None
+    self._remote = None
+    self._rpc_server = None
+
+  @property
+  def auth_config(self):
+    return self._auth_config
 
   def GetCCList(self):
     """Return the users cc'd on this CL.
@@ -533,7 +689,11 @@
   def GetBranch(self):
     """Returns the short branch name, e.g. 'master'."""
     if not self.branch:
-      self.branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
+      branchref = RunGit(['symbolic-ref', 'HEAD'],
+                         stderr=subprocess2.VOID, error_ok=True).strip()
+      if not branchref:
+        return None
+      self.branchref = branchref
       self.branch = ShortBranchName(self.branchref)
     return self.branch
 
@@ -583,8 +743,12 @@
     return remote, upstream_branch
 
   def GetCommonAncestorWithUpstream(self):
+    upstream_branch = self.GetUpstreamBranch()
+    if not BranchExists(upstream_branch):
+      DieWithError('The upstream for the current branch (%s) does not exist '
+                   'anymore.\nPlease fix it and try again.' % self.GetBranch())
     return git_common.get_or_create_merge_base(self.GetBranch(),
-                                               self.GetUpstreamBranch())
+                                               upstream_branch)
 
   def GetUpstreamBranch(self):
     if self.upstream_branch is None:
@@ -636,6 +800,15 @@
   def GitSanityChecks(self, upstream_git_obj):
     """Checks git repo status and ensures diff is from local commits."""
 
+    if upstream_git_obj is None:
+      if self.GetBranch() is None:
+        print >> sys.stderr, (
+            'ERROR: unable to determine current branch (detached HEAD?)')
+      else:
+        print >> sys.stderr, (
+            'ERROR: no upstream branch')
+      return False
+
     # Verify the commit we're diffing against is in our current branch.
     upstream_sha = RunGit(['rev-parse', '--verify', upstream_git_obj]).strip()
     common_ancestor = RunGit(['merge-base', upstream_sha, 'HEAD']).strip()
@@ -676,6 +849,19 @@
     return RunGit(['config', 'branch.%s.base-url' % self.GetBranch()],
                   error_ok=True).strip()
 
+  def GetGitSvnRemoteUrl(self):
+    """Return the configured git-svn remote URL parsed from git svn info.
+
+    Returns None if it is not set.
+    """
+    # URL is dependent on the current directory.
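+    # 'git svn info' prints simple 'Key: value' lines, e.g. (illustrative):
+    #   URL: svn://svn.chromium.org/chrome/trunk/src
+    #   Repository Root: svn://svn.chromium.org/chrome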
+    data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
+    if data:
+      keys = dict(line.split(': ', 1) for line in data.splitlines()
+                  if ': ' in line)
+      return keys.get('URL', None)
+    return None
+
   def GetRemoteUrl(self):
     """Return the configured remote URL, e.g. 'git://example.org/foo.git/'.
 
@@ -704,8 +890,10 @@
       # If we're on a branch then get the server potentially associated
       # with that branch.
       if self.GetIssue():
-        self.rietveld_server = gclient_utils.UpgradeToHttps(RunGit(
-            ['config', self._RietveldServer()], error_ok=True).strip())
+        rietveld_server_config = self._RietveldServer()
+        if rietveld_server_config:
+          self.rietveld_server = gclient_utils.UpgradeToHttps(RunGit(
+              ['config', rietveld_server_config], error_ok=True).strip())
       if not self.rietveld_server:
         self.rietveld_server = settings.GetDefaultServerUrl()
     return self.rietveld_server
@@ -787,6 +975,9 @@
   def GetApprovingReviewers(self):
     return get_approving_reviewers(self.GetIssueProperties())
 
+  def AddComment(self, message):
+    return self.RpcServer().add_comment(self.GetIssue(), message)
+
   def SetIssue(self, issue):
     """Set this branch's issue.  If issue=0, clears the issue."""
     if issue:
@@ -937,7 +1128,8 @@
     """
     if not self._rpc_server:
       self._rpc_server = rietveld.CachingRietveld(
-          self.GetRietveldServer(), None, None)
+          self.GetRietveldServer(),
+          self._auth_config or auth.make_auth_config())
     return self._rpc_server
 
   def _IssueSetting(self):
@@ -950,7 +1142,10 @@
 
   def _RietveldServer(self):
     """Returns the git setting that stores this change's rietveld server."""
-    return 'branch.%s.rietveldserver' % self.GetBranch()
+    branch = self.GetBranch()
+    if branch:
+      return 'branch.%s.rietveldserver' % branch
+    return None
 
 
 def GetCodereviewSettingsInteractively():
@@ -987,6 +1182,8 @@
               'tree-status-url', False)
   SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url', True)
   SetProperty(settings.GetBugPrefix(), 'Bug Prefix', 'bug-prefix', False)
+  SetProperty(settings.GetRunPostUploadHook(), 'Run Post Upload Hook',
+              'run-post-upload-hook', False)
 
   # TODO: configure a default branch to diff against, rather than this
   # svn-based hackery.
@@ -1164,9 +1361,13 @@
   SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
   SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
   SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
+  SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL',
+              unset_error_ok=True)
   SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
   SetProperty('project', 'PROJECT', unset_error_ok=True)
   SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
+  SetProperty('run-post-upload-hook', 'RUN_POST_UPLOAD_HOOK',
+              unset_error_ok=True)
 
   if 'GERRIT_HOST' in keyvals:
     RunGit(['config', 'gerrit.host', keyvals['GERRIT_HOST']])
@@ -1290,6 +1491,151 @@
     'error': Fore.WHITE,
   }.get(status, Fore.WHITE)
 
+def fetch_cl_status(branch, auth_config=None):
+  """Fetches information for an issue and returns (branch, issue, status)."""
+  cl = Changelist(branchref=branch, auth_config=auth_config)
+  url = cl.GetIssueURL()
+  status = cl.GetStatus()
+
+  if url and (not status or status == 'error'):
+    # The issue probably doesn't exist anymore.
+    url += ' (broken)'
+
+  return (branch, url, status)
+
+def get_cl_statuses(
+    branches, fine_grained, max_processes=None, auth_config=None):
+  """Returns a blocking iterable of (branch, issue, color) for given branches.
+
+  If fine_grained is true, this will fetch CL statuses from the server.
+  Otherwise, simply indicate if there's a matching url for the given branches.
+
+  If max_processes is specified, it is used as the maximum number of worker
+  threads used to fetch CL status from the server. Otherwise one thread per
+  branch is used.
+  """
+  # Silence upload.py; otherwise it becomes unwieldy.
+  upload.verbosity = 0
+
+  if fine_grained:
+    # Process one branch synchronously to work through authentication, then
+    # spawn threads to process all the other branches in parallel.
+    if branches:
+      fetch = lambda branch: fetch_cl_status(branch, auth_config=auth_config)
+      yield fetch(branches[0])
+
+      branches_to_fetch = branches[1:]
+      # Guard against ThreadPool(0), which raises ValueError when there are
+      # no remaining branches to fetch.
+      if branches_to_fetch:
+        pool = ThreadPool(
+            min(max_processes, len(branches_to_fetch))
+                if max_processes is not None
+                else len(branches_to_fetch))
+        for x in pool.imap_unordered(fetch, branches_to_fetch):
+          yield x
+  else:
+    # Do not use GetApprovingReviewers(), since it requires an HTTP request.
+    for b in branches:
+      cl = Changelist(branchref=b, auth_config=auth_config)
+      url = cl.GetIssueURL()
+      yield (b, url, 'waiting' if url else 'error')
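+# Sketch of typical use (branch names are illustrative):
+#   for branch, url, status in get_cl_statuses(
+#       ['master', 'feature'], fine_grained=True, max_processes=8):
+#     print branch, url, status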
+
+
+def upload_branch_deps(cl, args):
+  """Uploads CLs of local branches that are dependents of the current branch.
+
+  If the local branch dependency tree looks like:
+  test1 -> test2.1 -> test3.1
+                   -> test3.2
+        -> test2.2 -> test3.3
+
+  and you run "git cl upload --dependencies" from test1 then "git cl upload" is
+  run on the dependent branches in this order:
+  test2.1, test3.1, test3.2, test2.2, test3.3
+
+  Note: This function does not rebase your local dependent branches. Use it when
+        you make a change to the parent branch that will not conflict with its
+        dependent branches, and you would like their dependencies updated in
+        Rietveld.
+  """
+  if git_common.is_dirty_git_tree('upload-branch-deps'):
+    return 1
+
+  root_branch = cl.GetBranch()
+  if root_branch is None:
+    DieWithError('Can\'t find dependent branches from detached HEAD state. '
+                 'Get on a branch!')
+  if not cl.GetIssue() or not cl.GetPatchset():
+    DieWithError('Current branch does not have an uploaded CL. We cannot set '
+                 'patchset dependencies without an uploaded CL.')
+
+  branches = RunGit(['for-each-ref',
+                     '--format=%(refname:short) %(upstream:short)',
+                     'refs/heads'])
+  if not branches:
+    print('No local branches found.')
+    return 0
+
+  # Map each local branch to the list of branches that depend on it.
+  tracked_to_dependents = collections.defaultdict(list)
+  for b in branches.splitlines():
+    tokens = b.split()
+    if len(tokens) == 2:
+      branch_name, tracked = tokens
+      tracked_to_dependents[tracked].append(branch_name)
+
+  print
+  print 'The dependent local branches of %s are:' % root_branch
+  dependents = []
+  def traverse_dependents_preorder(branch, padding=''):
+    dependents_to_process = tracked_to_dependents.get(branch, [])
+    padding += '  '
+    for dependent in dependents_to_process:
+      print '%s%s' % (padding, dependent)
+      dependents.append(dependent)
+      traverse_dependents_preorder(dependent, padding)
+  traverse_dependents_preorder(root_branch)
+  print
+
+  if not dependents:
+    print 'There are no dependent local branches for %s' % root_branch
+    return 0
+
+  print ('This command will check out all dependent branches and run '
+         '"git cl upload".')
+  ask_for_data('[Press enter to continue or ctrl-C to quit]')
+
+  # Add a default patchset title to all upload calls.
+  args.extend(['-t', 'Updated patchset dependency'])
+  # Record all dependents that failed to upload.
+  failures = {}
+  # Go through all dependents, checkout the branch and upload.
+  try:
+    for dependent_branch in dependents:
+      print
+      print '--------------------------------------'
+      print 'Running "git cl upload" from %s:' % dependent_branch
+      RunGit(['checkout', '-q', dependent_branch])
+      print
+      try:
+        if CMDupload(OptionParser(), args) != 0:
+          print 'Upload failed for %s!' % dependent_branch
+          failures[dependent_branch] = 1
+      except:  # pylint: disable=W0702
+        failures[dependent_branch] = 1
+      print
+  finally:
+    # Swap back to the original root branch.
+    RunGit(['checkout', '-q', root_branch])
+
+  print
+  print 'Upload complete for dependent branches!'
+  for dependent_branch in dependents:
+    upload_status = 'failed' if failures.get(dependent_branch) else 'succeeded'
+    print '  %s : %s' % (dependent_branch, upload_status)
+  print
+
+  return 0
+
 
 @subcommand.hidden
 def CMDstatus(parser, args):
@@ -1309,12 +1655,18 @@
                     help='print only specific field (desc|id|patch|url)')
   parser.add_option('-f', '--fast', action='store_true',
                     help='Do not retrieve review status')
-  (options, args) = parser.parse_args(args)
+  parser.add_option(
+      '-j', '--maxjobs', action='store', type=int,
+      help='The maximum number of jobs to use when retrieving review status')
+
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
   if args:
     parser.error('Unsupported args: %s' % args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if options.field:
-    cl = Changelist()
+    cl = Changelist(auth_config=auth_config)
     if options.field.startswith('desc'):
       print cl.GetDescription()
     elif options.field == 'id':
@@ -1336,67 +1688,41 @@
     print('No local branch found.')
     return 0
 
-  changes = (Changelist(branchref=b) for b in branches.splitlines())
+  changes = (
+      Changelist(branchref=b, auth_config=auth_config)
+      for b in branches.splitlines())
   branches = [c.GetBranch() for c in changes]
   alignment = max(5, max(len(b) for b in branches))
   print 'Branches associated with reviews:'
-  # Adhoc thread pool to request data concurrently.
-  output = Queue.Queue()
+  output = get_cl_statuses(branches,
+                           fine_grained=not options.fast,
+                           max_processes=options.maxjobs,
+                           auth_config=auth_config)
 
-  # Silence upload.py otherwise it becomes unweldly.
-  upload.verbosity = 0
-
-  if not options.fast:
-    def fetch(b):
-      """Fetches information for an issue and returns (branch, issue, color)."""
-      c = Changelist(branchref=b)
-      i = c.GetIssueURL()
-      status = c.GetStatus()
-      color = color_for_status(status)
-
-      if i and (not status or status == 'error'):
-        # The issue probably doesn't exist anymore.
-        i += ' (broken)'
-
-      output.put((b, i, color))
-
-    # Process one branch synchronously to work through authentication, then
-    # spawn threads to process all the other branches in parallel.
-    if branches:
-      fetch(branches[0])
-    threads = [
-      threading.Thread(target=fetch, args=(b,)) for b in branches[1:]]
-    for t in threads:
-      t.daemon = True
-      t.start()
-  else:
-    # Do not use GetApprovingReviewers(), since it requires an HTTP request.
-    for b in branches:
-      c = Changelist(branchref=b)
-      url = c.GetIssueURL()
-      output.put((b, url, Fore.BLUE if url else Fore.WHITE))
-
-  tmp = {}
+  branch_statuses = {}
   alignment = max(5, max(len(ShortBranchName(b)) for b in branches))
   for branch in sorted(branches):
-    while branch not in tmp:
-      b, i, color = output.get()
-      tmp[b] = (i, color)
-    issue, color = tmp.pop(branch)
+    while branch not in branch_statuses:
+      b, i, status = output.next()
+      branch_statuses[b] = (i, status)
+    issue_url, status = branch_statuses.pop(branch)
+    color = color_for_status(status)
     reset = Fore.RESET
     if not sys.stdout.isatty():
       color = ''
       reset = ''
-    print '  %*s : %s%s%s' % (
-          alignment, ShortBranchName(branch), color, issue, reset)
+    status_str = '(%s)' % status if status else ''
+    print '  %*s : %s%s %s%s' % (
+          alignment, ShortBranchName(branch), color, issue_url, status_str,
+          reset)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   print
   print 'Current branch:',
-  if not cl.GetIssue():
-    print 'no issue assigned.'
-    return 0
   print cl.GetBranch()
+  if not cl.GetIssue():
+    print 'No issue assigned.'
+    return 0
   print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
   if not options.fast:
     print 'Issue description:'
@@ -1427,56 +1753,101 @@
 
   Pass issue number 0 to clear the current issue.
   """
-  _, args = parser.parse_args(args)
+  parser.add_option('-r', '--reverse', action='store_true',
+                    help='Look up the branch(es) for the specified issues. If '
+                         'no issues are specified, all branches with mapped '
+                         'issues will be listed.')
+  options, args = parser.parse_args(args)
 
-  cl = Changelist()
-  if len(args) > 0:
-    try:
-      issue = int(args[0])
-    except ValueError:
-      DieWithError('Pass a number to set the issue or none to list it.\n'
-          'Maybe you want to run git cl status?')
-    cl.SetIssue(issue)
-  print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
+  if options.reverse:
+    branches = RunGit(['for-each-ref', 'refs/heads',
+                       '--format=%(refname:short)']).splitlines()
+
+    # Reverse issue lookup.
+    issue_branch_map = {}
+    for branch in branches:
+      cl = Changelist(branchref=branch)
+      issue_branch_map.setdefault(cl.GetIssue(), []).append(branch)
+    if not args:
+      args = sorted(issue_branch_map.iterkeys())
+    for issue in args:
+      if not issue:
+        continue
+      print 'Branch for issue number %s: %s' % (
+          issue, ', '.join(issue_branch_map.get(int(issue)) or ('None',)))
+  else:
+    cl = Changelist()
+    if len(args) > 0:
+      try:
+        issue = int(args[0])
+      except ValueError:
+        DieWithError('Pass a number to set the issue or none to list it.\n'
+            'Maybe you want to run git cl status?')
+      cl.SetIssue(issue)
+    print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
   return 0
 
 
 @subcommand.hidden
 def CMDcomments(parser, args):
-  """Shows review comments of the current changelist."""
-  (_, args) = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
+  """Shows or posts review comments for any changelist."""
+  parser.add_option('-a', '--add-comment', dest='comment',
+                    help='comment to add to an issue')
+  parser.add_option('-i', dest='issue',
+                    help="review issue id (defaults to current issue)")
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  cl = Changelist()
-  if cl.GetIssue():
-    data = cl.GetIssueProperties()
-    for message in sorted(data['messages'], key=lambda x: x['date']):
-      if message['disapproval']:
-        color = Fore.RED
-      elif message['approval']:
-        color = Fore.GREEN
-      elif message['sender'] == data['owner_email']:
-        color = Fore.MAGENTA
-      else:
-        color = Fore.BLUE
-      print '\n%s%s  %s%s' % (
-          color, message['date'].split('.', 1)[0], message['sender'],
-          Fore.RESET)
-      if message['text'].strip():
-        print '\n'.join('  ' + l for l in message['text'].splitlines())
+  issue = None
+  if options.issue:
+    try:
+      issue = int(options.issue)
+    except ValueError:
+      DieWithError('A review issue id is expected to be a number')
+
+  cl = Changelist(issue=issue, auth_config=auth_config)
+
+  if options.comment:
+    cl.AddComment(options.comment)
+    return 0
+
+  data = cl.GetIssueProperties()
+  for message in sorted(data.get('messages', []), key=lambda x: x['date']):
+    if message['disapproval']:
+      color = Fore.RED
+    elif message['approval']:
+      color = Fore.GREEN
+    elif message['sender'] == data['owner_email']:
+      color = Fore.MAGENTA
+    else:
+      color = Fore.BLUE
+    print '\n%s%s  %s%s' % (
+        color, message['date'].split('.', 1)[0], message['sender'],
+        Fore.RESET)
+    if message['text'].strip():
+      print '\n'.join('  ' + l for l in message['text'].splitlines())
   return 0
 
 
 @subcommand.hidden
 def CMDdescription(parser, args):
   """Brings up the editor for the current CL's description."""
-  cl = Changelist()
+  parser.add_option('-d', '--display', action='store_true',
+                    help='Display the description instead of opening an editor')
+  auth.add_auth_options(parser)
+  options, _ = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+  cl = Changelist(auth_config=auth_config)
   if not cl.GetIssue():
     DieWithError('This branch has no associated changelist.')
   description = ChangeDescription(cl.GetDescription())
+  if options.display:
+    print description.description
+    return 0
   description.prompt()
-  cl.UpdateDescription(description.description)
+  if cl.GetDescription() != description.description:
+    cl.UpdateDescription(description.description)
   return 0
 
 
@@ -1498,7 +1869,9 @@
   """Runs cpplint on the current changelist."""
   parser.add_option('--filter', action='append', metavar='-x,+y',
                     help='Comma-separated list of cpplint\'s category-filters')
-  (options, args) = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   # Access to a protected member _XX of a client class
   # pylint: disable=W0212
@@ -1514,7 +1887,7 @@
   previous_cwd = os.getcwd()
   os.chdir(settings.GetRoot())
   try:
-    cl = Changelist()
+    cl = Changelist(auth_config=auth_config)
     change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
     files = [f.LocalPath() for f in change.AffectedFiles()]
     if not files:
@@ -1553,13 +1926,15 @@
                     help='Run commit hook instead of the upload hook')
   parser.add_option('-f', '--force', action='store_true',
                     help='Run checks even if tree is dirty')
-  (options, args) = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  if not options.force and is_dirty_git_tree('presubmit'):
+  if not options.force and git_common.is_dirty_git_tree('presubmit'):
     print 'use --force to check even if tree is dirty.'
     return 1
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if args:
     base_branch = args[0]
   else:
@@ -1610,15 +1985,72 @@
   if not change_desc.description:
     print "Description is empty; aborting."
     return 1
-  if CHANGE_ID not in change_desc.description:
-    AddChangeIdToCommitMessage(options, args)
 
-  commits = RunGit(['rev-list', '%s/%s..' % (remote, branch)]).splitlines()
+  if options.squash:
+    # Try to get the message from a previous upload.
+    shadow_branch = 'refs/heads/git_cl_uploads/' + cl.GetBranch()
+    message = RunGitSilent(['show', '--format=%s\n\n%b', '-s', shadow_branch])
+    if not message:
+      if not options.force:
+        change_desc.prompt()
+
+      if CHANGE_ID not in change_desc.description:
+        # Run the commit-msg hook without modifying the head commit by writing
+        # the commit message to a temporary file and running the hook over it,
+        # then reading the file back in.
+        commit_msg_hook = os.path.join(settings.GetRoot(), '.git', 'hooks',
+                                       'commit-msg')
+        file_handle, msg_file = tempfile.mkstemp(text=True,
+                                                 prefix='commit_msg')
+        try:
+          try:
+            with os.fdopen(file_handle, 'w') as fileobj:
+              fileobj.write(change_desc.description)
+          finally:
+            os.close(file_handle)
+            RunCommand([commit_msg_hook, msg_file])
+            change_desc.set_description(gclient_utils.FileRead(msg_file))
+        finally:
+          os.remove(msg_file)
+
+      if not change_desc.description:
+        print "Description is empty; aborting."
+        return 1
+
+      message = change_desc.description
+
+    remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
+    if remote == '.':
+      # If our upstream branch is local, we base our squashed commit on its
+      # squashed version.
+      parent = ('refs/heads/git_cl_uploads/' +
+                scm.GIT.ShortBranchName(upstream_branch))
+
+      # Verify that the upstream branch has been uploaded too, otherwise Gerrit
+      # will create additional CLs when uploading.
+      if (RunGitSilent(['rev-parse', upstream_branch + ':']) !=
+          RunGitSilent(['rev-parse', parent + ':'])):
+        print 'Upload upstream branch ' + upstream_branch + ' first.'
+        return 1
+    else:
+      parent = cl.GetCommonAncestorWithUpstream()
+
+    tree = RunGit(['rev-parse', 'HEAD:']).strip()
+    ref_to_push = RunGit(['commit-tree', tree, '-p', parent,
+                          '-m', message]).strip()
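+    # commit-tree writes a new commit object for the given tree and parent
+    # without moving HEAD, so the local branch itself is left untouched.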
+  else:
+    if CHANGE_ID not in change_desc.description:
+      AddChangeIdToCommitMessage(options, args)
+    ref_to_push = 'HEAD'
+    parent = '%s/%s' % (gerrit_remote, branch)
+
+  commits = RunGitSilent(['rev-list', '%s..%s' % (parent,
+                                                  ref_to_push)]).splitlines()
   if len(commits) > 1:
     print('WARNING: This will upload %d commits. Run the following command '
           'to see which commits will be uploaded: ' % len(commits))
-    print('git log %s/%s..' % (remote, branch))
-    print('You can also use `git squash-branch` to squash these into a single'
+    print('git log %s..%s' % (parent, ref_to_push))
+    print('You can also use `git squash-branch` to squash these into a single '
           'commit.')
     if ask_for_data('About to upload; continue (y/N)? ').lower() != 'y':
       return 0
@@ -1641,16 +2073,77 @@
   if receive_options:
     git_command.append('--receive-pack=git receive-pack %s' %
                        ' '.join(receive_options))
-  git_command += [remote, 'HEAD:refs/for/' + branch]
+  git_command += [gerrit_remote, ref_to_push + ':refs/for/' + branch]
   RunGit(git_command)
+
+  if options.squash:
+    head = RunGit(['rev-parse', 'HEAD']).strip()
+    RunGit(['update-ref', '-m', 'Uploaded ' + head, shadow_branch, ref_to_push])
+
   # TODO(ukai): parse Change-Id: and set issue number?
   return 0
 
 
+def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
+  """Computes the remote branch ref to use for the CL.
+
+  Args:
+    remote (str): The git remote for the CL.
+    remote_branch (str): The git remote branch for the CL.
+    target_branch (str): The target branch specified by the user.
+    pending_prefix (str): The pending prefix from the settings.
+  """
+  if not (remote and remote_branch):
+    return None
+
+  if target_branch:
+    # Canonicalize branch references to the equivalent local full symbolic
+    # refs, which are then translated into the remote full symbolic refs
+    # below.
+    if '/' not in target_branch:
+      remote_branch = 'refs/remotes/%s/%s' % (remote, target_branch)
+    else:
+      prefix_replacements = (
+        ('^((refs/)?remotes/)?branch-heads/', 'refs/remotes/branch-heads/'),
+        ('^((refs/)?remotes/)?%s/' % remote,  'refs/remotes/%s/' % remote),
+        ('^(refs/)?heads/',                   'refs/remotes/%s/' % remote),
+      )
+      match = None
+      for regex, replacement in prefix_replacements:
+        match = re.search(regex, target_branch)
+        if match:
+          remote_branch = target_branch.replace(match.group(0), replacement)
+          break
+      if not match:
+        # This is a branch path but not one we recognize; use as-is.
+        remote_branch = target_branch
+  elif remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS:
+    # Handle the refs that need to land in different refs.
+    remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
+
+  # Create the true path to the remote branch.
+  # Does the following translation:
+  # * refs/remotes/origin/refs/diff/test -> refs/diff/test
+  # * refs/remotes/origin/master -> refs/heads/master
+  # * refs/remotes/branch-heads/test -> refs/branch-heads/test
+  if remote_branch.startswith('refs/remotes/%s/refs/' % remote):
+    remote_branch = remote_branch.replace('refs/remotes/%s/' % remote, '')
+  elif remote_branch.startswith('refs/remotes/%s/' % remote):
+    remote_branch = remote_branch.replace('refs/remotes/%s/' % remote,
+                                          'refs/heads/')
+  elif remote_branch.startswith('refs/remotes/branch-heads'):
+    remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
+  # If a pending prefix exists then replace refs/ with it.
+  if pending_prefix:
+    remote_branch = remote_branch.replace('refs/', pending_prefix)
+  return remote_branch
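+# Illustrative translations (remote='origin', no pending prefix):
+#   GetTargetRef('origin', 'refs/remotes/origin/master', None, None)
+#     -> 'refs/heads/master'
+#   GetTargetRef('origin', 'refs/remotes/origin/master', 'branch-heads/1234',
+#                None) -> 'refs/branch-heads/1234'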
+
+
 def RietveldUpload(options, args, cl, change):
   """upload the patch to rietveld."""
   upload_args = ['--assume_yes']  # Don't ask about untracked files.
   upload_args.extend(['--server', cl.GetRietveldServer()])
+  upload_args.extend(auth.auth_config_to_command_options(cl.auth_config))
   if options.emulate_svn_auto_props:
     upload_args.append('--emulate_svn_auto_props')
 
@@ -1717,23 +2210,54 @@
   remote_url = cl.GetGitBaseUrlFromConfig()
   if not remote_url:
     if settings.GetIsGitSvn():
-      # URL is dependent on the current directory.
-      data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
-      if data:
-        keys = dict(line.split(': ', 1) for line in data.splitlines()
-                    if ': ' in line)
-        remote_url = keys.get('URL', None)
+      remote_url = cl.GetGitSvnRemoteUrl()
     else:
       if cl.GetRemoteUrl() and '/' in cl.GetUpstreamBranch():
         remote_url = (cl.GetRemoteUrl() + '@'
                       + cl.GetUpstreamBranch().split('/')[-1])
   if remote_url:
     upload_args.extend(['--base_url', remote_url])
+    remote, remote_branch = cl.GetRemoteBranch()
+    target_ref = GetTargetRef(remote, remote_branch, options.target_branch,
+                              settings.GetPendingRefPrefix())
+    if target_ref:
+      upload_args.extend(['--target_ref', target_ref])
+
+    # Look for dependent patchsets. See crbug.com/480453 for more details.
+    remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
+    upstream_branch = ShortBranchName(upstream_branch)
+    if remote == '.':
+      # A local branch is being tracked.
+      local_branch = ShortBranchName(upstream_branch)
+      if settings.GetIsSkipDependencyUpload(local_branch):
+        print
+        print ('Skipping dependency patchset upload because git config '
+               'branch.%s.skip-deps-uploads is set to True.' % local_branch)
+        print
+      else:
+        auth_config = auth.extract_auth_config_from_options(options)
+        branch_cl = Changelist(branchref=local_branch, auth_config=auth_config)
+        branch_cl_issue_url = branch_cl.GetIssueURL()
+        branch_cl_issue = branch_cl.GetIssue()
+        branch_cl_patchset = branch_cl.GetPatchset()
+        if branch_cl_issue_url and branch_cl_issue and branch_cl_patchset:
+          upload_args.extend(
+              ['--depends_on_patchset', '%s:%s' % (
+                   branch_cl_issue, branch_cl_patchset)])
+          print
+          print ('The current branch (%s) is tracking a local branch (%s) with '
+                 'an associated CL.') % (cl.GetBranch(), local_branch)
+          print 'Adding %s/#ps%s as a dependency patchset.' % (
+              branch_cl_issue_url, branch_cl_patchset)
+          print
 
   project = settings.GetProject()
   if project:
     upload_args.extend(['--project', project])
 
+  if options.cq_dry_run:
+    upload_args.extend(['--cq_dry_run'])
+
   try:
     upload_args = ['upload'] + upload_args + args
     logging.info('upload.RealMain(%s)', upload_args)
@@ -1776,7 +2300,14 @@
 
 @subcommand.usage('[args to "git diff"]')
 def CMDupload(parser, args):
-  """Uploads the current changelist to codereview."""
+  """Uploads the current changelist to codereview.
+
+  Can skip dependency patchset uploads for a branch by running:
+    git config branch.branch_name.skip-deps-uploads True
+  To unset run:
+    git config --unset branch.branch_name.skip-deps-uploads
+  Can also set the above globally by using the --global flag.
+  """
   parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
                     help='bypass upload presubmit hook')
   parser.add_option('--bypass-watchlists', action='store_true',
@@ -1805,34 +2336,55 @@
                     help='set the review private (rietveld only)')
   parser.add_option('--target_branch',
                     '--target-branch',
-                    help='When uploading to gerrit, remote branch to '
-                         'use for CL.  Default: master')
+                    metavar='TARGET',
+                    help='Apply CL to remote ref TARGET.  ' +
+                         'Default: remote branch head, or master')
+  parser.add_option('--squash', action='store_true',
+                    help='Squash multiple commits into one (Gerrit only)')
   parser.add_option('--email', default=None,
                     help='email address to use to connect to Rietveld')
   parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true',
                     help='add a set of OWNERS to TBR')
+  parser.add_option('--cq-dry-run', dest='cq_dry_run', action='store_true',
+                    help='Send the patchset to do a CQ dry run right after '
+                         'upload.')
+  parser.add_option('--dependencies', action='store_true',
+                    help='Uploads CLs of all the local branches that depend on '
+                         'the current branch')
 
+  orig_args = args
   add_git_similarity(parser)
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
-  if options.target_branch and not settings.GetIsGerrit():
-    parser.error('Use --target_branch for non gerrit repository.')
-
-  if is_dirty_git_tree('upload'):
+  if git_common.is_dirty_git_tree('upload'):
     return 1
 
   options.reviewers = cleanup_list(options.reviewers)
   options.cc = cleanup_list(options.cc)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if args:
     # TODO(ukai): is it ok for gerrit case?
     base_branch = args[0]
   else:
+    if cl.GetBranch() is None:
+      DieWithError('Can\'t upload from detached HEAD state. Get on a branch!')
+
     # Default to diffing against common ancestor of upstream branch
     base_branch = cl.GetCommonAncestorWithUpstream()
     args = [base_branch, 'HEAD']
 
+  # Make sure we are authenticated to Rietveld before running expensive hooks.
+  # This is a fast, best-effort check; Rietveld can still reject the
+  # authentication during the actual upload.
+  if not settings.GetIsGerrit() and auth_config.use_oauth2:
+    authenticator = auth.get_authenticator_for_host(
+        cl.GetRietveldServer(), auth_config)
+    if not authenticator.has_cached_credentials():
+      raise auth.LoginRequiredError(cl.GetRietveldServer())
+
   # Apply watchlists on upload.
   change = cl.GetChange(base_branch, None)
   watchlist = watchlists.Watchlists(change.RepositoryRoot())
@@ -1876,7 +2428,25 @@
   if not ret:
     git_set_branch_value('last-upload-hash',
                          RunGit(['rev-parse', 'HEAD']).strip())
+    # Run post upload hooks, if specified.
+    if settings.GetRunPostUploadHook():
+      presubmit_support.DoPostUploadExecuter(
+          change,
+          cl,
+          settings.GetRoot(),
+          options.verbose,
+          sys.stdout)
 
+    # Upload all dependencies if specified.
+    if options.dependencies:
+      print
+      print '--dependencies has been specified.'
+      print 'All dependent local branches will be re-uploaded.'
+      print
+      # Remove the dependencies flag from args so that we do not end up in a
+      # loop.
+      orig_args.remove('--dependencies')
+      upload_branch_deps(cl, orig_args)
   return ret
 
 
@@ -1907,8 +2477,11 @@
                          "description and used as author for git). Should be " +
                          "formatted as 'First Last <email@example.com>'")
   add_git_similarity(parser)
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
-  cl = Changelist()
+  auth_config = auth.extract_auth_config_from_options(options)
+
+  cl = Changelist(auth_config=auth_config)
 
   current = cl.GetBranch()
   remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
@@ -1937,7 +2510,7 @@
   base_branch = args[0]
   base_has_submodules = IsSubmoduleMergeCommit(base_branch)
 
-  if is_dirty_git_tree(cmd):
+  if git_common.is_dirty_git_tree(cmd):
     return 1
 
   # This rev-list syntax means "show all commits not in my branch that
@@ -2022,7 +2595,11 @@
 
   commit_desc = ChangeDescription(change_desc.description)
   if cl.GetIssue():
-    commit_desc.append_footer('Review URL: %s' % cl.GetIssueURL())
+    # Xcode won't linkify this URL unless there is a non-whitespace character
+    # after it. Add a period on a new line to circumvent this. Also add a space
+    # before the period to make sure that Gitiles continues to correctly resolve
+    # the URL.
+    commit_desc.append_footer('Review URL: %s .' % cl.GetIssueURL())
   if options.contributor:
     commit_desc.append_footer('Patch from %s.' % options.contributor)
 
@@ -2097,9 +2674,19 @@
         revision = RunGit(['rev-parse', 'HEAD']).strip()
     else:
       # dcommit the merge branch.
-      _, output = RunGitWithCode(['svn', 'dcommit',
-                                  '-C%s' % options.similarity,
-                                  '--no-rebase', '--rmdir'])
+      cmd_args = [
+        'svn', 'dcommit',
+        '-C%s' % options.similarity,
+        '--no-rebase', '--rmdir',
+      ]
+      if settings.GetForceHttpsCommitUrl():
+        # Allow forcing https commit URLs for some projects that don't allow
+        # committing to http URLs (like Google Code).
+        remote_url = cl.GetGitSvnRemoteUrl()
+        if urlparse.urlparse(remote_url).scheme == 'http':
+          remote_url = remote_url.replace('http://', 'https://')
+        cmd_args.append('--commit-url=%s' % remote_url)
+      _, output = RunGitWithCode(cmd_args)
       if 'Committed r' in output:
         revision = re.match(
           '.*?\nCommitted r(\\d+)', output, re.DOTALL).group(1)
@@ -2141,7 +2728,7 @@
     props = cl.GetIssueProperties()
     patch_num = len(props['patchsets'])
     comment = "Committed patchset #%d (id:%d)%s manually as %s" % (
-        patch_num, props['patchsets'][-1], to_pending, revision[:7])
+        patch_num, props['patchsets'][-1], to_pending, revision)
     if options.bypass_hooks:
       comment += ' (tree was closed).' if GetTreeStatus() == 'closed' else '.'
     else:
@@ -2260,13 +2847,20 @@
 def CMDdcommit(parser, args):
   """Commits the current changelist via git-svn."""
   if not settings.GetIsGitSvn():
-    message = """This doesn't appear to be an SVN repository.
-If your project has a git mirror with an upstream SVN master, you probably need
-to run 'git svn init', see your project's git mirror documentation.
-If your project has a true writeable upstream repository, you probably want
-to run 'git cl land' instead.
-Choose wisely, if you get this wrong, your commit might appear to succeed but
-will instead be silently ignored."""
+    if get_footer_svn_id():
+      # If it looks like previous commits were mirrored with git-svn.
+      message = """This repository appears to be a git-svn mirror, but no
+upstream SVN master is set. You probably need to run 'git auto-svn' once."""
+    else:
+      message = """This doesn't appear to be an SVN repository.
+If your project has a true, writeable git repository, you probably want to run
+'git cl land' instead.
+If your project has a git mirror of an upstream SVN master, you probably need
+to run 'git svn init'.
+
+Using the wrong command might cause your commit to appear to succeed, and the
+review to be closed, without actually landing upstream. If you choose to
+proceed, please verify that the commit lands upstream as expected."""
     print(message)
     ask_for_data('[Press enter to dcommit or ctrl-C to quit]')
   return SendUpstream(parser, args, 'dcommit')
@@ -2276,9 +2870,10 @@
 @subcommand.usage('[upstream branch to apply against]')
 def CMDland(parser, args):
   """Commits the current changelist via git."""
-  if settings.GetIsGitSvn():
+  if settings.GetIsGitSvn() or get_footer_svn_id():
     print('This appears to be an SVN repository.')
     print('Are you sure you didn\'t mean \'git cl dcommit\'?')
+    print('(Ignore if this is the first commit after migrating from svn->git)')
     ask_for_data('[Press enter to push or ctrl-C to quit]')
   return SendUpstream(parser, args, 'land')
 
@@ -2299,12 +2894,19 @@
                         'attempting a 3-way merge')
   parser.add_option('-n', '--no-commit', action='store_true', dest='nocommit',
                     help="don't commit after patch applies")
+  auth.add_auth_options(parser)
   (options, args) = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+
   if len(args) != 1:
     parser.print_help()
     return 1
   issue_arg = args[0]
 
+  # We don't want uncommitted changes mixed up with the patch.
+  if git_common.is_dirty_git_tree('patch'):
+    return 1
+
   # TODO(maruel): Use apply_issue.py
   # TODO(ukai): use gerrit-cherry-pick for gerrit repository?
 
@@ -2316,25 +2918,32 @@
             Changelist().GetUpstreamBranch()])
 
   return PatchIssue(issue_arg, options.reject, options.nocommit,
-                    options.directory)
+                    options.directory, auth_config)
 
 
-def PatchIssue(issue_arg, reject, nocommit, directory):
+def PatchIssue(issue_arg, reject, nocommit, directory, auth_config):
+  # PatchIssue should never be called with a dirty tree.  It is up to the
+  # caller to check this, but just in case we assert here since the
+  # consequences of the caller not checking this could be dire.
+  assert(not git_common.is_dirty_git_tree('apply'))
+
   if type(issue_arg) is int or issue_arg.isdigit():
     # Input is an issue id.  Figure out the URL.
     issue = int(issue_arg)
-    cl = Changelist(issue=issue)
+    cl = Changelist(issue=issue, auth_config=auth_config)
     patchset = cl.GetMostRecentPatchset()
     patch_data = cl.GetPatchSetDiff(issue, patchset)
   else:
     # Assume it's a URL to the patch. Default to https.
     issue_url = gclient_utils.UpgradeToHttps(issue_arg)
-    match = re.match(r'.*?/issue(\d+)_(\d+).diff', issue_url)
+    match = re.match(r'(.*?)/download/issue(\d+)_(\d+).diff', issue_url)
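+    # For example, https://codereview.chromium.org/download/issue123456_1.diff
+    # yields the server https://codereview.chromium.org, issue 123456 and
+    # patchset 1 (all values here are illustrative).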
     if not match:
       DieWithError('Must pass an issue ID or full URL for '
           '\'Download raw patch set\'')
-    issue = int(match.group(1))
-    patchset = int(match.group(2))
+    issue = int(match.group(2))
+    cl = Changelist(issue=issue, auth_config=auth_config)
+    cl.rietveld_server = match.group(1)
+    patchset = int(match.group(3))
     patch_data = urllib2.urlopen(issue_arg).read()
 
   # Switch up to the top-level directory, if necessary, in preparation for
@@ -2368,12 +2977,16 @@
     subprocess2.check_call(cmd, env=GetNoGitPagerEnv(),
                            stdin=patch_data, stdout=subprocess2.VOID)
   except subprocess2.CalledProcessError:
-    DieWithError('Failed to apply the patch')
+    print 'Failed to apply the patch'
+    return 1
 
   # If we had an issue, commit the current state and register the issue.
   if not nocommit:
-    RunGit(['commit', '-m', 'patch from issue %s' % issue])
-    cl = Changelist()
+    RunGit(['commit', '-m', (cl.GetDescription() + '\n\n' +
+                             'patch from issue %(i)s at patchset '
+                             '%(p)s (http://crrev.com/%(i)s#ps%(p)s)'
+                             % {'i': issue, 'p': patchset})])
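+    # For illustration, with issue 123456 and patchset 1 the commit message is
+    # the CL description followed by:
+    #   patch from issue 123456 at patchset 1 (http://crrev.com/123456#ps1)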
+    cl = Changelist(auth_config=auth_config)
     cl.SetIssue(issue)
     cl.SetPatchset(patchset)
     print "Committed patch locally."
@@ -2474,10 +3087,9 @@
       "-b", "--bot", action="append",
       help=("IMPORTANT: specify ONE builder per --bot flag. Use it multiple "
             "times to specify multiple builders. ex: "
-            "'-b win_rel:ui_tests,webkit_unit_tests -b win_layout'. See "
+            "'-b win_rel -b win_layout'. See "
             "the try server waterfall for the builders name and the tests "
-            "available. Can also be used to specify gtest_filter, e.g. "
-            "-b win_rel:base_unittests:ValuesTest.*Value"))
+            "available."))
   group.add_option(
       "-m", "--master", default='',
       help=("Specify a try master where to run the tries."))
@@ -2495,19 +3107,19 @@
       help="Override which project to use. Projects are defined "
            "server-side to define what default bot set to use")
   group.add_option(
-      "-t", "--testfilter", action="append", default=[],
-      help=("Apply a testfilter to all the selected builders. Unless the "
-            "builders configurations are similar, use multiple "
-            "--bot <builder>:<test> arguments."))
-  group.add_option(
       "-n", "--name", help="Try job name; default to current branch name")
+  group.add_option(
+      "--use-buildbucket", action="store_true", default=False,
+      help="Use buildbucket to trigger try jobs.")
   parser.add_option_group(group)
+  auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   if args:
     parser.error('Unknown arguments: %s' % args)
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   if not cl.GetIssue():
     parser.error('Need to upload first')
 
@@ -2529,7 +3141,7 @@
                    ', e.g. "-m tryserver.chromium.linux".' % err_msg)
 
   def GetMasterMap():
-    # Process --bot and --testfilter.
+    # Process --bot.
     if not options.bot:
       change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
 
@@ -2565,8 +3177,7 @@
 
     for bot in old_style:
       if ':' in bot:
-        builder, tests = bot.split(':', 1)
-        builders_and_tests.setdefault(builder, []).extend(tests.split(','))
+        parser.error('Specifying testfilter is no longer supported')
       elif ',' in bot:
         parser.error('Specify one bot per --bot flag')
       else:
@@ -2582,12 +3193,6 @@
 
   masters = GetMasterMap()
 
-  if options.testfilter:
-    forced_tests = sum((t.split(',') for t in options.testfilter), [])
-    masters = dict((master, dict(
-        (b, forced_tests) for b, t in slaves.iteritems()
-        if t != ['compile'])) for master, slaves in masters.iteritems())
-
   for builders in masters.itervalues():
     if any('triggered' in b for b in builders):
       print >> sys.stderr, (
@@ -2603,23 +3208,35 @@
         '\nWARNING Mismatch between local config and server. Did a previous '
         'upload fail?\ngit-cl try always uses latest patchset from rietveld. '
         'Continuing using\npatchset %s.\n' % patchset)
-  try:
-    cl.RpcServer().trigger_distributed_try_jobs(
-        cl.GetIssue(), patchset, options.name, options.clobber,
-        options.revision, masters)
-  except urllib2.HTTPError, e:
-    if e.code == 404:
-      print('404 from rietveld; '
-            'did you mean to use "git try" instead of "git cl try"?')
+  if options.use_buildbucket:
+    try:
+      trigger_try_jobs(auth_config, cl, options, masters, 'git_cl_try')
+    except BuildbucketResponseException as ex:
+      print 'ERROR: %s' % ex
       return 1
-  print('Tried jobs on:')
+    except Exception as e:
+      stacktrace = (''.join(traceback.format_stack()) + traceback.format_exc())
+      print 'ERROR: Exception when trying to trigger tryjobs: %s\n%s' % (
+          e, stacktrace)
+      return 1
+  else:
+    try:
+      cl.RpcServer().trigger_distributed_try_jobs(
+          cl.GetIssue(), patchset, options.name, options.clobber,
+          options.revision, masters)
+    except urllib2.HTTPError as e:
+      if e.code == 404:
+        print('404 from rietveld; '
+              'did you mean to use "git try" instead of "git cl try"?')
+        return 1
+    print('Tried jobs on:')
 
-  for (master, builders) in masters.iteritems():
-    if master:
-      print 'Master: %s' % master
-    length = max(len(builder) for builder in builders)
-    for builder in sorted(builders):
-      print '  %*s: %s' % (length, builder, ','.join(builders[builder]))
+    for (master, builders) in sorted(masters.iteritems()):
+      if master:
+        print 'Master: %s' % master
+      length = max(len(builder) for builder in builders)
+      for builder in sorted(builders):
+        print '  %*s: %s' % (length, builder, ','.join(builders[builder]))
   return 0
 
 
@@ -2664,10 +3281,12 @@
 @subcommand.hidden
 def CMDset_commit(parser, args):
   """Sets the commit bit to trigger the Commit Queue."""
-  _, args = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
   if args:
     parser.error('Unrecognized args: %s' % ' '.join(args))
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   props = cl.GetIssueProperties()
   if props.get('private'):
     parser.error('Cannot set commit on private issue')
@@ -2678,10 +3297,12 @@
 @subcommand.hidden
 def CMDset_close(parser, args):
   """Closes the issue."""
-  _, args = parser.parse_args(args)
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
   if args:
     parser.error('Unrecognized args: %s' % ' '.join(args))
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
   # Ensure there actually is an issue to close.
   cl.GetDescription()
   cl.CloseIssue()
@@ -2690,8 +3311,22 @@
 
 @subcommand.hidden
 def CMDdiff(parser, args):
-  """shows differences between local tree and last upload."""
-  cl = Changelist()
+  """Shows differences between local tree and last upload."""
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
+  if args:
+    parser.error('Unrecognized args: %s' % ' '.join(args))
+
+  # Uncommitted (staged and unstaged) changes will be destroyed by
+  # "git reset --hard" if there are merging conflicts in PatchIssue().
+  # Staged changes would be committed along with the patch from last
+  # upload, hence counted toward the "last upload" side in the final
+  # diff output, and this is not what we want.
+  if git_common.is_dirty_git_tree('diff'):
+    return 1
+
+  cl = Changelist(auth_config=auth_config)
   issue = cl.GetIssue()
   branch = cl.GetBranch()
   if not issue:
@@ -2703,13 +3338,14 @@
   RunGit(['checkout', '-q', '-b', TMP_BRANCH, base_branch])
   try:
     # Patch in the latest changes from rietveld.
-    rtn = PatchIssue(issue, False, False, None)
+    rtn = PatchIssue(issue, False, False, None, auth_config)
     if rtn != 0:
+      RunGit(['reset', '--hard'])
       return rtn
 
-    # Switch back to starting brand and diff against the temporary
+    # Switch back to starting branch and diff against the temporary
     # branch containing the latest rietveld patch.
-    subprocess2.check_call(['git', 'diff', TMP_BRANCH, branch])
+    subprocess2.check_call(['git', 'diff', TMP_BRANCH, branch, '--'])
   finally:
     RunGit(['checkout', '-q', branch])
     RunGit(['branch', '-D', TMP_BRANCH])
@@ -2718,16 +3354,18 @@
 
 
 def CMDowners(parser, args):
-  """interactively find the owners for reviewing"""
+  """Interactively find the owners for reviewing."""
   parser.add_option(
       '--no-color',
       action='store_true',
       help='Use this option to disable color output')
+  auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
+  auth_config = auth.extract_auth_config_from_options(options)
 
   author = RunGit(['config', 'user.email']).strip() or None
 
-  cl = Changelist()
+  cl = Changelist(auth_config=auth_config)
 
   if args:
     if len(args) > 1:
@@ -2746,14 +3384,36 @@
       disable_color=options.no_color).run()
 
 
+def BuildGitDiffCmd(diff_type, upstream_commit, args, extensions):
+  """Generates a diff command."""
+  # Generate diff for the current branch's changes.
+  diff_cmd = ['diff', '--no-ext-diff', '--no-prefix', diff_type,
+              upstream_commit, '--']
+
+  if args:
+    for arg in args:
+      if os.path.isdir(arg):
+        diff_cmd.extend(os.path.join(arg, '*' + ext) for ext in extensions)
+      elif os.path.isfile(arg):
+        diff_cmd.append(arg)
+      else:
+        DieWithError('Argument "%s" is not a file or a directory' % arg)
+  else:
+    diff_cmd.extend('*' + ext for ext in extensions)
+
+  return diff_cmd
+
+
 @subcommand.usage('[files or directories to diff]')
 def CMDformat(parser, args):
-  """Runs clang-format on the diff."""
-  CLANG_EXTS = ['.cc', '.cpp', '.h', '.mm', '.proto']
+  """Runs auto-formatting tools (clang-format etc.) on the diff."""
+  CLANG_EXTS = ['.cc', '.cpp', '.h', '.mm', '.proto', '.java']
   parser.add_option('--full', action='store_true',
                     help='Reformat the full content of all touched files')
   parser.add_option('--dry-run', action='store_true',
                     help='Don\'t modify any file on disk.')
+  parser.add_option('--python', action='store_true',
+                    help='Format python code with yapf (experimental).')
   parser.add_option('--diff', action='store_true',
                     help='Print diff to stdout rather than modifying files.')
   opts, args = parser.parse_args(args)
@@ -2764,15 +3424,6 @@
   if rel_base_path:
     os.chdir(rel_base_path)
 
-  # Generate diff for the current branch's changes.
-  diff_cmd = ['diff', '--no-ext-diff', '--no-prefix']
-  if opts.full:
-    # Only list the names of modified files.
-    diff_cmd.append('--name-only')
-  else:
-    # Only generate context-less patches.
-    diff_cmd.append('-U0')
-
   # Grab the merge-base commit, i.e. the upstream commit of the current
   # branch when it was created or the last time it was rebased. This is
   # to cover the case where the user may have called "git fetch origin",
@@ -2788,20 +3439,14 @@
     DieWithError('Could not find base commit for this branch. '
                  'Are you in detached state?')
 
-  diff_cmd.append(upstream_commit)
-
-  # Handle source file filtering.
-  diff_cmd.append('--')
-  if args:
-    for arg in args:
-      if os.path.isdir(arg):
-        diff_cmd += [os.path.join(arg, '*' + ext) for ext in CLANG_EXTS]
-      elif os.path.isfile(arg):
-        diff_cmd.append(arg)
-      else:
-        DieWithError('Argument "%s" is not a file or a directory' % arg)
+  if opts.full:
+    # Only list the names of modified files.
+    diff_type = '--name-only'
   else:
-    diff_cmd += ['*' + ext for ext in CLANG_EXTS]
+    # Only generate context-less patches.
+    diff_type = '-U0'
+
+  diff_cmd = BuildGitDiffCmd(diff_type, upstream_commit, args, CLANG_EXTS)
   diff_output = RunGit(diff_cmd)
 
   top_dir = os.path.normpath(
@@ -2813,18 +3458,20 @@
   except clang_format.NotFoundError, e:
     DieWithError(e)
 
+  # return_value is set to 2 below to signal to CheckPatchFormatted() that
+  # this patch isn't formatted; the presubmit uses that to block the change.
+  return_value = 0
+
   if opts.full:
     # diff_output is a list of files to send to clang-format.
     files = diff_output.splitlines()
-    if not files:
-      print "Nothing to format."
-      return 0
-    cmd = [clang_format_tool]
-    if not opts.dry_run and not opts.diff:
-      cmd.append('-i')
-    stdout = RunCommand(cmd + files, cwd=top_dir)
-    if opts.diff:
-      sys.stdout.write(stdout)
+    if files:
+      cmd = [clang_format_tool]
+      if not opts.dry_run and not opts.diff:
+        cmd.append('-i')
+      stdout = RunCommand(cmd + files, cwd=top_dir)
+      if opts.diff:
+        sys.stdout.write(stdout)
   else:
     env = os.environ.copy()
     env['PATH'] = str(os.path.dirname(clang_format_tool))
@@ -2843,9 +3490,52 @@
     if opts.diff:
       sys.stdout.write(stdout)
     if opts.dry_run and len(stdout) > 0:
-      return 2
+      return_value = 2
 
-  return 0
+  # Similar code to above, but using yapf on .py files rather than
+  # clang-format on C/C++ files.
+  if opts.python:
+    diff_cmd = BuildGitDiffCmd(diff_type, upstream_commit, args, ['.py'])
+    diff_output = RunGit(diff_cmd)
+    yapf_tool = gclient_utils.FindExecutable('yapf')
+    if yapf_tool is None:
+      DieWithError('yapf not found in PATH')
+
+    if opts.full:
+      files = diff_output.splitlines()
+      if files:
+        cmd = [yapf_tool]
+        if not opts.dry_run and not opts.diff:
+          cmd.append('-i')
+        stdout = RunCommand(cmd + files, cwd=top_dir)
+        if opts.diff:
+          sys.stdout.write(stdout)
+    else:
+      # TODO(sbc): yapf --lines mode still has some issues.
+      # https://github.com/google/yapf/issues/154
+      DieWithError('--python currently only works with --full')
+
+  # Build a diff command that only operates on Dart files. Dart's formatter
+  # cannot restrict itself to modified chunks, so always format whole files.
+  dart_diff_cmd = BuildGitDiffCmd('--name-only', upstream_commit,
+                                  args, ['.dart'])
+  dart_diff_output = RunGit(dart_diff_cmd)
+  if dart_diff_output:
+    try:
+      command = [dart_format.FindDartFmtToolInChromiumTree()]
+      if not opts.dry_run and not opts.diff:
+        command.append('-w')
+      command.extend(dart_diff_output.splitlines())
+
+      # Don't pass env here: env is only defined on the clang-format-diff
+      # path above, so it would be unbound when --full is used.
+      stdout = RunCommand(command, cwd=top_dir)
+      if opts.dry_run and stdout:
+        return_value = 2
+    except dart_format.NotFoundError:
+      print('Unable to check dart code formatting. Dart SDK is not in '
+            'this checkout.')
+
+  return return_value
 
 
 def CMDlol(parser, args):
@@ -2903,12 +3593,15 @@
   dispatcher = subcommand.CommandDispatcher(__name__)
   try:
     return dispatcher.execute(OptionParser(), argv)
+  except auth.AuthenticationError as e:
+    DieWithError(str(e))
   except urllib2.HTTPError, e:
     if e.code != 500:
       raise
     DieWithError(
         ('AppEngine is misbehaving and returned HTTP %d, again. Keep faith '
           'and retry or visit go/isgaeup.\n%s') % (e.code, str(e)))
+  return 0
 
 
 if __name__ == '__main__':
@@ -2916,4 +3609,8 @@
   # unit testing.
   fix_encoding.fix_encoding()
   colorama.init()
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_common.py b/git_common.py
index 2e268da..2b7d258 100644
--- a/git_common.py
+++ b/git_common.py
@@ -81,19 +81,21 @@
     # crbug.com/187444
     r'RPC failed; result=\d+, HTTP code = \d+',
 
-    # crbug.com/315421
-    r'The requested URL returned error: 500 while accessing',
-
     # crbug.com/388876
     r'Connection timed out',
+
+    # crbug.com/430343
+    # TODO(dnj): Resync with Chromite.
+    r'The requested URL returned error: 5\d+',
 )
 
 GIT_TRANSIENT_ERRORS_RE = re.compile('|'.join(GIT_TRANSIENT_ERRORS),
                                      re.IGNORECASE)
 
-# First version where the for-each-ref command's format string supported the
-# upstream:track token.
-MIN_UPSTREAM_TRACK_GIT_VERSION = (1, 9)
+# git's for-each-ref command first supported the upstream:track token in its
+# format string in version 1.9.0, but some usages were broken until 2.3.0.
+# See git commit b6160d95 for more information.
+MIN_UPSTREAM_TRACK_GIT_VERSION = (2, 3)
 
 class BadCommitRefException(Exception):
   def __init__(self, refs):
@@ -292,7 +294,7 @@
 
 
 def branches(*args):
-  NO_BRANCH = ('* (no branch', '* (detached from ')
+  NO_BRANCH = ('* (no branch', '* (detached', '* (HEAD detached')
 
   key = 'depot-tools.branch-limit'
   limit = 20
@@ -319,15 +321,6 @@
     yield line.split()[-1]
 
 
-def run_with_retcode(*cmd, **kwargs):
-  """Run a command but only return the status code."""
-  try:
-    run(*cmd, **kwargs)
-    return 0
-  except subprocess2.CalledProcessError as cpe:
-    return cpe.returncode
-
-
 def config(option, default=None):
   try:
     return run('config', '--get', option) or default
@@ -407,7 +400,7 @@
   base = branch_config(branch, 'base')
   base_upstream = branch_config(branch, 'base-upstream')
   parent = parent or upstream(branch)
-  if not parent:
+  if parent is None or branch is None:
     return None
   actual_merge_base = run('merge-base', parent, branch)
 
@@ -506,7 +499,7 @@
     raise BadCommitRefException(commitrefs)
 
 
-RebaseRet = collections.namedtuple('RebaseRet', 'success message')
+RebaseRet = collections.namedtuple('RebaseRet', 'success stdout stderr')
 
 
 def rebase(parent, start, branch, abort=False):
@@ -530,11 +523,11 @@
     if TEST_MODE:
       args.insert(0, '--committer-date-is-author-date')
     run('rebase', *args)
-    return RebaseRet(True, '')
+    return RebaseRet(True, '', '')
   except subprocess2.CalledProcessError as cpe:
     if abort:
-      run('rebase', '--abort')
-    return RebaseRet(False, cpe.stdout)
+      run_with_retcode('rebase', '--abort')  # ignore failure
+    return RebaseRet(False, cpe.stdout, cpe.stderr)
 
 
 def remove_merge_base(branch):
@@ -551,6 +544,15 @@
   return run_with_stderr(*cmd, **kwargs)[0]
 
 
+def run_with_retcode(*cmd, **kwargs):
+  """Run a command but only return the status code."""
+  try:
+    run(*cmd, **kwargs)
+    return 0
+  except subprocess2.CalledProcessError as cpe:
+    return cpe.returncode
+
+
 def run_stream(*cmd, **kwargs):
   """Runs a git command. Returns stdout as a PIPE (file-like object).
 
@@ -564,6 +566,28 @@
   return proc.stdout
 
 
+@contextlib.contextmanager
+def run_stream_with_retcode(*cmd, **kwargs):
+  """Runs a git command as context manager yielding stdout as a PIPE.
+
+  stderr is dropped to avoid races if the process outputs to both stdout and
+  stderr.
+
+  Raises subprocess2.CalledProcessError on nonzero return code.
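+
+  Example (a sketch; 'log' is just an illustrative subcommand):
+    with run_stream_with_retcode('log', '--format=%H') as stdout:
+      for line in stdout:
+        do_something(line)  # hypothetical per-line handler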
+  """
+  kwargs.setdefault('stderr', subprocess2.VOID)
+  kwargs.setdefault('stdout', subprocess2.PIPE)
+  cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
+  # Create the process outside the try block so that a failed Popen doesn't
+  # leave 'proc' unbound in the finally block below.
+  proc = subprocess2.Popen(cmd, **kwargs)
+  try:
+    yield proc.stdout
+  finally:
+    retcode = proc.wait()
+    if retcode != 0:
+      raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(),
+                                           None, None)
+
+
 def run_with_stderr(*cmd, **kwargs):
   """Runs a git command.
 
@@ -600,6 +624,25 @@
 def set_config(option, value, scope='local'):
   run('config', '--' + scope, option, value)
 
+
+def get_dirty_files():
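+  """Returns the 'git diff-index --name-status HEAD' output as a string, one
+  line per staged or unstaged change; empty when the tree is clean."""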
+  # Make sure index is up-to-date before running diff-index.
+  run_with_retcode('update-index', '--refresh', '-q')
+  return run('diff-index', '--name-status', 'HEAD')
+
+
+def is_dirty_git_tree(cmd):
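+  """Prints an error and returns True if the tree has uncommitted changes.
+
+  |cmd| names the in-flight command for the error message.
+  """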
+  dirty = get_dirty_files()
+  if dirty:
+    print 'Cannot %s with a dirty tree. You must commit locally first.' % cmd
+    print 'Uncommitted files: (git diff-index --name-status HEAD)'
+    print dirty[:4096]
+    if len(dirty) > 4096: # pragma: no cover
+      print '... (run "git diff-index --name-status HEAD" to see full output).'
+    return True
+  return False
+
+
 def squash_current_branch(header=None, merge_base=None):
   header = header or 'git squash commit.'
   merge_base = merge_base or get_or_create_merge_base(current_branch())
@@ -608,7 +651,14 @@
     log_msg += '\n'
   log_msg += run('log', '--reverse', '--format=%H%n%B', '%s..HEAD' % merge_base)
   run('reset', '--soft', merge_base)
-  run('commit', '-a', '-F', '-', indata=log_msg)
+
+  if not get_dirty_files():
+    # Sometimes the squash can result in the same tree, meaning that there is
+    # nothing to commit at this point.
+    print 'Nothing to commit; squashed branch is empty'
+    return False
+  run('commit', '--no-verify', '-a', '-F', '-', indata=log_msg)
+  return True
 
 
 def tags(*args):
@@ -723,6 +773,7 @@
   except subprocess2.CalledProcessError:
     return None
 
+
 def get_git_version():
   """Returns a tuple that contains the numeric components of the current git
   version."""
diff --git a/git_footers.py b/git_footers.py
index 6e8136b..3e3ea82 100755
--- a/git_footers.py
+++ b/git_footers.py
@@ -11,10 +11,12 @@
 
 import git_common as git
 
+
 FOOTER_PATTERN = re.compile(r'^\s*([\w-]+): (.*)$')
 CHROME_COMMIT_POSITION_PATTERN = re.compile(r'^([\w/-]+)@{#(\d+)}$')
 GIT_SVN_ID_PATTERN = re.compile('^([^\s@]+)@(\d+)')
 
+
 def normalize_name(header):
   return '-'.join([ word.title() for word in header.strip().split('-') ])
 
@@ -46,6 +48,20 @@
   return footer_map
 
 
+def get_footer_svn_id(branch=None):
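+  """Returns the SVN repository URL from the 'git-svn-id' footer of the last
+  commit on |branch| (the root branch by default). For example, a footer of
+  'git-svn-id: svn://svn.chromium.org/chrome/trunk/src@1234 <uuid>' yields
+  'svn://svn.chromium.org/chrome/trunk/src'. Returns None when there is no
+  such footer.
+  """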
+  if not branch:
+    branch = git.root()
+  svn_id = None
+  message = git.run('log', '-1', '--format=%B', branch)
+  footers = parse_footers(message)
+  git_svn_id = get_unique(footers, 'git-svn-id')
+  if git_svn_id:
+    match = GIT_SVN_ID_PATTERN.match(git_svn_id)
+    if match:
+      svn_id = match.group(1)
+  return svn_id
+
+
 def get_unique(footers, key):
   key = normalize_name(key)
   values = footers[key]
@@ -79,6 +95,14 @@
   if svn_commit:
     match = GIT_SVN_ID_PATTERN.match(svn_commit)
     assert match, 'Invalid git-svn-id value: %s' % svn_commit
+    # V8 has different semantics than Chromium.
+    if re.match(r'.*https?://v8\.googlecode\.com/svn/trunk',
+                match.group(1)):
+      return ('refs/heads/candidates', match.group(2))
+    if re.match(r'.*https?://v8\.googlecode\.com/svn/branches/bleeding_edge',
+                match.group(1)):
+      return ('refs/heads/master', match.group(2))
+
     # Assume that any trunk svn revision will match the commit-position
     # semantics.
     if re.match('.*/trunk.*$', match.group(1)):
@@ -129,7 +153,12 @@
     for k in footers.keys():
       for v in footers[k]:
         print '%s: %s' % (k, v)
+  return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_freezer.py b/git_freezer.py
index 7a23be2..91a4ec0 100755
--- a/git_freezer.py
+++ b/git_freezer.py
@@ -22,12 +22,17 @@
   return thaw()
 
 
-def main():
+def main(args):
   dispatcher = subcommand.CommandDispatcher(__name__)
-  ret = dispatcher.execute(optparse.OptionParser(), sys.argv[1:])
+  ret = dispatcher.execute(optparse.OptionParser(), args)
   if ret:
     print ret
+  return 0
 
 
 if __name__ == '__main__':
-  main()
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_map.py b/git_map.py
index 65814b9..99c8b05 100755
--- a/git_map.py
+++ b/git_map.py
@@ -37,13 +37,13 @@
 # Git emits combined color
 BRIGHT_RED = '\x1b[1;31m'
 
-def main():
+def main(argv):
   map_extra = config_list('depot_tools.map_extra')
   fmt = '%C(red bold)%h%x09%Creset%C(green)%d%Creset %C(yellow)%ad%Creset ~ %s'
   log_proc = subprocess2.Popen(
     [GIT_EXE, 'log', '--graph', '--branches', '--tags', root(),
      '--color=always', '--date=short', ('--pretty=format:' + fmt)
-    ] + map_extra + sys.argv[1:],
+    ] + map_extra + argv,
     stdout=subprocess2.PIPE,
     shell=False)
 
@@ -110,5 +110,8 @@
 
 
 if __name__ == '__main__':
-  sys.exit(main())
-
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_map_branches.py b/git_map_branches.py
index 350fed3..cd43900 100755
--- a/git_map_branches.py
+++ b/git_map_branches.py
@@ -19,6 +19,7 @@
     * Note that multiple branches may be Cyan, if they are all on the same
       commit, and you have that commit checked out.
   * Green - a local branch
+  * Blue - a 'branch-heads' branch
   * Magenta - a tag
   * Magenta '{NO UPSTREAM}' - If you have local branches which do not track any
     upstream, then you will see this.
@@ -27,12 +28,14 @@
 import argparse
 import collections
 import sys
+import subprocess2
 
 from third_party import colorama
 from third_party.colorama import Fore, Style
 
 from git_common import current_branch, upstream, tags, get_branches_info
 from git_common import get_git_version, MIN_UPSTREAM_TRACK_GIT_VERSION, hash_one
+from git_common import run
 
 DEFAULT_SEPARATOR = ' ' * 4
 
@@ -107,6 +110,8 @@
 
   def __init__(self):
     self.verbosity = 0
+    self.maxjobs = 0
+    self.show_subject = False
     self.output = OutputManager()
     self.__gone_branches = set()
     self.__branches_info = None
@@ -114,10 +119,25 @@
     self.__current_branch = None
     self.__current_hash = None
     self.__tag_set = None
+    self.__status_info = {}
 
   def start(self):
     self.__branches_info = get_branches_info(
         include_tracking_status=self.verbosity >= 1)
+    if self.verbosity >= 2:
+      # Avoid heavy import unless necessary.
+      from git_cl import get_cl_statuses, color_for_status
+
+      status_info = get_cl_statuses(self.__branches_info.keys(),
+                                    fine_grained=self.verbosity > 2,
+                                    max_processes=self.maxjobs)
+
+      for _ in xrange(len(self.__branches_info)):
+        # This is a blocking get which waits for the remote CL status to be
+        # retrieved.
+        (branch, url, status) = status_info.next()
+        self.__status_info[branch] = (url, color_for_status(status))
+
     roots = set()
 
     # A map of parents to a list of their children.
@@ -126,7 +146,7 @@
         continue
 
       parent = branch_info.upstream
-      if parent and not self.__branches_info[parent]:
+      if not self.__branches_info[parent]:
         branch_upstream = upstream(branch)
         # If git can't find the upstream, mark the upstream as gone.
         if branch_upstream:
@@ -156,6 +176,8 @@
   def __color_for_branch(self, branch, branch_hash):
     if branch.startswith('origin'):
       color = Fore.RED
+    elif branch.startswith('branch-heads'):
+      color = Fore.BLUE
     elif self.__is_invalid_parent(branch) or branch in self.__tag_set:
       color = Fore.MAGENTA
     elif self.__current_hash.startswith(branch_hash):
@@ -163,7 +185,7 @@
     else:
       color = Fore.GREEN
 
-    if self.__current_hash.startswith(branch_hash):
+    if branch_hash and self.__current_hash.startswith(branch_hash):
       color += Style.BRIGHT
     else:
       color += Style.NORMAL
@@ -177,7 +199,10 @@
     if branch_info:
       branch_hash = branch_info.hash
     else:
-      branch_hash = hash_one(branch, short=True)
+      try:
+        branch_hash = hash_one(branch, short=True)
+      except subprocess2.CalledProcessError:
+        branch_hash = None
 
     line = OutputLine()
 
@@ -231,10 +256,13 @@
 
     # The Rietveld issue associated with the branch.
     if self.verbosity >= 2:
-      import git_cl  # avoid heavy import cost unless we need it
       none_text = '' if self.__is_invalid_parent(branch) else 'None'
-      url = git_cl.Changelist(branchref=branch).GetIssueURL()
-      line.append(url or none_text, color=Fore.BLUE if url else Fore.WHITE)
+      (url, color) = self.__status_info[branch]
+      line.append(url or none_text, color=color)
+
+    # The subject of the most recent commit on the branch.
+    if self.show_subject:
+      line.append(run('log', '-n1', '--format=%s', branch))
 
     self.output.append(line)
 
@@ -257,14 +285,26 @@
                       help='Display branch hash and Rietveld URL')
   parser.add_argument('--no-color', action='store_true', dest='nocolor',
                       help='Turn off colors.')
+  parser.add_argument(
+      '-j', '--maxjobs', action='store', type=int,
+      help='The number of jobs to use when retrieving review status')
+  parser.add_argument('--show-subject', action='store_true',
+                      dest='show_subject', help='Show the commit subject.')
 
-  opts = parser.parse_args(argv[1:])
+  opts = parser.parse_args(argv)
 
   mapper = BranchMapper()
   mapper.verbosity = opts.v
   mapper.output.nocolor = opts.nocolor
+  mapper.maxjobs = opts.maxjobs
+  mapper.show_subject = opts.show_subject
   mapper.start()
   print mapper.output.as_formatted_string()
+  return 0
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_mark_merge_base.py b/git_mark_merge_base.py
index 673e2b4..214b3df 100755
--- a/git_mark_merge_base.py
+++ b/git_mark_merge_base.py
@@ -39,7 +39,7 @@
     try:
       remove_merge_base(cur)
     except CalledProcessError:
-      print "No merge base currently exists for %s." % cur
+      print 'No merge base currently exists for %s.' % cur
     return 0
 
   if opts.merge_base:
@@ -60,9 +60,12 @@
     print "Invalid merge_base %s" % opts.merge_base
 
   print "merge_base(%s): %s" % (cur, actual)
-
   return ret
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_nav_downstream.py b/git_nav_downstream.py
index ed3110b..6ea085b 100755
--- a/git_nav_downstream.py
+++ b/git_nav_downstream.py
@@ -34,7 +34,8 @@
     cur = hash_one(cur)
   downstreams = [b for b in branches() if upfn(b) == cur]
   if not downstreams:
-    return "No downstream branches"
+    print "No downstream branches"
+    return 1
   elif len(downstreams) == 1:
     run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
   else:
@@ -55,10 +56,12 @@
         run('checkout', downstreams[int(r)], stdout=sys.stdout,
             stderr=sys.stderr)
         break
+  return 0
 
 
 if __name__ == '__main__':
   try:
     sys.exit(main(sys.argv[1:]))
   except KeyboardInterrupt:
-    pass
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_new_branch.py b/git_new_branch.py
index 18ce018..03b0fcc 100755
--- a/git_new_branch.py
+++ b/git_new_branch.py
@@ -48,7 +48,12 @@
     sys.stderr.write(cpe.stderr)
     return 1
   sys.stderr.write('Switched to branch %s.\n' % opts.branch_name)
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_number.py b/git_number.py
index 6997f51..1867b97 100755
--- a/git_number.py
+++ b/git_number.py
@@ -259,25 +259,26 @@
                   "use the 'Cr-Commit-Position' value in the commit's message.")
     return 1
 
+  if opts.reset:
+    clear_caches(on_disk=True)
+    return
+
   try:
-    if opts.reset:
-      clear_caches(on_disk=True)
-      return
+    targets = git.parse_commitrefs(*(args or ['HEAD']))
+  except git.BadCommitRefException as e:
+    parser.error(e)
 
-    try:
-      targets = git.parse_commitrefs(*(args or ['HEAD']))
-    except git.BadCommitRefException as e:
-      parser.error(e)
+  load_generation_numbers(targets)
+  if not opts.no_cache:
+    finalize(targets)
 
-    load_generation_numbers(targets)
-    if not opts.no_cache:
-      finalize(targets)
-
-    print '\n'.join(map(str, map(get_num, targets)))
-    return 0
-  except KeyboardInterrupt:
-    return 1
+  print '\n'.join(map(str, map(get_num, targets)))
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main())
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_rebase_update.py b/git_rebase_update.py
index 09eaffa..f98b8b2 100755
--- a/git_rebase_update.py
+++ b/git_rebase_update.py
@@ -12,28 +12,35 @@
 import logging
 import sys
 import textwrap
+import os
 
+from fnmatch import fnmatch
 from pprint import pformat
 
 import git_common as git
 
 
 STARTING_BRANCH_KEY = 'depot-tools.rebase-update.starting-branch'
+STARTING_WORKDIR_KEY = 'depot-tools.rebase-update.starting-workdir'
 
 
-def find_return_branch():
-  """Finds the branch which we should return to after rebase-update completes.
+def find_return_branch_workdir():
+  """Finds the branch and working directory which we should return to after
+  rebase-update completes.
 
-  This value may persist across multiple invocations of rebase-update, if
+  These values may persist across multiple invocations of rebase-update, if
   rebase-update runs into a conflict mid-way.
   """
   return_branch = git.config(STARTING_BRANCH_KEY)
+  workdir = git.config(STARTING_WORKDIR_KEY)
   if not return_branch:
+    workdir = os.getcwd()
+    git.set_config(STARTING_WORKDIR_KEY, workdir)
     return_branch = git.current_branch()
     if return_branch != 'HEAD':
       git.set_config(STARTING_BRANCH_KEY, return_branch)
 
-  return return_branch
+  return return_branch, workdir
 
 
 def fetch_remotes(branch_tree):
@@ -41,15 +48,23 @@
   fetch_tags = False
   remotes = set()
   tag_set = git.tags()
+  fetchspec_map = {}
+  all_fetchspec_configs = git.run(
+      'config', '--get-regexp', r'^remote\..*\.fetch').strip()
+  for fetchspec_config in all_fetchspec_configs.splitlines():
+    key, _, fetchspec = fetchspec_config.partition(' ')
+    dest_spec = fetchspec.partition(':')[2]
+    remote_name = key.split('.')[1]
+    fetchspec_map[dest_spec] = remote_name
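+  # For example, the config line
+  # 'remote.origin.fetch +refs/heads/*:refs/remotes/origin/*' maps the dest
+  # spec 'refs/remotes/origin/*' to remote 'origin'.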
   for parent in branch_tree.itervalues():
     if parent in tag_set:
       fetch_tags = True
     else:
       full_ref = git.run('rev-parse', '--symbolic-full-name', parent)
-      if full_ref.startswith('refs/remotes'):
-        parts = full_ref.split('/')
-        remote_name = parts[2]
-        remotes.add(remote_name)
+      for dest_spec, remote_name in fetchspec_map.iteritems():
+        if fnmatch(full_ref, dest_spec):
+          remotes.add(remote_name)
+          break
 
   fetch_args = []
   if fetch_tags:
@@ -121,7 +136,8 @@
   if git.hash_one(parent) != start_hash:
     # Try a plain rebase first
     print 'Rebasing:', branch
-    if not git.rebase(parent, start_hash, branch, abort=True).success:
+    rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
+    if not rebase_ret.success:
       # TODO(iannucci): Find collapsible branches in a smarter way?
       print "Failed! Attempting to squash", branch, "...",
       squash_branch = branch+"_squash_attempt"
@@ -138,25 +154,36 @@
         git.squash_current_branch(merge_base=start_hash)
         git.rebase(parent, start_hash, branch)
       else:
-        # rebase and leave in mid-rebase state.
-        git.rebase(parent, start_hash, branch)
         print "Failed!"
         print
-        print "Here's what git-rebase had to say:"
-        print squash_ret.message
-        print
-        print textwrap.dedent(
-        """
-        Squashing failed. You probably have a real merge conflict.
 
-        Your working copy is in mid-rebase. Either:
-         * completely resolve like a normal git-rebase; OR
-         * abort the rebase and mark this branch as dormant:
-               git config branch.%s.dormant true
+        # rebase and leave in mid-rebase state.
+        # This second rebase attempt should always fail in the same
+        # way that the first one does.  If it magically succeeds then
+        # something very strange has happened.
+        second_rebase_ret = git.rebase(parent, start_hash, branch)
+        if second_rebase_ret.success: # pragma: no cover
+          print "Second rebase succeeded unexpectedly!"
+          print "Please see: http://crbug.com/425696"
+          print "First rebased failed with:"
+          print rebase_ret.stderr
+        else:
+          print "Here's what git-rebase (squashed) had to say:"
+          print
+          print squash_ret.stdout
+          print squash_ret.stderr
+          print textwrap.dedent(
+          """\
+          Squashing failed. You probably have a real merge conflict.
 
-        And then run `git rebase-update` again to resume.
-        """ % branch)
-        return False
+          Your working copy is in mid-rebase. Either:
+           * completely resolve like a normal git-rebase; OR
+           * abort the rebase and mark this branch as dormant:
+                 git config branch.%s.dormant true
+
+          And then run `git rebase-update` again to resume.
+          """ % branch)
+          return False
   else:
     print '%s up-to-date' % branch
 
@@ -166,7 +193,7 @@
   return True
 
 
-def main(args=()):
+def main(args=None):
   parser = argparse.ArgumentParser()
   parser.add_argument('--verbose', '-v', action='store_true')
   parser.add_argument('--no_fetch', '--no-fetch', '-n',
@@ -193,7 +220,8 @@
     )
     return 1
 
-  return_branch = find_return_branch()
+  return_branch, return_workdir = find_return_branch_workdir()
+  os.chdir(git.run('rev-parse', '--show-toplevel'))
 
   if git.current_branch() == 'HEAD':
     if git.run('status', '--porcelain'):
@@ -243,10 +271,17 @@
           % (return_branch, root_branch)
         )
       git.run('checkout', root_branch)
+    if return_workdir:
+      os.chdir(return_workdir)
     git.set_config(STARTING_BRANCH_KEY, '')
+    git.set_config(STARTING_WORKDIR_KEY, '')
 
   return retcode
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main())
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_rename_branch.py b/git_rename_branch.py
index cefa012..c0ac42e 100755
--- a/git_rename_branch.py
+++ b/git_rename_branch.py
@@ -44,7 +44,12 @@
   except subprocess2.CalledProcessError as cpe:
     sys.stderr.write(cpe.stderr)
     return 1
+  return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_reparent_branch.py b/git_reparent_branch.py
index fe79d3c..f24f52f 100755
--- a/git_reparent_branch.py
+++ b/git_reparent_branch.py
@@ -73,4 +73,8 @@
 
 
 if __name__ == '__main__':  # pragma: no cover
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_retry.py b/git_retry.py
index b40e6d2..d6dee65 100755
--- a/git_retry.py
+++ b/git_retry.py
@@ -153,4 +153,8 @@
 if __name__ == '__main__':
   logging.basicConfig()
   logging.getLogger().setLevel(logging.WARNING)
-  sys.exit(main(sys.argv[2:]))
+  try:
+    sys.exit(main(sys.argv[2:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_squash_branch.py b/git_squash_branch.py
index 0e02539..33366d2 100755
--- a/git_squash_branch.py
+++ b/git_squash_branch.py
@@ -6,7 +6,7 @@
 import argparse
 import sys
 
-from git_common import squash_current_branch
+import git_common
 
 def main(args):
   parser = argparse.ArgumentParser()
@@ -14,7 +14,15 @@
       '-m', '--message', metavar='<msg>', default='git squash commit.',
       help='Use the given <msg> as the first line of the commit message.')
   opts = parser.parse_args(args)
-  squash_current_branch(opts.message)
+  if git_common.is_dirty_git_tree('squash-branch'):
+    return 1
+  git_common.squash_current_branch(opts.message)
+  return 0
+
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_try.py b/git_try.py
index 45a5217..f8f6d30 100755
--- a/git_try.py
+++ b/git_try.py
@@ -41,8 +41,7 @@
     return None
 
 
-if __name__ == '__main__':
-  args = sys.argv[1:]
+def main(args):
   patchset = GetRietveldPatchsetNumber()
   if patchset:
     args.extend([
@@ -68,3 +67,12 @@
   except third_party.upload.ClientLoginError, e:
     print('Got an exception while trying to log in to Rietveld.')
     print(str(e))
+  return 0
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/git_upstream_diff.py b/git_upstream_diff.py
index 9d9db95..3e38c78 100755
--- a/git_upstream_diff.py
+++ b/git_upstream_diff.py
@@ -38,8 +38,12 @@
 
   cmd += extra_args
 
-  subprocess2.check_call(cmd)
+  return subprocess2.check_call(cmd)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gn.py b/gn.py
index 325e685..32c2fa0 100755
--- a/gn.py
+++ b/gn.py
@@ -30,8 +30,12 @@
     print >> sys.stderr, 'gn.py: Could not find gn executable at: %s' % gn_path
     return 2
   else:
-    return subprocess.call([gn_path] + sys.argv[1:])
+    return subprocess.call([gn_path] + args[1:])
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+  try:
+    sys.exit(main(sys.argv))
+  except KeyboardInterrupt:
+    sys.stderr.write('interrupted\n')
+    sys.exit(1)
diff --git a/gsutil.py b/gsutil.py
new file mode 100755
index 0000000..53589a2
--- /dev/null
+++ b/gsutil.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run a pinned gsutil."""
+
+
+import argparse
+import base64
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import urllib2
+import zipfile
+
+
+GSUTIL_URL = 'https://storage.googleapis.com/pub/'
+API_URL = 'https://www.googleapis.com/storage/v1/b/pub/o/'
+
+THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_BIN_DIR = os.path.join(THIS_DIR, 'external_bin', 'gsutil')
+DEFAULT_FALLBACK_GSUTIL = os.path.join(
+    THIS_DIR, 'third_party', 'gsutil', 'gsutil')
+
+
+class InvalidGsutilError(Exception):
+  pass
+
+
+def download_gsutil(version, target_dir):
+  """Downloads gsutil into the target_dir."""
+  filename = 'gsutil_%s.zip' % version
+  target_filename = os.path.join(target_dir, filename)
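+  # e.g. 'gsutil_4.13.zip' under target_dir for the default pinned version.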
+
+  # Check if the target exists already.
+  if os.path.exists(target_filename):
+    md5_calc = hashlib.md5()
+    with open(target_filename, 'rb') as f:
+      while True:
+        buf = f.read(4096)
+        if not buf:
+          break
+        md5_calc.update(buf)
+    local_md5 = md5_calc.hexdigest()
+
+    metadata_url = '%s%s' % (API_URL, filename)
+    metadata = json.load(urllib2.urlopen(metadata_url))
+    # md5Hash is the base64 of the raw 16-byte digest; hex-encode it so it
+    # compares against the hexdigest() computed above.
+    remote_md5 = base64.b64decode(metadata['md5Hash']).encode('hex')
+
+    if local_md5 == remote_md5:
+      return target_filename
+    os.remove(target_filename)
+
+  # Do the download.
+  url = '%s%s' % (GSUTIL_URL, filename)
+  u = urllib2.urlopen(url)
+  with open(target_filename, 'wb') as f:
+    while True:
+      buf = u.read(4096)
+      if not buf:
+        break
+      f.write(buf)
+  return target_filename
+
+
+def check_gsutil(gsutil_bin):
+  """Run gsutil version and make sure it runs."""
+  return subprocess.call(
+      [sys.executable, gsutil_bin, 'version'],
+      stdout=subprocess.PIPE, stderr=subprocess.STDOUT) == 0
+
+
+def ensure_gsutil(version, target):
+  bin_dir = os.path.join(target, 'gsutil_%s' % version)
+  gsutil_bin = os.path.join(bin_dir, 'gsutil', 'gsutil')
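+  # e.g. <target>/gsutil_4.13/gsutil/gsutil for the default pinned version.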
+  if os.path.isfile(gsutil_bin) and check_gsutil(gsutil_bin):
+    # Everything is awesome! we're all done here.
+    return gsutil_bin
+
+  if os.path.isdir(bin_dir):
+    # Clean up if we're redownloading a corrupted gsutil.
+    shutil.rmtree(bin_dir)
+  cache_dir = os.path.join(target, '.cache_dir')
+  if not os.path.isdir(cache_dir):
+    os.makedirs(cache_dir)
+  target_zip_filename = download_gsutil(version, cache_dir)
+  with zipfile.ZipFile(target_zip_filename, 'r') as target_zip:
+    target_zip.extractall(bin_dir)
+
+  # Final check that the gsutil bin is okay.  This should never fail.
+  if not check_gsutil(gsutil_bin):
+    raise InvalidGsutilError()
+
+  return gsutil_bin
+
+
+def run_gsutil(force_version, fallback, target, args):
+  if force_version:
+    gsutil_bin = ensure_gsutil(force_version, target)
+  else:
+    gsutil_bin = fallback
+  cmd = [sys.executable, gsutil_bin] + args
+  return subprocess.call(cmd)
+
+
+def parse_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--force-version', default='4.13')
+  parser.add_argument('--fallback', default=DEFAULT_FALLBACK_GSUTIL)
+  parser.add_argument('--target', default=DEFAULT_BIN_DIR)
+  parser.add_argument('args', nargs=argparse.REMAINDER)
+
+  args, extras = parser.parse_known_args()
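+  # Strip a leading '--' separator so that, e.g., 'gsutil.py -- ls gs://foo'
+  # forwards ['ls', 'gs://foo'] to gsutil ('gs://foo' is illustrative).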
+  if args.args and args.args[0] == '--':
+    args.args.pop(0)
+  if extras:
+    args.args = extras + args.args
+  return args.force_version, args.fallback, args.target, args.args
+
+
+def main():
+  force_version, fallback, target, args = parse_args()
+  return run_gsutil(force_version, fallback, target, args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/infra/README.md b/infra/README.md
new file mode 100644
index 0000000..5788e8a
--- /dev/null
+++ b/infra/README.md
@@ -0,0 +1 @@
+This directory contains infra-specific files.
diff --git a/infra/config/OWNERS b/infra/config/OWNERS
new file mode 100644
index 0000000..2aa95ea
--- /dev/null
+++ b/infra/config/OWNERS
@@ -0,0 +1,5 @@
+set noparent
+akuegel@chromium.org
+phajdan.jr@chromium.org
+sergiyb@chromium.org
+tandrii@chromium.org
diff --git a/infra/config/README.md b/infra/config/README.md
new file mode 100644
index 0000000..c036d61
--- /dev/null
+++ b/infra/config/README.md
@@ -0,0 +1 @@
+This directory contains configuration files for infra services.
diff --git a/infra/config/cq.cfg b/infra/config/cq.cfg
new file mode 100644
index 0000000..3469cdc
--- /dev/null
+++ b/infra/config/cq.cfg
@@ -0,0 +1,38 @@
+# Commit Queue configuration file. The documentation of the format can be found
+# at http://luci-config.appspot.com/schemas/projects/refs:cq.cfg.
+
+version: 1
+cq_name: "depot_tools"
+cq_status_url: "https://chromium-cq-status.appspot.com"
+svn_repo_url: "svn://svn.chromium.org/chrome/trunk/tools/depot_tools"
+
+rietveld {
+  url: "https://codereview.chromium.org"
+  project_bases: "^svn\\:\\/\\/svn\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\.google\\.com\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
+  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/git\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+  project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/a\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
+}
+
+verifiers {
+  reviewer_lgtm {
+    committer_list: "chromium"
+  }
+
+  try_job {
+    buckets {
+      name: "tryserver.chromium.linux"
+      builders {
+        name: "depot_tools_presubmit"
+      }
+    }
+  }
+}
diff --git a/man/html/depot_tools_tutorial.html b/man/html/depot_tools_tutorial.html
index ffe62c1..2c59b80 100644
--- a/man/html/depot_tools_tutorial.html
+++ b/man/html/depot_tools_tutorial.html
@@ -873,7 +873,7 @@
 <div class="paragraph"><p>Clone the <em>depot_tools</em> repository:</p></div>
 <div class="listingblock">
 <div class="content">
-<pre><code><strong><span class="white">$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools</span></strong></code></pre>
+<pre><code><strong><span class="white">$ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git</span></strong></code></pre>
 </div></div>
 <div class="paragraph"><p>Add <em>depot_tools</em> to the <em>end</em> of your PATH (you will probably want to put this
 in your <code>~/.bashrc</code> or <code>~/.zshrc</code>). Assuming you cloned <em>depot_tools</em> to
@@ -965,8 +965,8 @@
 commands:</p></div>
 <div class="listingblock">
 <div class="content">
-<pre><code><strong><span class="white">$ git config --global user.name &#8220;John Doe&#8221;</span></strong>
-<strong><span class="white">$ git config --global user.email &#8220;jdoe@email.com&#8221;</span></strong>
+<pre><code><strong><span class="white">$ git config --global user.name "John Doe"</span></strong>
+<strong><span class="white">$ git config --global user.email "jdoe@email.com"</span></strong>
 <strong><span class="white">$ git config --global core.autocrlf false</span></strong>
 <strong><span class="white">$ git config --global core.filemode false</span></strong>
 <strong><span class="white">$</span></strong> # and for fun!
@@ -1238,9 +1238,9 @@
 </code></pre></div></div><p><div class="paragraph"> Let's fix something!</p></div><div class="listingblock"><div class="content"><pre><code><span style="font-weight: bold; color: #ffffff">$ git new-branch fix_typo</span>
 <span style="font-weight: bold; color: #ffffff">$ echo -e '/Banana\ns/Banana/Kuun\nwq' | ed build/whitespace_file.txt</span>
 1503
-1501
 It was a Domo-Banana.
 It was a Domo-Kuun.
+1501
 <span style="font-weight: bold; color: #ffffff">$ git commit -am 'Fix terrible typo.'</span>
 [fix_typo 615ffa7] Fix terrible typo.
  1 file changed, 1 insertion(+), 1 deletion(-)
@@ -1290,9 +1290,9 @@
   (use "git push" to publish your local commits)
 <span style="font-weight: bold; color: #ffffff">$ echo -e '/Kuun\ns/Kuun/Kun\nwq' | ed build/whitespace_file.txt</span>
 1501
-1500
 It was a Domo-Kuun.
 It was a Domo-Kun.
+1500
 <span style="font-weight: bold; color: #ffffff">$ git upstream-diff --wordwise</span>
 <span style="font-weight: bold">diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt</span>
 <span style="font-weight: bold">index 3eba355..57cdcee 100644</span>
@@ -1342,14 +1342,14 @@
 
 HEAD is now at beec6f4... Make ReflectorImpl use mailboxes
 <span style="font-weight: bold; color: #ffffff">$ git nav-downstream</span>
-Please select a downstream branch
-  0. chap2
-  1. fix_typo
-Selection (0-1)[0]: 0
 Previous HEAD position was beec6f4... Make ReflectorImpl use mailboxes
 Switched to branch 'chap2'
 Your branch is ahead of 'origin/master' by 1 commit.
   (use "git push" to publish your local commits)
+Please select a downstream branch
+  0. chap2
+  1. fix_typo
+Selection (0-1)[0]: 0
 <span style="font-weight: bold; color: #ffffff">$ git map-branches</span>
 <span style="color: #e42e16"></span><span style="color: #e42e16">origin/master
 </span><span style="color: #33d6e5"></span><span style="font-weight: bold; color: #33d6e5">  chap2 *
@@ -1581,7 +1581,7 @@
 <div id="footnotes"><hr /></div>
 <div id="footer">
 <div id="footer-text">
-Last updated 2014-05-09 17:43:43 PDT
+Last updated 2015-01-13 15:27:56 PST
 </div>
 </div>
 </body>
diff --git a/man/html/git-auto-svn.html b/man/html/git-auto-svn.html
new file mode 100644
index 0000000..24445d0
--- /dev/null
+++ b/man/html/git-auto-svn.html
@@ -0,0 +1,837 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+    "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+<head>
+<meta http-equiv="Content-Type" content="application/xhtml+xml; charset=UTF-8" />
+<meta name="generator" content="AsciiDoc 8.6.9" />
+<title>git-auto-svn(1)</title>
+<style type="text/css">
+/* Shared CSS for AsciiDoc xhtml11 and html5 backends */
+
+/* Default font. */
+body {
+  font-family: Georgia,serif;
+}
+
+/* Title font. */
+h1, h2, h3, h4, h5, h6,
+div.title, caption.title,
+thead, p.table.header,
+#toctitle,
+#author, #revnumber, #revdate, #revremark,
+#footer {
+  font-family: Arial,Helvetica,sans-serif;
+}
+
+body {
+  margin: 1em 5% 1em 5%;
+}
+
+a {
+  color: blue;
+  text-decoration: underline;
+}
+a:visited {
+  color: fuchsia;
+}
+
+em {
+  font-style: italic;
+  color: navy;
+}
+
+strong {
+  font-weight: bold;
+  color: #083194;
+}
+
+h1, h2, h3, h4, h5, h6 {
+  color: #527bbd;
+  margin-top: 1.2em;
+  margin-bottom: 0.5em;
+  line-height: 1.3;
+}
+
+h1, h2, h3 {
+  border-bottom: 2px solid silver;
+}
+h2 {
+  padding-top: 0.5em;
+}
+h3 {
+  float: left;
+}
+h3 + * {
+  clear: left;
+}
+h5 {
+  font-size: 1.0em;
+}
+
+div.sectionbody {
+  margin-left: 0;
+}
+
+hr {
+  border: 1px solid silver;
+}
+
+p {
+  margin-top: 0.5em;
+  margin-bottom: 0.5em;
+}
+
+ul, ol, li > p {
+  margin-top: 0;
+}
+ul > li     { color: #aaa; }
+ul > li > * { color: black; }
+
+.monospaced, code, pre {
+  font-family: "Courier New", Courier, monospace;
+  font-size: inherit;
+  color: navy;
+  padding: 0;
+  margin: 0;
+}
+pre {
+  white-space: pre-wrap;
+}
+
+#author {
+  color: #527bbd;
+  font-weight: bold;
+  font-size: 1.1em;
+}
+#email {
+}
+#revnumber, #revdate, #revremark {
+}
+
+#footer {
+  font-size: small;
+  border-top: 2px solid silver;
+  padding-top: 0.5em;
+  margin-top: 4.0em;
+}
+#footer-text {
+  float: left;
+  padding-bottom: 0.5em;
+}
+#footer-badges {
+  float: right;
+  padding-bottom: 0.5em;
+}
+
+#preamble {
+  margin-top: 1.5em;
+  margin-bottom: 1.5em;
+}
+div.imageblock, div.exampleblock, div.verseblock,
+div.quoteblock, div.literalblock, div.listingblock, div.sidebarblock,
+div.admonitionblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+div.admonitionblock {
+  margin-top: 2.0em;
+  margin-bottom: 2.0em;
+  margin-right: 10%;
+  color: #606060;
+}
+
+div.content { /* Block element content. */
+  padding: 0;
+}
+
+/* Block element titles. */
+div.title, caption.title {
+  color: #527bbd;
+  font-weight: bold;
+  text-align: left;
+  margin-top: 1.0em;
+  margin-bottom: 0.5em;
+}
+div.title + * {
+  margin-top: 0;
+}
+
+td div.title:first-child {
+  margin-top: 0.0em;
+}
+div.content div.title:first-child {
+  margin-top: 0.0em;
+}
+div.content + div.title {
+  margin-top: 0.0em;
+}
+
+div.sidebarblock > div.content {
+  background: #ffffee;
+  border: 1px solid #dddddd;
+  border-left: 4px solid #f0f0f0;
+  padding: 0.5em;
+}
+
+div.listingblock > div.content {
+  border: 1px solid #dddddd;
+  border-left: 5px solid #f0f0f0;
+  background: #f8f8f8;
+  padding: 0.5em;
+}
+
+div.quoteblock, div.verseblock {
+  padding-left: 1.0em;
+  margin-left: 1.0em;
+  margin-right: 10%;
+  border-left: 5px solid #f0f0f0;
+  color: #888;
+}
+
+div.quoteblock > div.attribution {
+  padding-top: 0.5em;
+  text-align: right;
+}
+
+div.verseblock > pre.content {
+  font-family: inherit;
+  font-size: inherit;
+}
+div.verseblock > div.attribution {
+  padding-top: 0.75em;
+  text-align: left;
+}
+/* DEPRECATED: Pre version 8.2.7 verse style literal block. */
+div.verseblock + div.attribution {
+  text-align: left;
+}
+
+div.admonitionblock .icon {
+  vertical-align: top;
+  font-size: 1.1em;
+  font-weight: bold;
+  text-decoration: underline;
+  color: #527bbd;
+  padding-right: 0.5em;
+}
+div.admonitionblock td.content {
+  padding-left: 0.5em;
+  border-left: 3px solid #dddddd;
+}
+
+div.exampleblock > div.content {
+  border-left: 3px solid #dddddd;
+  padding-left: 0.5em;
+}
+
+div.imageblock div.content { padding-left: 0; }
+span.image img { border-style: none; vertical-align: text-bottom; }
+a.image:visited { color: white; }
+
+dl {
+  margin-top: 0.8em;
+  margin-bottom: 0.8em;
+}
+dt {
+  margin-top: 0.5em;
+  margin-bottom: 0;
+  font-style: normal;
+  color: navy;
+}
+dd > *:first-child {
+  margin-top: 0.1em;
+}
+
+ul, ol {
+    list-style-position: outside;
+}
+ol.arabic {
+  list-style-type: decimal;
+}
+ol.loweralpha {
+  list-style-type: lower-alpha;
+}
+ol.upperalpha {
+  list-style-type: upper-alpha;
+}
+ol.lowerroman {
+  list-style-type: lower-roman;
+}
+ol.upperroman {
+  list-style-type: upper-roman;
+}
+
+div.compact ul, div.compact ol,
+div.compact p,
+div.compact div {
+  margin-top: 0.1em;
+  margin-bottom: 0.1em;
+}
+
+tfoot {
+  font-weight: bold;
+}
+td > div.verse {
+  white-space: pre;
+}
+
+div.hdlist {
+  margin-top: 0.8em;
+  margin-bottom: 0.8em;
+}
+div.hdlist tr {
+  padding-bottom: 15px;
+}
+dt.hdlist1.strong, td.hdlist1.strong {
+  font-weight: bold;
+}
+td.hdlist1 {
+  vertical-align: top;
+  font-style: normal;
+  padding-right: 0.8em;
+  color: navy;
+}
+td.hdlist2 {
+  vertical-align: top;
+}
+div.hdlist.compact tr {
+  margin: 0;
+  padding-bottom: 0;
+}
+
+.comment {
+  background: yellow;
+}
+
+.footnote, .footnoteref {
+  font-size: 0.8em;
+}
+
+span.footnote, span.footnoteref {
+  vertical-align: super;
+}
+
+#footnotes {
+  margin: 20px 0 20px 0;
+  padding: 7px 0 0 0;
+}
+
+#footnotes div.footnote {
+  margin: 0 0 5px 0;
+}
+
+#footnotes hr {
+  border: none;
+  border-top: 1px solid silver;
+  height: 1px;
+  text-align: left;
+  margin-left: 0;
+  width: 20%;
+  min-width: 100px;
+}
+
+div.colist td {
+  padding-right: 0.5em;
+  padding-bottom: 0.3em;
+  vertical-align: top;
+}
+div.colist td img {
+  margin-top: 0.3em;
+}
+
+@media print {
+  #footer-badges { display: none; }
+}
+
+#toc {
+  margin-bottom: 2.5em;
+}
+
+#toctitle {
+  color: #527bbd;
+  font-size: 1.1em;
+  font-weight: bold;
+  margin-top: 1.0em;
+  margin-bottom: 0.1em;
+}
+
+div.toclevel0, div.toclevel1, div.toclevel2, div.toclevel3, div.toclevel4 {
+  margin-top: 0;
+  margin-bottom: 0;
+}
+div.toclevel2 {
+  margin-left: 2em;
+  font-size: 0.9em;
+}
+div.toclevel3 {
+  margin-left: 4em;
+  font-size: 0.9em;
+}
+div.toclevel4 {
+  margin-left: 6em;
+  font-size: 0.9em;
+}
+
+span.aqua { color: aqua; }
+span.black { color: black; }
+span.blue { color: blue; }
+span.fuchsia { color: fuchsia; }
+span.gray { color: gray; }
+span.green { color: green; }
+span.lime { color: lime; }
+span.maroon { color: maroon; }
+span.navy { color: navy; }
+span.olive { color: olive; }
+span.purple { color: purple; }
+span.red { color: red; }
+span.silver { color: silver; }
+span.teal { color: teal; }
+span.white { color: white; }
+span.yellow { color: yellow; }
+
+span.aqua-background { background: aqua; }
+span.black-background { background: black; }
+span.blue-background { background: blue; }
+span.fuchsia-background { background: fuchsia; }
+span.gray-background { background: gray; }
+span.green-background { background: green; }
+span.lime-background { background: lime; }
+span.maroon-background { background: maroon; }
+span.navy-background { background: navy; }
+span.olive-background { background: olive; }
+span.purple-background { background: purple; }
+span.red-background { background: red; }
+span.silver-background { background: silver; }
+span.teal-background { background: teal; }
+span.white-background { background: white; }
+span.yellow-background { background: yellow; }
+
+span.big { font-size: 2em; }
+span.small { font-size: 0.6em; }
+
+span.underline { text-decoration: underline; }
+span.overline { text-decoration: overline; }
+span.line-through { text-decoration: line-through; }
+
+div.unbreakable { page-break-inside: avoid; }
+
+
+/*
+ * xhtml11 specific
+ *
+ * */
+
+div.tableblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+div.tableblock > table {
+  border: 3px solid #527bbd;
+}
+thead, p.table.header {
+  font-weight: bold;
+  color: #527bbd;
+}
+p.table {
+  margin-top: 0;
+}
+/* Because the table frame attribute is overridden by CSS in most browsers. */
+div.tableblock > table[frame="void"] {
+  border-style: none;
+}
+div.tableblock > table[frame="hsides"] {
+  border-left-style: none;
+  border-right-style: none;
+}
+div.tableblock > table[frame="vsides"] {
+  border-top-style: none;
+  border-bottom-style: none;
+}
+
+
+/*
+ * html5 specific
+ *
+ * */
+
+table.tableblock {
+  margin-top: 1.0em;
+  margin-bottom: 1.5em;
+}
+thead, p.tableblock.header {
+  font-weight: bold;
+  color: #527bbd;
+}
+p.tableblock {
+  margin-top: 0;
+}
+table.tableblock {
+  border-width: 3px;
+  border-spacing: 0px;
+  border-style: solid;
+  border-color: #527bbd;
+  border-collapse: collapse;
+}
+th.tableblock, td.tableblock {
+  border-width: 1px;
+  padding: 4px;
+  border-style: solid;
+  border-color: #527bbd;
+}
+
+table.tableblock.frame-topbot {
+  border-left-style: hidden;
+  border-right-style: hidden;
+}
+table.tableblock.frame-sides {
+  border-top-style: hidden;
+  border-bottom-style: hidden;
+}
+table.tableblock.frame-none {
+  border-style: hidden;
+}
+
+th.tableblock.halign-left, td.tableblock.halign-left {
+  text-align: left;
+}
+th.tableblock.halign-center, td.tableblock.halign-center {
+  text-align: center;
+}
+th.tableblock.halign-right, td.tableblock.halign-right {
+  text-align: right;
+}
+
+th.tableblock.valign-top, td.tableblock.valign-top {
+  vertical-align: top;
+}
+th.tableblock.valign-middle, td.tableblock.valign-middle {
+  vertical-align: middle;
+}
+th.tableblock.valign-bottom, td.tableblock.valign-bottom {
+  vertical-align: bottom;
+}
+
+
+/*
+ * manpage specific
+ *
+ * */
+
+body.manpage h1 {
+  padding-top: 0.5em;
+  padding-bottom: 0.5em;
+  border-top: 2px solid silver;
+  border-bottom: 2px solid silver;
+}
+body.manpage h2 {
+  border-style: none;
+}
+body.manpage div.sectionbody {
+  margin-left: 3em;
+}
+
+@media print {
+  body.manpage div#toc { display: none; }
+}
+
+
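+/* depot_tools additions: restyle listing blocks as dark terminal windows,
+ * overriding the light #f8f8f8 background declared earlier in this sheet
+ * (presumably so the ANSI-colored command transcripts read naturally). */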
+div.listingblock > div.content {
+  background: rgb(28, 28, 28);
+}
+
+div.listingblock > div > pre > code {
+  color: rgb(187, 187, 187);
+}
+</style>
+<script type="text/javascript">
+/*<![CDATA[*/
+var asciidoc = {  // Namespace.
+
+/////////////////////////////////////////////////////////////////////
+// Table Of Contents generator
+/////////////////////////////////////////////////////////////////////
+
+/* Author: Mihai Bazon, September 2002
+ * http://students.infoiasi.ro/~mishoo
+ *
+ * Table Of Content generator
+ * Version: 0.4
+ *
+ * Feel free to use this script under the terms of the GNU General Public
+ * License, as long as you do not remove or alter this notice.
+ */
+
+ /* modified by Troy D. Hanson, September 2006. License: GPL */
+ /* modified by Stuart Rackham, 2006, 2009. License: GPL */
+
+// toclevels = 1..4.
+toc: function (toclevels) {
+
+  function getText(el) {
+    var text = "";
+    for (var i = el.firstChild; i != null; i = i.nextSibling) {
+      if (i.nodeType == 3 /* Node.TEXT_NODE */) // IE doesn't define the Node constants.
+        text += i.data;
+      else if (i.firstChild != null)
+        text += getText(i);
+    }
+    return text;
+  }
+
+  function TocEntry(el, text, toclevel) {
+    this.element = el;
+    this.text = text;
+    this.toclevel = toclevel;
+  }
+
+  function tocEntries(el, toclevels) {
+    var result = [];
+    var re = new RegExp('[hH]([1-'+(toclevels+1)+'])');
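+    // For example, with toclevels == 2 this builds /[hH]([1-3])/, matching
+    // h1..h3 (case-insensitive) and capturing the heading level digit.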
+    // Function that scans the DOM tree for header elements (the DOM2
+    // nodeIterator API would be a better technique but not supported by all
+    // browsers).
+    var iterate = function (el) {
+      for (var i = el.firstChild; i != null; i = i.nextSibling) {
+        if (i.nodeType == 1 /* Node.ELEMENT_NODE */) {
+          var mo = re.exec(i.tagName);
+          if (mo && (i.getAttribute("class") || i.getAttribute("className")) != "float") {
+            result[result.length] = new TocEntry(i, getText(i), mo[1]-1);
+          }
+          iterate(i);
+        }
+      }
+    }
+    iterate(el);
+    return result;
+  }
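+  // tocEntries returns a flat, document-ordered list; each entry's toclevel
+  // is zero-based (h1 -> 0, h2 -> 1, ...) and later selects the
+  // div.toclevelN class used for indentation in the stylesheet above.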
+
+  var toc = document.getElementById("toc");
+  if (!toc) {
+    return;
+  }
+
+  // Delete existing TOC entries in case we're reloading the TOC.
+  var tocEntriesToRemove = [];
+  var i;
+  for (i = 0; i < toc.childNodes.length; i++) {
+    var entry = toc.childNodes[i];
+    if (entry.nodeName.toLowerCase() == 'div'
+     && entry.getAttribute("class")
+     && entry.getAttribute("class").match(/^toclevel/))
+      tocEntriesToRemove.push(entry);
+  }
+  for (i = 0; i < tocEntriesToRemove.length; i++) {
+    toc.removeChild(tocEntriesToRemove[i]);
+  }
+
+  // Rebuild TOC entries.
+  var entries = tocEntries(document.getElementById("content"), toclevels);
+  for (var i = 0; i < entries.length; ++i) {
+    var entry = entries[i];
+    if (entry.element.id == "")
+      entry.element.id = "_toc_" + i;
+    var a = document.createElement("a");
+    a.href = "#" + entry.element.id;
+    a.appendChild(document.createTextNode(entry.text));
+    var div = document.createElement("div");
+    div.appendChild(a);
+    div.className = "toclevel" + entry.toclevel;
+    toc.appendChild(div);
+  }
+  if (entries.length == 0)
+    toc.parentNode.removeChild(toc);
+},
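+// Rough usage sketch (illustrative only; the call below is an assumption,
+// not part of this file): a generated page would typically rebuild the TOC
+// for two heading levels once the DOM is ready, e.g.
+//   window.onload = function () { asciidoc.toc(2); asciidoc.footnotes(); };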
+
+
+/////////////////////////////////////////////////////////////////////
+// Footnotes generator
+/////////////////////////////////////////////////////////////////////
+
+/* Based on footnote generation code from:
+ * http://www.brandspankingnew.net/archive/2005/07/format_footnote.html
+ */
+
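+/* Informal outline (summary comment, not normative): number each
+ * span.footnote in #content, swap its body for a linked marker, cache the
+ * raw note text in a data-note attribute, and append the note to the
+ * #footnotes holder at the bottom of the page. */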
+footnotes: function () {
+  // Delete existing footnote entries in case we're reloading the footnotes.
+  var i;
+  var noteholder = document.getElementById("footnotes");
+  if (!noteholder) {
+    return;
+  }
+  var entriesToRemove = [];
+  for (i = 0; i < noteholder.childNodes.length; i++) {
+    var entry = noteholder.childNodes[i];
+    if (entry.nodeName.toLowerCase() == 'div' && entry.getAttribute("class") == "footnote")
+      entriesToRemove.push(entry);
+  }
+  for (i = 0; i < entriesToRemove.length; i++) {
+    noteholder.removeChild(entriesToRemove[i]);
+  }
+
+  // Rebuild footnote entries.
+  var cont = document.getElementById("content");
+  var spans = cont.getElementsByTagName("span");
+  var refs = {};
+  var n = 0;
+  for (i=0; i<spans.length; i++) {
+    if (spans[i].className == "footnote") {
+      n++;
+      var note = spans[i].getAttribute("data-note");
+      if (!note) {
+        // Use [\s\S] in place of . so that multi-line matches work,
+        // because JavaScript has no s (dotall) regex flag.
+        note = spans[i].innerHTML.match(/\s*\[([\s\S]*)]\s*/)[1];
+        spans[i].innerHTML =
+          "[<a id='_footnoteref_" + n + "' href='#_footnote_" + n +
+          "' title='View footnote' class='footnote'>" + n + "</a>]";
+        spans[i].setAttribute("data-note", note);
+      }
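+      // On a re-run, data-note is read back instead of re-parsing
+      // innerHTML, which by now holds the generated anchor markup.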
+      noteholder.innerHTML +=
+        "<div class='footnote' id='_footnote_" + n + "'>" +
+        "<a href='#_footnoteref_" + n + "' title='Return to text'>" +
+        n + "</a>. " + note + "</div>";
+      var id = spans[i].getAttribute("id");
+      if (id != null) refs["#"+id] = n;
+    }
+  }
+  if (n == 0)
+    noteholder.parentNode.removeChild(noteholder);
+  else {
+    // Process footnoterefs.
+    for (i=0; i<spans.length; i++) {
+      if (spans[i].className == "footnoteref") {
+        var href = spans[i].getElementsByTagName("a")[0].getAttribute("href");
+        href = href.match(/#.*/)[0];  // Because IE returns the full URL.
+        n = refs[href];
+        spans[i].innerHTML =